hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9272ac6cfae08d41e8ef106ac9e7799355ded83b | 168 | py | Python | tools/_init_path.py | collector-m/ST3D | 720e04aa3dc4bb95ac336171b240b6c3130144e5 | [
"Apache-2.0"
] | 184 | 2021-03-09T12:19:49.000Z | 2022-03-31T09:19:05.000Z | tools/_init_path.py | collector-m/ST3D | 720e04aa3dc4bb95ac336171b240b6c3130144e5 | [
"Apache-2.0"
] | 36 | 2021-03-23T08:42:38.000Z | 2022-03-31T09:14:41.000Z | tools/_init_path.py | collector-m/ST3D | 720e04aa3dc4bb95ac336171b240b6c3130144e5 | [
"Apache-2.0"
] | 22 | 2021-03-10T09:32:27.000Z | 2022-03-28T05:01:45.000Z | import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), '../'))
| 33.6 | 83 | 0.732143 | 29 | 168 | 3.965517 | 0.344828 | 0.26087 | 0.226087 | 0.243478 | 0.817391 | 0.817391 | 0.521739 | 0 | 0 | 0 | 0 | 0.012579 | 0.053571 | 168 | 4 | 84 | 42 | 0.710692 | 0 | 0 | 0 | 0 | 0 | 0.017857 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 9 |
928f2fd7a6729be1f17b9910f6da99c2b08ae64f | 213 | py | Python | tests/utils/test_dynamic_import.py | murthyn/composer | 2a04cf387dd8558556500f7ef2bc6d3d131043d5 | [
"Apache-2.0"
] | null | null | null | tests/utils/test_dynamic_import.py | murthyn/composer | 2a04cf387dd8558556500f7ef2bc6d3d131043d5 | [
"Apache-2.0"
] | null | null | null | tests/utils/test_dynamic_import.py | murthyn/composer | 2a04cf387dd8558556500f7ef2bc6d3d131043d5 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 MosaicML. All Rights Reserved.
from functools import partial
from composer.utils import import_object
def test_dynamic_import_object():
assert import_object("functools:partial") is partial
| 23.666667 | 56 | 0.812207 | 28 | 213 | 6 | 0.642857 | 0.214286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021622 | 0.131455 | 213 | 8 | 57 | 26.625 | 0.886486 | 0.211268 | 0 | 0 | 0 | 0 | 0.10241 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.25 | true | 0 | 1 | 0 | 1.25 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
2be3a0137fab6fe7b92269a264b4c52c32b7530b | 33,144 | py | Python | f5/bigip/tm/security/test/functional/test_scrubber.py | hixio-mh/f5-common-python | 53038d44afa381b70f6e2bb459f7b9b943f3172d | [
"Apache-2.0"
] | 272 | 2016-02-23T06:05:44.000Z | 2022-02-20T02:09:32.000Z | f5/bigip/tm/security/test/functional/test_scrubber.py | hixio-mh/f5-common-python | 53038d44afa381b70f6e2bb459f7b9b943f3172d | [
"Apache-2.0"
] | 1,103 | 2016-02-11T17:48:03.000Z | 2022-02-15T17:13:37.000Z | f5/bigip/tm/security/test/functional/test_scrubber.py | hixio-mh/f5-common-python | 53038d44afa381b70f6e2bb459f7b9b943f3172d | [
"Apache-2.0"
] | 167 | 2016-02-11T17:48:21.000Z | 2022-01-17T20:13:05.000Z | # Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from distutils.version import LooseVersion
from f5.bigip.resource import MissingRequiredCreationParameter
from f5.bigip.tm.security.scrubber import Profile
from f5.bigip.tm.security.scrubber import Scrubber_Categories
from f5.bigip.tm.security.scrubber import Scrubber_Netflow_Protected_Server
from f5.bigip.tm.security.scrubber import Scrubber_Rd_Network_Prefix
from f5.bigip.tm.security.scrubber import Scrubber_Rt_Domain
from f5.bigip.tm.security.scrubber import Scrubber_Virtual_Server
from requests.exceptions import HTTPError
DESC = 'TESTADDED'
@pytest.fixture(scope='function')
def scrubber_categories(mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
cat_lst = p1.scrubber_categories_s
params_set = {'name': 'fake_categories_profile', 'blacklistCategory': '/Common/proxy', 'routeDomainName': '/Common/0', 'nextHop': '1.1.1.1'}
c1 = cat_lst.scrubber_categories.create(**params_set)
yield c1
c1.delete()
@pytest.fixture(scope='function')
def traffic_matching_criteria(mgmt_root):
tmc1 = mgmt_root.tm.security.protected_servers.traffic_matching_criteria_s.traffic_matching_criteria.create(
name='fake_tmc', destinationAddressInline='1.1.1.1', sourceAddressInline='2.2.2.2', partition='Common')
yield tmc1
tmc1.delete()
@pytest.fixture(scope='function')
def netflow_protected_server(mgmt_root, traffic_matching_criteria):
tmc = traffic_matching_criteria
nps1 = mgmt_root.tm.security.protected_servers.netflow_protected_server_s.netflow_protected_server.create(
name='fake_nps', trafficMatchingCriteria=tmc.fullPath, partition='Common')
yield nps1
nps1.delete()
@pytest.fixture(scope='function')
def scrubber_netflow_protected_server(mgmt_root, netflow_protected_server):
nps = netflow_protected_server
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
nps_lst = p1.scrubber_netflow_protected_server_s
params_set = {'name': 'fake_netflow_protected_server_profile', 'npsName': nps.fullPath, 'nextHop': '1.1.1.1'}
nps1 = nps_lst.scrubber_netflow_protected_server.create(**params_set)
yield nps1
nps1.delete()
@pytest.fixture(scope='function')
def virtual(mgmt_root):
vs = mgmt_root.tm.ltm.virtuals.virtual.create(
name='fake_virtual', partition='Common', protocol='tcp')
yield vs
vs.delete()
@pytest.fixture(scope='function')
def scrubber_virtual_server(mgmt_root, virtual):
vs = virtual
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
cat_lst = p1.scrubber_virtual_server_s
params_set = {'name': 'fake_virtual_server_profile', 'vsName': vs.fullPath, 'nextHop': '1.1.1.1'}
v1 = cat_lst.scrubber_virtual_server.create(**params_set)
yield v1
v1.delete()
@pytest.fixture(scope='function')
def scrubber_rt_domain(mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
params_set = {'name': 'fake_rt_domain_profile', 'routeDomain': '/Common/0', 'nextHop': '1.1.1.1'}
rd1 = rd_lst.scrubber_rt_domain.create(**params_set)
yield rd1
rd1.delete()
@pytest.fixture(scope='function')
def scrubber_rd_network_prefix(mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
params_set = {'name': 'fake_rt_domain_profile', 'routeDomain': '/Common/0', 'nextHop': '1.1.1.1'}
rd1 = rd_lst.scrubber_rt_domain.create(**params_set)
rd_net_lst = rd1.scrubber_rd_network_prefix_s
net_params_set = {'name': 'fake_rd_network_prefix_profile', 'dstIp': '1.1.1.0', 'mask': '24', 'nextHop': '2.2.2.2'}
rd_net1 = rd_net_lst.scrubber_rd_network_prefix.create(**net_params_set)
yield rd_net1
rd_net1.delete()
rd1.delete()
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('13.0.0'),
reason='This collection is fully implemented on 13.0.0 or greater.'
)
class TestProfile(object):
def test_load_no_object(self, mgmt_root):
p = mgmt_root.tm.security.scrubber.profile_s.profile
with pytest.raises(HTTPError) as err:
p.load(name='not_exists', partition='Common')
assert err.value.response.status_code == 404
def test_load_and_update(self, mgmt_root):
p = mgmt_root.tm.security.scrubber.profile_s.profile
p1 = p.load(name='scrubber-profile-default', partition='Common')
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default'
assert p1.name == 'scrubber-profile-default'
assert p1.partition == 'Common'
assert p1.selfLink.startswith(URI)
p1.advertisementTtl = 500
p1.update()
p2 = p.load(name='scrubber-profile-default', partition='Common')
assert p1.name == p2.name
assert p1.partition == p2.partition
assert p1.selfLink == p2.selfLink
assert p2.advertisementTtl == '500'
def test_profile_collection(self, mgmt_root):
pc = mgmt_root.tm.security.scrubber.profile_s.get_collection()
assert isinstance(pc, list)
assert len(pc)
assert isinstance(pc[0], Profile)
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('13.0.0'),
reason='This collection is fully implemented on 13.0.0 or greater.'
)
class TestScrubberCategories(object):
def test_mandatory_attribute_missing(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
cat_lst = p1.scrubber_categories_s
ERR = "Missing required params"
with pytest.raises(MissingRequiredCreationParameter) as err:
cat_lst.scrubber_categories.create(name='botnets')
assert str(err.value).startswith(ERR)
def test_create_req_arg(self, scrubber_categories):
c1 = scrubber_categories
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-categories/fake_categories_profile'
assert c1.name == 'fake_categories_profile'
assert c1.selfLink.startswith(URI)
def test_create_optional_args(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
cat_lst = p1.scrubber_categories_s
params_set = {'name': 'fake_categories_profile', 'blacklistCategory': '/Common/proxy', 'routeDomainName': '/Common/0',
'nextHop': '1.1.1.1', 'advertisementMethod': 'bgp-flowspec-method'}
c1 = cat_lst.scrubber_categories.create(**params_set)
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-categories/fake_categories_profile'
assert c1.name == 'fake_categories_profile'
assert c1.selfLink.startswith(URI)
assert c1.advertisementMethod == 'bgp-flowspec-method'
c1.delete()
def test_refresh(self, scrubber_categories, mgmt_root):
c1 = scrubber_categories
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
cat_lst = p1.scrubber_categories_s
c2 = cat_lst.scrubber_categories.load(name='fake_categories_profile')
assert c1.name == c2.name
assert c1.selfLink == c2.selfLink
assert c1.kind == c2.kind
c2.modify(advertisementMethod='bgp-flowspec-method')
assert c2.advertisementMethod == 'bgp-flowspec-method'
c1.refresh()
assert c1.advertisementMethod == 'bgp-flowspec-method'
def test_delete(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
cat_lst = p1.scrubber_categories_s
params_set = {'name': 'fake_categories_profile', 'blacklistCategory': '/Common/proxy',
'routeDomainName': '/Common/0', 'nextHop': '1.1.1.1', 'advertisementMethod': 'bgp-flowspec-method'}
c1 = cat_lst.scrubber_categories.create(**params_set)
c1.delete()
with pytest.raises(HTTPError) as err:
cat_lst.scrubber_categories.load(name='fake_categories_profile')
assert err.value.response.status_code == 404
def test_load_no_object(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
cat_lst = p1.scrubber_categories_s
with pytest.raises(HTTPError) as err:
cat_lst.scrubber_categories.load(name='fake_categories_profile')
assert err.value.response.status_code == 404
def test_load_and_update(self, scrubber_categories, mgmt_root):
c1 = scrubber_categories
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-categories/fake_categories_profile'
assert c1.name == 'fake_categories_profile'
assert c1.selfLink.startswith(URI)
c1.advertisementMethod = 'bgp-flowspec-method'
c1.update()
assert c1.advertisementMethod == 'bgp-flowspec-method'
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
cat_lst = p1.scrubber_categories_s
c2 = cat_lst.scrubber_categories.load(name='fake_categories_profile')
assert c1.name == c2.name
assert c1.selfLink == c2.selfLink
assert c1.kind == c2.kind
assert c2.advertisementMethod == 'bgp-flowspec-method'
def test_scrubber_categories_subcollection(self, scrubber_categories, mgmt_root):
c1 = scrubber_categories
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-categories/fake_categories_profile'
assert c1.name == 'fake_categories_profile'
assert c1.selfLink.startswith(URI)
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
cat_lst = p1.scrubber_categories_s
cat_col = cat_lst.get_collection()
assert isinstance(cat_col, list)
assert len(cat_col)
assert isinstance(cat_col[0], Scrubber_Categories)
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('13.0.0'),
reason='This collection is fully implemented on 13.0.0 or greater.'
)
class TestScrubberVS(object):
def test_mandatory_attribute_missing(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
vs_lst = p1.scrubber_virtual_server_s
ERR = "Missing required params"
with pytest.raises(MissingRequiredCreationParameter) as err:
vs_lst.scrubber_virtual_server.create(name='fake')
assert str(err.value).startswith(ERR)
def test_create_req_arg(self, scrubber_virtual_server):
v1 = scrubber_virtual_server
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-virtual-server/fake_virtual_server_profile'
assert v1.name == 'fake_virtual_server_profile'
assert v1.selfLink.startswith(URI)
def test_create_optional_args(self, mgmt_root, virtual):
vs = virtual
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
vs_lst = p1.scrubber_virtual_server_s
params_set = {'name': 'fake_virtual_server_profile', 'vsName': vs.fullPath, 'nextHop': '1.1.1.1',
'advertisementMethod': 'bgp-flowspec-method'}
v1 = vs_lst.scrubber_virtual_server.create(**params_set)
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-virtual-server/fake_virtual_server_profile'
assert v1.name == 'fake_virtual_server_profile'
assert v1.selfLink.startswith(URI)
assert v1.advertisementMethod == 'bgp-flowspec-method'
v1.delete()
def test_refresh(self, scrubber_virtual_server, mgmt_root):
v1 = scrubber_virtual_server
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
vs_lst = p1.scrubber_virtual_server_s
v2 = vs_lst.scrubber_virtual_server.load(name='fake_virtual_server_profile')
assert v1.name == v2.name
assert v1.selfLink == v2.selfLink
assert v1.kind == v2.kind
v2.modify(advertisementMethod='bgp-flowspec-method')
assert v2.advertisementMethod == 'bgp-flowspec-method'
v1.refresh()
assert v1.advertisementMethod == 'bgp-flowspec-method'
def test_delete(self, mgmt_root, virtual):
vs = virtual
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
vs_lst = p1.scrubber_virtual_server_s
params_set = {'name': 'fake_virtual_server_profile', 'vsName': vs.fullPath, 'nextHop': '1.1.1.1',
'advertisementMethod': 'bgp-flowspec-method'}
v1 = vs_lst.scrubber_virtual_server.create(**params_set)
v1.delete()
with pytest.raises(HTTPError) as err:
vs_lst.scrubber_virtual_server.load(name='fake_virtual_server_profile')
assert err.value.response.status_code == 404
def test_load_no_object(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
vs_lst = p1.scrubber_virtual_server_s
with pytest.raises(HTTPError) as err:
vs_lst.scrubber_virtual_server.load(name='fake_virtual_server_profile')
assert err.value.response.status_code == 404
def test_load_and_update(self, scrubber_virtual_server, mgmt_root):
v1 = scrubber_virtual_server
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-virtual-server/fake_virtual_server_profile'
assert v1.name == 'fake_virtual_server_profile'
assert v1.selfLink.startswith(URI)
v1.advertisementMethod = 'bgp-flowspec-method'
v1.update()
assert v1.advertisementMethod == 'bgp-flowspec-method'
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
vs_lst = p1.scrubber_virtual_server_s
v2 = vs_lst.scrubber_virtual_server.load(name='fake_virtual_server_profile')
assert v1.name == v2.name
assert v1.selfLink == v2.selfLink
assert v1.kind == v2.kind
assert v2.advertisementMethod == 'bgp-flowspec-method'
def test_scrubber_vs_subcollection(self, scrubber_virtual_server, mgmt_root):
v1 = scrubber_virtual_server
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-virtual-server/fake_virtual_server_profile'
assert v1.name == 'fake_virtual_server_profile'
assert v1.selfLink.startswith(URI)
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
vs_lst = p1.scrubber_virtual_server_s
vs_col = vs_lst.get_collection()
assert isinstance(vs_col, list)
assert len(vs_col)
assert isinstance(vs_col[0], Scrubber_Virtual_Server)
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('13.0.0'),
reason='This collection is fully implemented on 13.0.0 or greater.'
)
class TestScrubberNPS(object):
def test_mandatory_attribute_missing(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
nps_lst = p1.scrubber_netflow_protected_server_s
ERR = "Missing required params"
with pytest.raises(MissingRequiredCreationParameter) as err:
nps_lst.scrubber_netflow_protected_server.create(name='botnets')
assert str(err.value).startswith(ERR)
def test_create_req_arg(self, scrubber_netflow_protected_server):
nps1 = scrubber_netflow_protected_server
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-netflow-protected-server/fake_netflow_protected_server_profile'
assert nps1.name == 'fake_netflow_protected_server_profile'
assert nps1.selfLink.startswith(URI)
def test_create_optional_args(self, mgmt_root, netflow_protected_server):
nps = netflow_protected_server
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
nps_lst = p1.scrubber_netflow_protected_server_s
params_set = {'name': 'fake_netflow_protected_server_profile', 'npsName': nps.fullPath, 'nextHop': '1.1.1.1',
'advertisementMethod': 'bgp-flowspec-method'}
nps1 = nps_lst.scrubber_netflow_protected_server.create(**params_set)
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-netflow-protected-server/fake_netflow_protected_server_profile'
assert nps1.name == 'fake_netflow_protected_server_profile'
assert nps1.selfLink.startswith(URI)
assert nps1.advertisementMethod == 'bgp-flowspec-method'
nps1.delete()
def test_refresh(self, scrubber_netflow_protected_server, mgmt_root):
nps1 = scrubber_netflow_protected_server
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
nps_lst = p1.scrubber_netflow_protected_server_s
c2 = nps_lst.scrubber_netflow_protected_server.load(name='fake_netflow_protected_server_profile')
assert nps1.name == c2.name
assert nps1.selfLink == c2.selfLink
assert nps1.kind == c2.kind
c2.modify(advertisementMethod='bgp-flowspec-method')
assert c2.advertisementMethod == 'bgp-flowspec-method'
nps1.refresh()
assert nps1.advertisementMethod == 'bgp-flowspec-method'
def test_delete(self, mgmt_root, netflow_protected_server):
nps = netflow_protected_server
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
nps_lst = p1.scrubber_netflow_protected_server_s
params_set = {'name': 'fake_netflow_protected_server_profile', 'npsName': nps.fullPath, 'nextHop': '1.1.1.1',
'advertisementMethod': 'bgp-flowspec-method'}
nps1 = nps_lst.scrubber_netflow_protected_server.create(**params_set)
nps1.delete()
with pytest.raises(HTTPError) as err:
nps_lst.scrubber_netflow_protected_server.load(name='fake_netflow_protected_server_profile')
assert err.value.response.status_code == 404
def test_load_no_object(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
nps_lst = p1.scrubber_netflow_protected_server_s
with pytest.raises(HTTPError) as err:
nps_lst.scrubber_netflow_protected_server.load(name='fake_netflow_protected_server_profile')
assert err.value.response.status_code == 404
def test_load_and_update(self, scrubber_netflow_protected_server, mgmt_root):
nps1 = scrubber_netflow_protected_server
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-netflow-protected-server/fake_netflow_protected_server_profile'
assert nps1.name == 'fake_netflow_protected_server_profile'
assert nps1.selfLink.startswith(URI)
nps1.advertisementMethod = 'bgp-flowspec-method'
nps1.update()
assert nps1.advertisementMethod == 'bgp-flowspec-method'
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
nps_lst = p1.scrubber_netflow_protected_server_s
c2 = nps_lst.scrubber_netflow_protected_server.load(name='fake_netflow_protected_server_profile')
assert nps1.name == c2.name
assert nps1.selfLink == c2.selfLink
assert nps1.kind == c2.kind
assert c2.advertisementMethod == 'bgp-flowspec-method'
def test_scrubber_netflow_protected_server_subcollection(self, scrubber_netflow_protected_server, mgmt_root):
nps1 = scrubber_netflow_protected_server
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-netflow-protected-server/fake_netflow_protected_server_profile'
assert nps1.name == 'fake_netflow_protected_server_profile'
assert nps1.selfLink.startswith(URI)
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
nps_lst = p1.scrubber_netflow_protected_server_s
nps_col = nps_lst.get_collection()
assert isinstance(nps_col, list)
assert len(nps_col)
assert isinstance(nps_col[0], Scrubber_Netflow_Protected_Server)
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('13.0.0'),
reason='This collection is fully implemented on 13.0.0 or greater.'
)
class TestScrubberRtDomain(object):
def test_mandatory_attribute_missing(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
ERR = "Missing required params"
with pytest.raises(MissingRequiredCreationParameter) as err:
rd_lst.scrubber_rt_domain.create(name='fake_rt_domain_profile')
assert str(err.value).startswith(ERR)
def test_create_req_arg(self, scrubber_rt_domain):
rd1 = scrubber_rt_domain
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-rt-domain/fake_rt_domain_profile'
assert rd1.name == 'fake_rt_domain_profile'
assert rd1.selfLink.startswith(URI)
def test_create_optional_args(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
params_set = {'name': 'fake_rt_domain_profile', 'routeDomain': '/Common/0', 'nextHop': '1.1.1.1', 'advertisementMethod': 'bgp-flowspec-method'}
rd1 = rd_lst.scrubber_rt_domain.create(**params_set)
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-rt-domain/fake_rt_domain_profile'
assert rd1.name == 'fake_rt_domain_profile'
assert rd1.selfLink.startswith(URI)
assert rd1.advertisementMethod == 'bgp-flowspec-method'
rd1.delete()
def test_refresh(self, scrubber_rt_domain, mgmt_root):
rd1 = scrubber_rt_domain
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
rd2 = rd_lst.scrubber_rt_domain.load(name='fake_rt_domain_profile')
assert rd1.name == rd2.name
assert rd1.selfLink == rd2.selfLink
assert rd1.kind == rd2.kind
rd2.modify(advertisementMethod='bgp-flowspec-method')
assert rd2.advertisementMethod == 'bgp-flowspec-method'
rd1.refresh()
assert rd1.advertisementMethod == 'bgp-flowspec-method'
def test_delete(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
params_set = {'name': 'fake_rt_domain_profile', 'routeDomain': '/Common/0', 'nextHop': '1.1.1.1', 'advertisementMethod': 'bgp-flowspec-method'}
rd1 = rd_lst.scrubber_rt_domain.create(**params_set)
rd1.delete()
with pytest.raises(HTTPError) as err:
rd_lst.scrubber_rt_domain.load(name='fake_rt_domain_profile')
assert err.value.response.status_code == 404
def test_load_no_object(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
with pytest.raises(HTTPError) as err:
rd_lst.scrubber_rt_domain.load(name='fake_rt_domain_profile')
assert err.value.response.status_code == 404
def test_load_and_update(self, scrubber_rt_domain, mgmt_root):
rd1 = scrubber_rt_domain
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-rt-domain/fake_rt_domain_profile'
assert rd1.name == 'fake_rt_domain_profile'
assert rd1.selfLink.startswith(URI)
rd1.advertisementMethod = 'bgp-flowspec-method'
rd1.update()
assert rd1.advertisementMethod == 'bgp-flowspec-method'
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
rd2 = rd_lst.scrubber_rt_domain.load(name='fake_rt_domain_profile')
assert rd1.name == rd2.name
assert rd1.selfLink == rd2.selfLink
assert rd1.kind == rd2.kind
assert rd2.advertisementMethod == 'bgp-flowspec-method'
def test_scrubber_rt_domain_subcollection(self, scrubber_rt_domain, mgmt_root):
rd1 = scrubber_rt_domain
URI = 'https://localhost/mgmt/tm/security/' \
'scrubber/profile/~Common~scrubber-profile-default/scrubber-rt-domain/fake_rt_domain_profile'
assert rd1.name == 'fake_rt_domain_profile'
assert rd1.selfLink.startswith(URI)
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
rd_col = rd_lst.get_collection()
assert isinstance(rd_col, list)
assert len(rd_col)
assert isinstance(rd_col[0], Scrubber_Rt_Domain)
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('13.0.0'),
reason='This collection is fully implemented on 13.0.0 or greater.'
)
class TestScrubberRdNetworkPrefix(object):
def test_mandatory_attribute_missing(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
params_set = {'name': 'fake_rt_domain_profile', 'routeDomain': '/Common/0', 'nextHop': '1.1.1.1'}
rd1 = rd_lst.scrubber_rt_domain.create(**params_set)
rd_net_lst = rd1.scrubber_rd_network_prefix_s
ERR = "Missing required params"
with pytest.raises(MissingRequiredCreationParameter) as err:
rd_net_lst.scrubber_rd_network_prefix.create(name='fake_rd_network_prefix_profile')
assert str(err.value).startswith(ERR)
rd1.delete()
def test_create_req_arg(self, scrubber_rd_network_prefix):
rd_net1 = scrubber_rd_network_prefix
URI = 'https://localhost/mgmt/tm/security/scrubber/profile/' \
'~Common~scrubber-profile-default/scrubber-rt-domain/fake_rt_domain_profile/scrubber-rd-network-prefix/fake_rd_network_prefix_profile'
assert rd_net1.name == 'fake_rd_network_prefix_profile'
assert rd_net1.selfLink.startswith(URI)
def test_refresh(self, scrubber_rd_network_prefix, mgmt_root):
rd_net1 = scrubber_rd_network_prefix
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
rd1 = rd_lst.scrubber_rt_domain.load(name='fake_rt_domain_profile')
rd_net_lst = rd1.scrubber_rd_network_prefix_s
rd_net2 = rd_net_lst.scrubber_rd_network_prefix.load(name='fake_rd_network_prefix_profile')
assert rd_net1.name == rd_net2.name
assert rd_net1.selfLink == rd_net2.selfLink
assert rd_net1.kind == rd_net2.kind
rd_net2.modify(nextHop='3.3.3.3')
assert rd_net2.nextHop == '3.3.3.3'
rd_net1.refresh()
assert rd_net1.nextHop == '3.3.3.3'
def test_delete(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
params_set = {'name': 'fake_rt_domain_profile', 'routeDomain': '/Common/0', 'nextHop': '1.1.1.1'}
rd1 = rd_lst.scrubber_rt_domain.create(**params_set)
rd_net_lst = rd1.scrubber_rd_network_prefix_s
net_params_set = {'name': 'fake_rd_network_prefix_profile', 'dstIp': '1.1.1.0', 'mask': '24', 'nextHop': '2.2.2.2'}
rd_net1 = rd_net_lst.scrubber_rd_network_prefix.create(**net_params_set)
rd_net1.delete()
with pytest.raises(HTTPError) as err:
rd_net_lst.scrubber_rd_network_prefix.load(name='fake_rd_network_prefix_profile')
assert err.value.response.status_code == 404
rd1.delete()
def test_load_no_object(self, mgmt_root):
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
params_set = {'name': 'fake_rt_domain_profile', 'routeDomain': '/Common/0', 'nextHop': '1.1.1.1'}
rd1 = rd_lst.scrubber_rt_domain.create(**params_set)
rd_net_lst = rd1.scrubber_rd_network_prefix_s
with pytest.raises(HTTPError) as err:
rd_net_lst.scrubber_rd_network_prefix.load(name='fake_rd_network_prefix_profile')
assert err.value.response.status_code == 404
rd1.delete()
def test_load_and_update(self, scrubber_rd_network_prefix, mgmt_root):
rd_net1 = scrubber_rd_network_prefix
URI = 'https://localhost/mgmt/tm/security/scrubber/profile/' \
'~Common~scrubber-profile-default/scrubber-rt-domain/fake_rt_domain_profile/scrubber-rd-network-prefix/fake_rd_network_prefix_profile'
assert rd_net1.name == 'fake_rd_network_prefix_profile'
assert rd_net1.selfLink.startswith(URI)
rd_net1.nextHop = '3.3.3.3'
rd_net1.update()
assert rd_net1.nextHop == '3.3.3.3'
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
rd1 = rd_lst.scrubber_rt_domain.load(name='fake_rt_domain_profile')
rd_net_lst = rd1.scrubber_rd_network_prefix_s
rd_net2 = rd_net_lst.scrubber_rd_network_prefix.load(name='fake_rd_network_prefix_profile')
assert rd_net1.name == rd_net2.name
assert rd_net1.selfLink == rd_net2.selfLink
assert rd_net1.kind == rd_net2.kind
assert rd_net2.nextHop == '3.3.3.3'
def test_scrubber_rd_network_prefix_subcollection(self, scrubber_rd_network_prefix, mgmt_root):
rd_net1 = scrubber_rd_network_prefix
URI = 'https://localhost/mgmt/tm/security/scrubber/profile/' \
'~Common~scrubber-profile-default/scrubber-rt-domain/fake_rt_domain_profile/scrubber-rd-network-prefix/fake_rd_network_prefix_profile'
assert rd_net1.name == 'fake_rd_network_prefix_profile'
assert rd_net1.selfLink.startswith(URI)
p1 = mgmt_root.tm.security.scrubber.profile_s.profile.load(name='scrubber-profile-default', partition='Common')
rd_lst = p1.scrubber_rt_domain_s
rd1 = rd_lst.scrubber_rt_domain.load(name='fake_rt_domain_profile')
rd_net_lst = rd1.scrubber_rd_network_prefix_s
rd_net_col = rd_net_lst.get_collection()
assert isinstance(rd_net_col, list)
assert len(rd_net_col)
assert isinstance(rd_net_col[0], Scrubber_Rd_Network_Prefix)
| 53.0304 | 151 | 0.715152 | 4,301 | 33,144 | 5.246454 | 0.049523 | 0.082429 | 0.054243 | 0.06869 | 0.903257 | 0.887392 | 0.840505 | 0.815998 | 0.791048 | 0.785509 | 0 | 0.02027 | 0.172399 | 33,144 | 624 | 152 | 53.115385 | 0.80237 | 0.016655 | 0 | 0.740331 | 0 | 0.012891 | 0.247414 | 0.14587 | 0 | 0 | 0 | 0 | 0.244936 | 1 | 0.092081 | false | 0 | 0.018416 | 0 | 0.121547 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ecf7869a29bb2293ed5c38986da7c1f8ec2b175c | 3,030 | py | Python | src/genie/libs/parser/iosxe/tests/ShowLispRemoteLocatorSet/cli/equal/golden_output1_expected.py | nielsvanhooy/genieparser | 9a1955749697a6777ca614f0af4d5f3a2c254ccd | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/iosxe/tests/ShowLispRemoteLocatorSet/cli/equal/golden_output1_expected.py | nielsvanhooy/genieparser | 9a1955749697a6777ca614f0af4d5f3a2c254ccd | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/iosxe/tests/ShowLispRemoteLocatorSet/cli/equal/golden_output1_expected.py | nielsvanhooy/genieparser | 9a1955749697a6777ca614f0af4d5f3a2c254ccd | [
"Apache-2.0"
] | null | null | null | expected_output = {
'lisp_id': {
0: {
'remote_locator_name': {
'default-etr-locator-set-ipv6': {
'rloc': {
'100.88.88.88': {
'instance_id': {
'101': {
'domain_id': '0',
'etr_type': 'Default',
'metric': '-',
'multihome_id': '0',
'priority': '255',
'weight': '100',
},
'102': {
'domain_id': '0',
'etr_type': 'Default',
'metric': '-',
'multihome_id': '0',
'priority': '255',
'weight': '100',
},
'103': {
'domain_id': '0',
'etr_type': 'Default',
'metric': '-',
'multihome_id': '0',
'priority': '255',
'weight': '100',
},
},
},
'100.99.99.99': {
'instance_id': {
'104': {
'domain_id': '0',
'etr_type': 'Default',
'metric': '-',
'multihome_id': '0',
'priority': '255',
'weight': '100',
},
'105': {
'domain_id': '0',
'etr_type': 'Default',
'metric': '-',
'multihome_id': '0',
'priority': '255',
'weight': '100',
},
'106': {
'domain_id': '0',
'etr_type': 'Default',
'metric': '-',
'multihome_id': '0',
'priority': '255',
'weight': '100',
},
},
},
},
},
},
},
},
}
| 43.913043 | 59 | 0.168977 | 116 | 3,030 | 4.206897 | 0.258621 | 0.079918 | 0.110656 | 0.147541 | 0.75 | 0.75 | 0.75 | 0.75 | 0.75 | 0.75 | 0 | 0.102994 | 0.724422 | 3,030 | 68 | 60 | 44.558824 | 0.481437 | 0 | 0 | 0.558824 | 0 | 0 | 0.168977 | 0.009241 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a60a261b4a97245d3bc5b707df8cdfd5c22ab440 | 251 | py | Python | tccli/services/iotexplorer/__init__.py | zyh911/tencentcloud-cli | dfc5dbd660d4c60d265921c4edc630091478fc41 | [
"Apache-2.0"
] | null | null | null | tccli/services/iotexplorer/__init__.py | zyh911/tencentcloud-cli | dfc5dbd660d4c60d265921c4edc630091478fc41 | [
"Apache-2.0"
] | null | null | null | tccli/services/iotexplorer/__init__.py | zyh911/tencentcloud-cli | dfc5dbd660d4c60d265921c4edc630091478fc41 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from tccli.services.iotexplorer.iotexplorer_client import register_arg
from tccli.services.iotexplorer.iotexplorer_client import get_actions_info
from tccli.services.iotexplorer.iotexplorer_client import AVAILABLE_VERSION_LIST
| 50.2 | 80 | 0.860558 | 32 | 251 | 6.5 | 0.53125 | 0.129808 | 0.245192 | 0.403846 | 0.735577 | 0.735577 | 0.735577 | 0 | 0 | 0 | 0 | 0.004274 | 0.067729 | 251 | 4 | 81 | 62.75 | 0.884615 | 0.083665 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
a6586858a3e3a5b6b40ad2c5f7cb601ed745410f | 1,414 | py | Python | z3/min_max.py | monadius/formal_misc | c8109869b14e89e6423a2a5520967246277493ff | [
"MIT"
] | null | null | null | z3/min_max.py | monadius/formal_misc | c8109869b14e89e6423a2a5520967246277493ff | [
"MIT"
] | 1 | 2021-06-09T19:04:51.000Z | 2021-06-09T20:02:58.000Z | z3/min_max.py | monadius/formal_misc | c8109869b14e89e6423a2a5520967246277493ff | [
"MIT"
] | null | null | null | from z3 import *
from z3opt import *
constraints = []
f = Q(281474976710656,3125)
print f
u = find_upper_bound(f,[],1)
print "u =", u
l = find_upper_bound(-f,[],1)
print "l =", l
maximize(f, [], l, u, 0.1)
#fTol = 0.010000
#l, u = find_bounds(f, constraints, fTol)
#print l
#print u
#[x2, x1] = Reals('x2, x1')
#constraints = [x2 >= -20.000000, x1 >= -5.000000, x2 <= 5.000000, x1 <= 5.000000]
#f = ((((((((x1 * x1) + 1.000000) * (((2.000000 * x1) * (((((3.000000 * x1) * x1) + (2.000000 * x2)) - x1) * (1.000000 / ((x1 * x1) + 1.000000)))) * ((1.000000 / ((x1 * x1) + 1.000000)) * (x1 * (3.000000 * x1))))) + (((x1 * x1) + 1.000000) * (((((((3.000000 * x1) * x1) + (2.000000 * x2)) - x1) * (1.000000 / ((x1 * x1) + 1.000000))) - 3.000000) * ((2.000000 * x1) * ((1.000000 / ((x1 * x1) + 1.000000)) * (x1 * (3.000000 * x1))))))) + (((x1 * x1) + 1.000000) * ((x1 * x1) * (4.000000 * ((1.000000 / ((x1 * x1) + 1.000000)) * (x1 * (3.000000 * x1))))))) + (((3.000000 * x1) * x1) * ((1.000000 / ((x1 * x1) + 1.000000)) * (x1 * (3.000000 * x1))))) + ((((((3.000000 * x1) * x1) + (2.000000 * x2)) - x1) * (1.000000 / ((x1 * x1) + 1.000000))) * (x1 * (3.000000 * x1)))) + (3.000000 * ((1.000000 / ((x1 * x1) + 1.000000)) * (x1 * (3.000000 * x1)))))
#f = 3*(x1 ** 2)*(8*x1*x2+2*x2+4*(x1**4)+6*(x1**3)+6*(x1**2)-7*x1+3)/((x1**2)+1)
#print f
#fTol = 1.010000
#l, u = find_bounds(f, constraints, fTol)
#print l
#print u
| 45.612903 | 845 | 0.493635 | 236 | 1,414 | 2.932203 | 0.139831 | 0.33526 | 0.208092 | 0.190751 | 0.689306 | 0.689306 | 0.612717 | 0.612717 | 0.612717 | 0.612717 | 0 | 0.359334 | 0.193069 | 1,414 | 30 | 846 | 47.133333 | 0.247152 | 0.830976 | 0 | 0 | 0 | 0 | 0.026549 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.2 | null | null | 0.3 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
a6b79c214c687af88b57af46ba737888c9896824 | 5,987 | py | Python | ndlib/test/test_dynamic_compartment.py | ggrrll/ndlib | 375d00d69180dc2a38f3690b4bc2cdce40fd86de | [
"BSD-2-Clause"
] | 3 | 2021-01-09T06:50:47.000Z | 2021-11-01T00:16:39.000Z | ndlib/test/test_dynamic_compartment.py | ggrrll/ndlib | 375d00d69180dc2a38f3690b4bc2cdce40fd86de | [
"BSD-2-Clause"
] | null | null | null | ndlib/test/test_dynamic_compartment.py | ggrrll/ndlib | 375d00d69180dc2a38f3690b4bc2cdce40fd86de | [
"BSD-2-Clause"
] | null | null | null | from __future__ import absolute_import
import unittest
import dynetx as dn
import networkx as nx
import numpy as np
import past.builtins
import ndlib.models.ModelConfig as mc
import ndlib.models.DynamicCompostiteModel as gc
import ndlib.models.compartments as cpm
__author__ = 'Giulio Rossetti'
__license__ = "BSD-2-Clause"
__email__ = "giulio.rossetti@gmail.com"
class NdlibDynCompartmentsTest(unittest.TestCase):
def test_dyn_node_stochastic(self):
dg = dn.DynGraph()
for t in past.builtins.xrange(0, 3):
g = nx.erdos_renyi_graph(200, 0.05)
dg.add_interactions_from(g.edges(), t)
model = gc.DynamicCompositeModel(dg)
model.add_status("Susceptible")
model.add_status("Infected")
model.add_status("Removed")
c1 = cpm.NodeStochastic(0.02, "Infected")
c2 = cpm.NodeStochastic(0.01)
c3 = cpm.NodeStochastic(0.5)
model.add_rule("Susceptible", "Infected", c1)
model.add_rule("Infected", "Removed", c2)
model.add_rule("Infected", "Susceptible", c3)
config = mc.Configuration()
config.add_model_parameter('fraction_infected', 0.1)
model.set_initial_status(config)
iterations = model.execute_snapshots()
self.assertEqual(len(iterations), 3)
iterations = model.execute_iterations()
trends = model.build_trends(iterations)
self.assertEqual(len(trends[0]['trends']['status_delta'][1]),
len([x for x in dg.stream_interactions() if x[2] == "+"]))
def test_dyn_node_threshold(self):
# Fixed Threshold
dg = dn.DynGraph()
for t in past.builtins.xrange(0, 3):
g = nx.erdos_renyi_graph(200, 0.05)
dg.add_interactions_from(g.edges(), t)
model = gc.DynamicCompositeModel(dg)
model.add_status("Susceptible")
model.add_status("Infected")
c1 = cpm.NodeThreshold(0.1, triggering_status="Infected")
model.add_rule("Susceptible", "Infected", c1)
config = mc.Configuration()
config.add_model_parameter('fraction_infected', 0.1)
model.set_initial_status(config)
iterations = model.execute_snapshots()
self.assertEqual(len(iterations), 3)
iterations = model.execute_iterations()
trends = model.build_trends(iterations)
self.assertEqual(len(trends[0]['trends']['status_delta'][1]),
len([x for x in dg.stream_interactions() if x[2] == "+"]))
# Ad-hoc Threshold
model = gc.DynamicCompositeModel(dg)
model.add_status("Susceptible")
model.add_status("Infected")
c1 = cpm.NodeThreshold(triggering_status="Infected")
model.add_rule("Susceptible", "Infected", c1)
config = mc.Configuration()
for i in g.nodes():
config.add_node_configuration("threshold", i, np.random.random_sample())
config.add_model_parameter('fraction_infected', 0.1)
model.set_initial_status(config)
iterations = model.execute_snapshots()
self.assertEqual(len(iterations), 3)
iterations = model.execute_iterations()
trends = model.build_trends(iterations)
self.assertEqual(len(trends[0]['trends']['status_delta'][1]),
len([x for x in dg.stream_interactions() if x[2] == "+"]))
def test_dyn_edge_stochastic(self):
# Fixed Threshold
dg = dn.DynGraph()
for t in past.builtins.xrange(0, 3):
g = nx.erdos_renyi_graph(200, 0.05)
dg.add_interactions_from(g.edges(), t)
model = gc.DynamicCompositeModel(dg)
model.add_status("Susceptible")
model.add_status("Infected")
c1 = cpm.EdgeStochastic(0.1, triggering_status="Infected")
model.add_rule("Susceptible", "Infected", c1)
config = mc.Configuration()
config.add_model_parameter('fraction_infected', 0.1)
model.set_initial_status(config)
iterations = model.execute_snapshots()
self.assertEqual(len(iterations), 3)
iterations = model.execute_iterations()
trends = model.build_trends(iterations)
self.assertEqual(len(trends[0]['trends']['status_delta'][1]),
len([x for x in dg.stream_interactions() if x[2] == "+"]))
# Ad-hoc Threshold
model = gc.DynamicCompositeModel(dg)
model.add_status("Susceptible")
model.add_status("Infected")
c1 = cpm.EdgeStochastic(triggering_status="Infected")
model.add_rule("Susceptible", "Infected", c1)
config = mc.Configuration()
for e in g.edges():
config.add_edge_configuration("threshold", e, np.random.random_sample())
config.add_model_parameter('fraction_infected', 0.1)
model.set_initial_status(config)
iterations = model.execute_snapshots()
self.assertEqual(len(iterations), 3)
iterations = model.execute_iterations()
trends = model.build_trends(iterations)
self.assertEqual(len(trends[0]['trends']['status_delta'][1]),
len([x for x in dg.stream_interactions() if x[2] == "+"]))
# Predefined threshold 1/N
model = gc.DynamicCompositeModel(dg)
model.add_status("Susceptible")
model.add_status("Infected")
c1 = cpm.EdgeStochastic(triggering_status="Infected")
model.add_rule("Susceptible", "Infected", c1)
config = mc.Configuration()
config.add_model_parameter('fraction_infected', 0.1)
model.set_initial_status(config)
iterations = model.execute_snapshots()
self.assertEqual(len(iterations), 3)
iterations = model.execute_iterations()
trends = model.build_trends(iterations)
self.assertEqual(len(trends[0]['trends']['status_delta'][1]),
len([x for x in dg.stream_interactions() if x[2] == "+"]))
| 33.077348 | 84 | 0.634374 | 694 | 5,987 | 5.285303 | 0.151297 | 0.045802 | 0.049618 | 0.049073 | 0.822519 | 0.822519 | 0.813522 | 0.813522 | 0.813522 | 0.813522 | 0 | 0.019819 | 0.241523 | 5,987 | 180 | 85 | 33.261111 | 0.787932 | 0.015033 | 0 | 0.760331 | 0 | 0 | 0.10236 | 0.004244 | 0 | 0 | 0 | 0 | 0.099174 | 1 | 0.024793 | false | 0 | 0.07438 | 0 | 0.107438 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a6bd6b8168c14ffcd72ff6c5108c1fbc0e580b36 | 133 | py | Python | obj_tables/math/__init__.py | KarrLab/schema | 1f192754e0ee32b07bde9bcf8a21723d4b393f9e | [
"MIT"
] | 7 | 2020-08-07T17:24:29.000Z | 2021-05-20T04:45:14.000Z | obj_tables/math/__init__.py | KarrLab/schema | 1f192754e0ee32b07bde9bcf8a21723d4b393f9e | [
"MIT"
] | 92 | 2019-09-18T20:52:31.000Z | 2020-12-10T17:46:52.000Z | obj_tables/math/__init__.py | KarrLab/obj_model | 1f192754e0ee32b07bde9bcf8a21723d4b393f9e | [
"MIT"
] | 2 | 2020-10-05T21:02:09.000Z | 2021-06-14T03:08:06.000Z | from .expression import * # noqa: F401, F403
from .numeric import * # noqa: F401, F403
from .symbolic import * # noqa: F401, F403
| 33.25 | 45 | 0.684211 | 18 | 133 | 5.055556 | 0.444444 | 0.32967 | 0.461538 | 0.593407 | 0.483516 | 0 | 0 | 0 | 0 | 0 | 0 | 0.169811 | 0.203008 | 133 | 3 | 46 | 44.333333 | 0.688679 | 0.37594 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
a6ca61ef57009dd79fa7bcad77e509649aded934 | 4,875 | py | Python | tests/transfer_array_tests/test_transfer_array_slice.py | SX-Aurora/orchespy | 6b85a78831c8e3e05df7143101ca3418817fcbbd | [
"BSD-3-Clause"
] | null | null | null | tests/transfer_array_tests/test_transfer_array_slice.py | SX-Aurora/orchespy | 6b85a78831c8e3e05df7143101ca3418817fcbbd | [
"BSD-3-Clause"
] | null | null | null | tests/transfer_array_tests/test_transfer_array_slice.py | SX-Aurora/orchespy | 6b85a78831c8e3e05df7143101ca3418817fcbbd | [
"BSD-3-Clause"
] | null | null | null | import orchespy
from orchespy.devicetype import CUDAGPU, Host, VE
import sys
import pytest
import numpy as np
if "cupy" in sys.modules:
import cupy as cp
if "nlcpy" in sys.modules:
import nlcpy as vp
no_nlcpy = pytest.mark.skipif(
"nlcpy" not in sys.modules, reason=' test require nlcpy. ')
no_cupy = pytest.mark.skipif(
"cupy" not in sys.modules, reason=' test require cupy. ')
# Transfer 2d int zero array from VE to Host
@pytest.mark.parametrize('shape', [
(10,), (10, 10), (10, 10, 10), (10, 20), (20, 10), (10, 20, 30)
])
@pytest.mark.parametrize('dtype', [
'i4', 'i8', 'u4', 'u8', 'f4', 'f8', 'c8', 'c16'
])
@pytest.mark.parametrize('order', ['C', 'F'])
@pytest.mark.parametrize('sld', [
(3, 7, None), (None, None, 2), (None, None, -1)
])
class TestTransfer:
# Transfer zero array from VE to Host
@no_nlcpy
@no_cupy
def test_transfer_slice_v2h(self, shape, dtype, order, sld):
sl = slice(sld[0], sld[1], sld[2])
num = 1
for itm in shape:
num = num * itm
src_t = vp.arange(num, dtype=dtype).reshape(shape, order=order)
src = src_t[sl]
dst = orchespy.transfer_array(src, Host)
# check data type
assert(type(dst) is np.ndarray)
# check elements
expected_t = np.arange(num, dtype=dtype).reshape(shape, order=order)
expected = expected_t[sl]
assert(src.strides == expected.strides)
assert((dst == expected).all())
# Transfer zero array from VE to GPU
@no_nlcpy
@no_cupy
def test_transfer_slice_v2g(self, shape, dtype, order, sld):
sl = slice(sld[0], sld[1], sld[2])
num = 1
for itm in shape:
num = num * itm
src_t = vp.arange(num, dtype=dtype).reshape(shape, order=order)
src = src_t[sl]
dst = orchespy.transfer_array(src, CUDAGPU)
# check data type
assert(type(dst) is cp.ndarray)
# check elements
expected_t = cp.arange(num, dtype=dtype).reshape(shape, order=order)
expected = expected_t[sl]
assert(src.strides == expected.strides)
assert((dst == expected).all())
# Transfer zero array from Host to VE
@no_nlcpy
def test_transfer_slice_h2v(self, shape, dtype, order, sld):
sl = slice(sld[0], sld[1], sld[2])
num = 1
for itm in shape:
num = num * itm
src_t = np.arange(num, dtype=dtype).reshape(shape, order=order)
src = src_t[sl]
dst = orchespy.transfer_array(src, VE)
# check data type
assert(type(dst) is vp.ndarray)
# check elements
expected_t = vp.arange(num, dtype=dtype).reshape(shape, order=order)
expected = expected_t[sl]
assert(src.strides == expected.strides)
assert((dst == expected).all())
# Transfer zero array from Host to GPU
@no_cupy
def test_transfer_slice_h2g(self, shape, dtype, order, sld):
sl = slice(sld[0], sld[1], sld[2])
num = 1
for itm in shape:
num = num * itm
src_t = np.arange(num, dtype=dtype).reshape(shape, order=order)
src = src_t[sl]
dst = orchespy.transfer_array(src, CUDAGPU)
# check data type
assert(type(dst) is cp.ndarray)
# check elements
expected_t = cp.arange(num, dtype=dtype).reshape(shape, order=order)
expected = expected_t[sl]
assert(src.strides == expected.strides)
assert((dst == expected).all())
# Transfer zero array from GPU to Host
@no_cupy
def test_transfer_slice_g2h(self, shape, dtype, order, sld):
sl = slice(sld[0], sld[1], sld[2])
num = 1
for itm in shape:
num = num * itm
src_t = cp.arange(num, dtype=dtype).reshape(shape, order=order)
src = src_t[sl]
dst = orchespy.transfer_array(src, Host)
# check data type
assert(type(dst) is np.ndarray)
# check elements
expected_t = np.arange(num, dtype=dtype).reshape(shape, order=order)
expected = expected_t[sl]
assert(src.strides == expected.strides)
assert((dst == expected).all())
# Transfer zero array from GPU to VE
@no_nlcpy
@no_cupy
def test_transfer_slice_g2v(self, shape, dtype, order, sld):
sl = slice(sld[0], sld[1], sld[2])
num = 1
for itm in shape:
num = num * itm
src_t = cp.arange(num, dtype=dtype).reshape(shape, order=order)
src = src_t[sl]
dst = orchespy.transfer_array(src, VE)
# check data type
assert(type(dst) is vp.ndarray)
# check elements
expected_t = vp.arange(num, dtype=dtype).reshape(shape, order=order)
expected = expected_t[sl]
assert(src.strides == expected.strides)
assert((dst == expected).all())
| 34.821429 | 76 | 0.593436 | 680 | 4,875 | 4.167647 | 0.127941 | 0.016937 | 0.05928 | 0.080452 | 0.835568 | 0.835568 | 0.793225 | 0.770642 | 0.735709 | 0.735709 | 0 | 0.019903 | 0.278564 | 4,875 | 139 | 77 | 35.071942 | 0.785897 | 0.091077 | 0 | 0.756757 | 0 | 0 | 0.021764 | 0 | 0 | 0 | 0 | 0 | 0.162162 | 1 | 0.054054 | false | 0 | 0.063063 | 0 | 0.126126 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a6d4ab6768f4a671b57ade5234ca0b1a3a3fc795 | 27,735 | py | Python | imgreco/main.py | litchiar/ArknightsAutoHelper | 55cc4c751ba899aaa3cabe4222687a6ef930f267 | [
"MIT"
] | null | null | null | imgreco/main.py | litchiar/ArknightsAutoHelper | 55cc4c751ba899aaa3cabe4222687a6ef930f267 | [
"MIT"
] | null | null | null | imgreco/main.py | litchiar/ArknightsAutoHelper | 55cc4c751ba899aaa3cabe4222687a6ef930f267 | [
"MIT"
] | null | null | null | from fractions import Fraction
import numpy as np
from PIL import Image
from util.richlog import get_logger
from . import imgops
from . import resources
from . import util
logger = get_logger(__name__)
def check_main(img):
vw, vh = util.get_vwvh(img.size)
gear1 = img.crop((3.148 * vh, 2.037 * vh, 9.907 * vh, 8.796 * vh)).convert('L')
gear2 = resources.load_image_cached('main/gear.png', 'L')
gear1, gear2 = imgops.uniform_size(gear1, gear2)
result = imgops.compare_ccoeff(gear1, gear2)
# result = np.corrcoef(np.asarray(gear1).flat, np.asarray(gear2).flat)[0, 1]
logger.logimage(gear1)
logger.logtext('ccoeff=%f' % result)
return result > 0.9
def get_ballte_corners(img):
"""
:returns: [0][1]
[3][2]
"""
aspect = Fraction(*img.size)
vw, vh = util.get_vwvh(img)
if aspect == Fraction(16, 9):
return (
(61.120 * vw, 16.944 * vh), (82.213 * vw, 15.139 * vh), (82.213 * vw, 37.083 * vh),
(61.120 * vw, 38.333 * vh))
elif aspect == Fraction(18, 9):
return (
(64.693 * vw, 16.852 * vh), (82.378 * vw, 14.352 * vh), (82.378 * vw, 37.500 * vh),
(64.693 * vw, 37.963 * vh))
else:
return [x[0] for x in imgops.find_homography(resources.load_image_cached('main/terminal.png', 'L'), img)]
def get_task_corners(img):
"""
:returns: [0][1]
[3][2]
"""
aspect = Fraction(*img.size)
vw, vh = util.get_vwvh(img)
if aspect == Fraction(16, 9):
return (np.array((55.602 * vw, 75.880 * vh)), np.array((70.367 * vw, 78.241 * vh)),
np.array((70.367 * vw, 91.991 * vh)), np.array((55.602 * vw, 88.518 * vh)))
elif aspect == Fraction(18, 9):
return (np.array((58.489 * vw, 76.296 * vh)), np.array((72.008 * vw, 78.611 * vh)),
np.array((72.008 * vw, 92.685 * vh)), np.array((58.489 * vw, 89.167 * vh)))
else:
return [x[0] for x in imgops.find_homography(resources.load_image_cached('main/quest.png', 'L'), img)]
# 以下几条用于访问好友基建
def get_friend_corners(img):
"""
:returns: [0][1]
[3][2]
"""
aspect = Fraction(*img.size)
vw, vh = util.get_vwvh(img)
if aspect == Fraction(16, 9):
return (np.array((22.734 * vw, 76.667 * vh)), np.array((33.203 * vw, 76.667 * vh)),
np.array((33.203 * vw, 82.083 * vh)), np.array((22.734 * vw, 82.083 * vh)))
else:
return [x[0] for x in imgops.find_homography(resources.load_image_cached('main/friends.png', 'L'), img)]
def get_friend_list(img):
"""
:returns: [0][1]
[3][2]
"""
aspect = Fraction(*img.size)
vw, vh = util.get_vwvh(img)
if aspect == Fraction(16, 9):
return (
np.array((1.484 * vw, 25.694 * vh)), np.array((16.797 * vw, 25.694 * vh)),
np.array((16.797 * vw, 36.111 * vh)),
np.array((1.484 * vw, 36.111 * vh)))
else:
# FIXME: implement with feature matching?
raise NotImplementedError('unsupported aspect ratio')
# 获得采购中心
def get_shopping_center(img):
"""
:returns: [0][1]
[3][2]
"""
aspect = Fraction(*img.size)
vw, vh = util.get_vwvh(img)
if aspect == Fraction(16, 9):
return (np.array((747 / 12.8 * vw, 421 / 7.2 * vh)), np.array((924 / 12.8 * vw, 421 / 7.2 * vh)),
np.array((926 / 12.8 * vw, 538 / 7.2 * vh)), np.array((747 / 12.8 * vw, 532 / 7.2 * vh)))
else:
# FIXME: implement with feature matching?
raise NotImplementedError('unsupported aspect ratio')
def get_building_button(img): # 进入基建
"""
:returns: [0][1]
[3][2]
"""
aspect = Fraction(*img.size)
vw, vh = util.get_vwvh(img)
if aspect == Fraction(16, 9):
return (np.array((921 / 12.8 * vw, 562 / 7.2 * vh)), np.array((1145 / 12.8 * vw, 579 / 7.2 * vh)),
np.array((1137 / 12.8 * vw, 699 / 7.2 * vh)), np.array((921 / 12.8 * vw, 672 / 7.2 * vh)))
else:
# FIXME: implement with feature matching?
raise NotImplementedError('unsupported aspect ratio')
# 进入信用交易所
def get_credit_center(img):
"""
:returns: [0][1]
[3][2]
"""
aspect = Fraction(*img.size)
vw, vh = util.get_vwvh(img)
if aspect == Fraction(16, 9):
return (np.array((1095 / 12.8 * vw, 88 / 7.2 * vh)), np.array((1268 / 12.8 * vw, 87 / 7.2 * vh)),
np.array((1266 / 12.8 * vw, 129 / 7.2 * vh)), np.array((1095 / 12.8 * vw, 126 / 7.2 * vh)))
else:
# FIXME: implement with feature matching?
raise NotImplementedError('unsupported aspect ratio')
# 领取信用
def get_credit_daily(img):
"""
:returns: [0][1]
[3][2]
"""
aspect = Fraction(*img.size)
vw, vh = util.get_vwvh(img)
if aspect == Fraction(16, 9):
return (np.array((965 / 12.8 * vw, 25 / 7.2 * vh)), np.array((1071 / 12.8 * vw, 24 / 7.2 * vh)),
np.array((1073 / 12.8 * vw, 51 / 7.2 * vh)), np.array((965 / 12.8 * vw, 54 / 7.2 * vh)))
else:
# FIXME: implement with feature matching?
raise NotImplementedError('unsupported aspect ratio')
# 领取信用
def get_credit_item(img, index):
"""
:returns: [0][1]
[3][2]
"""
aspect = Fraction(*img.size)
vw, vh = util.get_vwvh(img)
if aspect == Fraction(16, 9):
credit_items = {
0: (np.array((136 / 12.8 * vw, 273 / 7.2 * vh)), np.array((136 / 12.8 * vw, 273 / 7.2 * vh)),
np.array((136 / 12.8 * vw, 273 / 7.2 * vh)), np.array((136 / 12.8 * vw, 273 / 7.2 * vh))),
1: (np.array((388 / 12.8 * vw, 267 / 7.2 * vh)), np.array((388 / 12.8 * vw, 267 / 7.2 * vh)),
np.array((388 / 12.8 * vw, 267 / 7.2 * vh)), np.array((388 / 12.8 * vw, 267 / 7.2 * vh))),
2: (np.array((643 / 12.8 * vw, 264 / 7.2 * vh)), np.array((643 / 12.8 * vw, 264 / 7.2 * vh)),
np.array((643 / 12.8 * vw, 264 / 7.2 * vh)), np.array((643 / 12.8 * vw, 264 / 7.2 * vh))),
3: (np.array((900 / 12.8 * vw, 259 / 7.2 * vh)), np.array((900 / 12.8 * vw, 259 / 7.2 * vh)),
np.array((900 / 12.8 * vw, 259 / 7.2 * vh)), np.array((900 / 12.8 * vw, 259 / 7.2 * vh))),
4: (np.array((1149 / 12.8 * vw, 270 / 7.2 * vh)), np.array((1149 / 12.8 * vw, 270 / 7.2 * vh)),
np.array((1149 / 12.8 * vw, 270 / 7.2 * vh)), np.array((1149 / 12.8 * vw, 270 / 7.2 * vh))),
5: (np.array((127 / 12.8 * vw, 523 / 7.2 * vh)), np.array((127 / 12.8 * vw, 523 / 7.2 * vh)),
np.array((127 / 12.8 * vw, 523 / 7.2 * vh)), np.array((127 / 12.8 * vw, 523 / 7.2 * vh))),
}
return credit_items[index]
else:
# FIXME: implement with feature matching?
raise NotImplementedError('unsupported aspect ratio')
# 领取信用
def get_state(img, index):
"""
:returns: [0][1]
[3][2]
"""
aspect = Fraction(*img.size)
vw, vh = util.get_vwvh(img)
if aspect == Fraction(16, 9):
credit_items = {1: (np.array((1047 / 12.8 * vw, 37 / 7.2 * vh)), np.array((1047 / 12.8 * vw, 37 / 7.2 * vh)),
np.array((1047 / 12.8 * vw, 37 / 7.2 * vh)), np.array((1047 / 12.8 * vw, 37 / 7.2 * vh))),
2: (np.array((881 / 12.8 * vw, 39 / 7.2 * vh)), np.array((881 / 12.8 * vw, 39 / 7.2 * vh)),
np.array((881 / 12.8 * vw, 39 / 7.2 * vh)), np.array((881 / 12.8 * vw, 39 / 7.2 * vh))),
}
return credit_items[index]
else:
# FIXME: implement with feature matching?
raise NotImplementedError('unsupported aspect ratio')
def get_building_blocks(img, index):
    """Click quad (corner order [0][1] / [3][2], viewport units) for base
    facility slot *index* on the 16:9 base overview screen.

    Slots 1-9 are the left-side rooms, 10 the control centre, 11-14 the
    dormitories.

    :raises KeyError: for an unknown index
    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)

    def pt(x, y):
        # Map 1280x720 design pixels to viewport units.
        return np.array((x / 12.8 * vw, y / 7.2 * vh))

    # One design-pixel anchor per slot; the quad degenerates to that point.
    anchors = {
        1: (145, 316), 2: (278, 315), 3: (485, 316),
        4: (13, 415), 5: (166, 412), 6: (380, 415),
        8: (268, 496), 9: (484, 496),
        10: (861, 153),   # control centre (中枢)
        11: (812, 310),   # dormitory 1 (宿舍1)
        12: (893, 418),   # dormitory 2 (宿舍2)
        13: (785, 517),   # dormitory 3 (宿舍3)
        14: (913, 632),   # dormitory 4 (宿舍4)
    }
    if index == 7:
        # Slot 7 alternates between two x positions in the original table.
        return (pt(62, 496), pt(65, 496), pt(62, 496), pt(65, 496))
    x, y = anchors[index]
    return (pt(x, y), pt(x, y), pt(x, y), pt(x, y))
def get_credit_shopping_check(img):
    """Quad (corner order [0][1] / [3][2]) of the credit-shop confirm button,
    in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)

    def pt(x, y):
        # Map 1280x720 design pixels to viewport units.
        return np.array((x / 12.8 * vw, y / 7.2 * vh))

    return (pt(801, 556), pt(1028, 552), pt(1023, 603), pt(815, 604))
def get_back(img):
    """Quad ([0] top-left, [1] top-right, [2] bottom-right, [3] bottom-left)
    of the back button, in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    aspect = Fraction(*img.size)
    vw, vh = util.get_vwvh(img)
    if aspect == Fraction(16, 9):
        # Fixed corner [2]: it was (151/63*vw, 603/7.2*vh), which lands far
        # outside the rectangle spanned by the other three corners. `/63` is
        # the same typo for `/12.8` that appears in get_back2_yes (where three
        # of the four otherwise identical points use /12.8), and 603 looks
        # like a typo for 61 (the y of corner [3]).
        return (np.array((22 / 12.8 * vw, 21 / 7.2 * vh)), np.array((152 / 12.8 * vw, 21 / 7.2 * vh)),
                np.array((151 / 12.8 * vw, 61 / 7.2 * vh)), np.array((19 / 12.8 * vw, 61 / 7.2 * vh)))
    else:
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
def get_back2(img):
    """Quad ([0] top-left, [1] top-right, [2] bottom-right, [3] bottom-left)
    of the secondary back button, in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    aspect = Fraction(*img.size)
    vw, vh = util.get_vwvh(img)
    if aspect == Fraction(16, 9):
        # Fixed corner [2]: it was 160/63*vw (~2.5vw), far left of the
        # rectangle spanned by the other corners; `/63` is the same typo for
        # `/12.8` that is unambiguous in get_back2_yes.
        return (np.array((23 / 12.8 * vw, 18 / 7.2 * vh)), np.array((163 / 12.8 * vw, 15 / 7.2 * vh)),
                np.array((160 / 12.8 * vw, 60 / 7.2 * vh)), np.array((20 / 12.8 * vw, 60 / 7.2 * vh)))
    else:
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
def get_back2_yes(img):
    """Degenerate quad (the same point four times, order [0][1] / [3][2]) of
    the confirmation ("yes") button, in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    aspect = Fraction(*img.size)
    vw, vh = util.get_vwvh(img)
    if aspect == Fraction(16, 9):
        # Fixed corner [2]: it used 849/63*vw while the other three identical
        # points all use 849/12.8*vw — clearly a typo for /12.8.
        return tuple(np.array((849 / 12.8 * vw, 498 / 7.2 * vh)) for _ in range(4))
    else:
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
def get_back2_clear(img):  # clear selection (清空选择)
    """Degenerate quad (the same point four times, order [0][1] / [3][2]) of
    the 'clear selection' button, in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    return tuple(np.array((512 / 12.8 * vw, 672 / 7.2 * vh)) for _ in range(4))
# Open the occupant/assignment panel (打开进驻)
def get_setting_block(img):
    """Quad ([0] top-left, [1] top-right, [2] bottom-right, [3] bottom-left)
    of the assignment button, in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    aspect = Fraction(*img.size)
    vw, vh = util.get_vwvh(img)
    if aspect == Fraction(16, 9):
        # Fixed corner [2]: it was 118/63*vw (~1.9vw), far left of the
        # rectangle spanned by the other corners; `/63` is the same typo for
        # `/12.8` that is unambiguous in get_back2_yes.
        return (np.array((7 / 12.8 * vw, 246 / 7.2 * vh)), np.array((113 / 12.8 * vw, 246 / 7.2 * vh)),
                np.array((118 / 12.8 * vw, 352 / 7.2 * vh)), np.array((7 / 12.8 * vw, 349 / 7.2 * vh)))
    else:
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
def get_clear_working(img):
    """Quad (corner order [0][1] / [3][2]) of the 'clear working operators'
    button, in viewport units.  (Removed a stray trailing comma from the
    original parameter list; signature is otherwise unchanged.)

    :raises NotImplementedError: for non-16:9 screenshots
    """
    aspect = Fraction(*img.size)
    vw, vh = util.get_vwvh(img)
    if aspect == Fraction(16, 9):
        return (np.array((878 / 12.8 * vw, 109 / 7.2 * vh)), np.array((963 / 12.8 * vw, 123 / 7.2 * vh)),
                np.array((974 / 12.8 * vw, 213 / 7.2 * vh)), np.array((860 / 12.8 * vw, 210 / 7.2 * vh)))
    else:
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
def get_character(img, index):
    """Click quad (corner order [0][1] / [3][2], viewport units) for operator
    slot *index* on the 16:9 selection screen.

    Slots 1-10 are the visible operator cards; index -1 is the confirm button
    and -2 the screen centre.  Returns ``None`` for any other index, matching
    the original if-chain's fall-through.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    # Corner coordinates in 1280x720 design pixels, one quad per slot.
    quads = {
        1: ((427, 129), (527, 135), (541, 324), (431, 315)),
        2: ((430, 415), (538, 418), (530, 586), (428, 580)),
        3: ((583, 141), (668, 142), (681, 307), (580, 289)),
        4: ((572, 405), (678, 403), (683, 597), (565, 591)),
        5: ((723, 111), (833, 114), (816, 310), (723, 297)),
        6: ((714, 400), (827, 400), (830, 610), (714, 595)),
        7: ((878, 109), (963, 123), (968, 315), (869, 300)),
        8: ((864, 402), (963, 412), (954, 585), (863, 573)),
        9: ((1004, 114), (1100, 113), (1100, 294), (1005, 325)),
        10: ((1010, 411), (1100, 415), (1100, 588), (1010, 577)),
        -1: ((1100, 654), (1253, 660), (1251, 694), (1113, 694)),  # confirm (确认)
        -2: ((611, 370),) * 4,                                     # centre (中间)
    }
    quad = quads.get(index)
    if quad is None:
        return None
    return tuple(np.array((x / 12.8 * vw, y / 7.2 * vh)) for x, y in quad)
def get_color_is_white(img):
    """Return True when the probe pixel at design position (1194, 90) has
    equal R, G and B channels.

    NOTE(review): despite the name this tests for an achromatic (grey-scale)
    pixel, not specifically white — confirm against callers.  Removed the
    dead commented-out code and collapsed the if/else True/False return.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    aspect = Fraction(*img.size)
    vw, vh = util.get_vwvh(img)
    if aspect == Fraction(16, 9):
        pixel = img.load()[1194 / 12.8 * vw, 90 / 7.2 * vh]
        return pixel[0] == pixel[1] == pixel[2]
    else:
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
def get_color_is_black(img):
    """Return True when the probe pixel at design position (400, 360) has
    equal R, G and B channels.

    NOTE(review): despite the name this tests for an achromatic (grey-scale)
    pixel, not specifically black — confirm against callers.  Removed the
    dead commented-out code and collapsed the if/else True/False return.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    aspect = Fraction(*img.size)
    vw, vh = util.get_vwvh(img)
    if aspect == Fraction(16, 9):
        pixel = img.load()[400 / 12.8 * vw, 360 / 7.2 * vh]
        return pixel[0] == pixel[1] == pixel[2]
    else:
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
def get_choose_rest(img, index):
    """Degenerate click quads (same point four times, order [0][1] / [3][2])
    for the choose-rest flow: 1 = filter button (筛选), 2 = screen probe
    point, 3 = confirm.  Returns ``None`` for any other index, matching the
    original fall-through.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    anchors = {1: (1175, 39), 2: (400, 360), 3: (941, 552)}
    if index not in anchors:
        return None
    x, y = anchors[index]
    return tuple(np.array((x / 12.8 * vw, y / 7.2 * vh)) for _ in range(4))
def get_friend_build(img):
    """Quad (corner order [0][1] / [3][2]) of the friend-base entry, already
    expressed in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    left, top = 74.065 * vw, 17.134 * vh
    right, bottom = 79.967 * vw, 28.065 * vh
    return (np.array((left, top)), np.array((right, top)),
            np.array((right, bottom)), np.array((left, bottom)))
def get_next_friend_build(img):
    """Quad (corner order [0][1] / [3][2]) of the 'next friend base' button,
    already expressed in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    left, top = 85.625 * vw, 79.444 * vh
    right, bottom = 99.531 * vw, 93.750 * vh
    return (np.array((left, top)), np.array((right, top)),
            np.array((right, bottom)), np.array((left, bottom)))
def get_back_my_build(img):
    """Quad (corner order [0][1] / [3][2]) of the 'back to my base' button,
    already expressed in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    left, top = 72.266 * vw, 81.528 * vh
    right, bottom = 88.750 * vw, 92.500 * vh
    return (np.array((left, top)), np.array((right, top)),
            np.array((right, bottom)), np.array((left, bottom)))
# Click the hint at the top-right of the base overview, revealing the
# one-tap collect button (点击基建主界面右上角的提示(以凸显一键收取))
def get_my_build_task_1(img):  # previous notification (上一个)
    """Quad (corner order [0][1] / [3][2]) of the upper notification entry,
    in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)

    def pt(x, y):
        # Map 1280x720 design pixels to viewport units.
        return np.array((x / 12.8 * vw, y / 7.2 * vh))

    return (pt(1178, 73), pt(1272, 72), pt(1272, 112), pt(1175, 114))
def get_my_build_task_2(img):  # next notification (下一个)
    """Quad (corner order [0][1] / [3][2]) of the lower notification entry,
    in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)

    def pt(x, y):
        # Map 1280x720 design pixels to viewport units.
        return np.array((x / 12.8 * vw, y / 7.2 * vh))

    return (pt(1178, 123), pt(1272, 124), pt(1272, 162), pt(1175, 162))
# One-tap collect the factories' finished goods (一键收取制造站的物品)
def get_my_build_task_clear(img):
    """Quad (corner order [0][1] / [3][2]) of the one-tap collect button,
    already expressed in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    left, top = 12.500 * vw, 91.667 * vh
    right, bottom = 16.797 * vw, 98.472 * vh
    return (np.array((left, top)), np.array((right, top)),
            np.array((right, bottom)), np.array((left, bottom)))
# def get_my_sell_task(img):
# """
# :returns: [0][1]
# [3][2]
# """
# aspect = Fraction(*img.size)
# vw, vh = util.get_vwvh(img)
# if aspect == Fraction(16, 9):
# return (np.array((51.111*vw, 14.375*vh)), np.array((60.000*vw, 14.375*vh)), np.array((60.000*vw, *vh)), np.array((51.111*vw, *vh)))
# else:
# # FIXME: implement with feature matching?
# raise NotImplementedError('unsupported aspect ratio')
# Enter the second trading post from the base overview (从基建主界面点击进入第二间贸易站)
def get_my_sell_task_1(img):
    """Quad (corner order [0][1] / [3][2]) of the trading-post entry,
    already expressed in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    left, top = 5.781 * vw, 51.806 * vh
    right, bottom = 14.688 * vw, 59.167 * vh
    return (np.array((left, top)), np.array((right, top)),
            np.array((right, bottom)), np.array((left, bottom)))
# Open the order list (打开订单页面)
def get_my_sell_tasklist(img):
    """Quad (corner order [0][1] / [3][2]) of the order-list panel,
    already expressed in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    left, top = 1.094 * vw, 75.833 * vh
    right, bottom = 41.719 * vw, 95.139 * vh
    return (np.array((left, top)), np.array((right, top)),
            np.array((right, bottom)), np.array((left, bottom)))
# Click a deliverable order (点击 '可交付' 订单)
def get_my_sell_task_main(img):
    """Quad (corner order [0][1] / [3][2]) of the deliverable-order area,
    already expressed in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    left, top = 29.297 * vw, 26.528 * vh
    right, bottom = 37.109 * vw, 61.111 * vh
    return (np.array((left, top)), np.array((right, top)),
            np.array((right, bottom)), np.array((left, bottom)))
# Switch to the other trading post's order list from the current order list
# (从订单列表中进入另一间贸易设施的订单列表)
def get_my_sell_task_2(img):
    """Quad (corner order [0][1] / [3][2]) of the other trading post's tab,
    already expressed in viewport units.

    :raises NotImplementedError: for non-16:9 screenshots
    """
    if Fraction(*img.size) != Fraction(16, 9):
        # FIXME: implement with feature matching?
        raise NotImplementedError('unsupported aspect ratio')
    vw, vh = util.get_vwvh(img)
    left, top = 1.094 * vw, 25.972 * vh
    right, bottom = 16.875 * vw, 33.472 * vh
    return (np.array((left, top)), np.array((right, top)),
            np.array((right, bottom)), np.array((left, bottom)))
# Manual smoke test: run with a screenshot path as the last CLI argument and
# print the result of check_main (defined earlier in this file) for it.
if __name__ == "__main__":
    import sys
    print(check_main(Image.open(sys.argv[-1])))
| 41.028107 | 141 | 0.485307 | 4,293 | 27,735 | 3.099464 | 0.095504 | 0.145198 | 0.067939 | 0.075755 | 0.870284 | 0.812566 | 0.788817 | 0.778972 | 0.778145 | 0.696002 | 0 | 0.179972 | 0.319849 | 27,735 | 675 | 142 | 41.088889 | 0.525392 | 0.152623 | 0 | 0.481865 | 0 | 0 | 0.03406 | 0 | 0 | 0 | 0 | 0.04 | 0 | 1 | 0.085492 | false | 0 | 0.020725 | 0 | 0.243523 | 0.002591 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4713e176f7944296dd89d798b1d55b2ffc0db07c | 32,444 | py | Python | test/test_finite_differences.py | HastingsGreer/mermaid | bd13c5fc427eb8cd9054973a8eaaeb302078182d | [
"Apache-2.0"
] | 120 | 2019-10-29T23:53:02.000Z | 2022-03-30T02:59:58.000Z | test/test_finite_differences.py | AlexanderChristgau/mermaid | ba07883cc3cb5982e4655048a434b4495cb49c6d | [
"Apache-2.0"
] | 10 | 2019-11-05T09:28:35.000Z | 2022-01-09T19:12:51.000Z | test/test_finite_differences.py | AlexanderChristgau/mermaid | ba07883cc3cb5982e4655048a434b4495cb49c6d | [
"Apache-2.0"
] | 19 | 2019-11-10T13:34:39.000Z | 2022-03-13T20:30:10.000Z | # start with the setup
# start with the setup
import os
import sys
# Force CPU-only execution so the tests never grab a GPU.
os.environ["CUDA_VISIBLE_DEVICES"] = ''
# Make the in-repo mermaid package (and its compiled libraries) importable
# when the tests are run from inside the test directory.
sys.path.insert(0,os.path.abspath('..'))
sys.path.insert(0,os.path.abspath('../mermaid'))
sys.path.insert(0,os.path.abspath('../mermaid/libraries'))
import numpy as np
import numpy.testing as npt
import torch
import unittest
import imp
# Probe for the optional HTML test runner; fall back to plain unittest output
# when it is not installed.
# NOTE(review): `imp` is deprecated — `importlib.util.find_spec` would be the
# modern equivalent; left unchanged here (comments-only edit).
try:
    imp.find_module('HtmlTestRunner')
    foundHTMLTestRunner = True
    import HtmlTestRunner
except ImportError:
    foundHTMLTestRunner = False
# done with all the setup

# testing code starts here
import mermaid.finite_differences as FD

#TODO: add tests for non-Neumann boundary conditions (linear extrapolation)
#TODO: do experiments on how the non-Neumann boundary conditions behave in practice
class Test_finite_difference_1d_neumann_numpy(unittest.TestCase):
    """1-D finite-difference operators (numpy backend, zero-Neumann boundary)."""

    def setUp(self):
        self.spacing = np.array([0.1])
        self.fd_np = FD.FD_np(self.spacing, mode='neumann_zero')

    def tearDown(self):
        pass

    def _check(self, op_name, data, expected):
        # Apply the named operator of the FD object to `data` (a 1xN array)
        # and compare the result element-wise against `expected`.
        op = getattr(self.fd_np, op_name)
        npt.assert_almost_equal(op(np.array(data)), expected)

    def test_xp(self):
        self._check('xp', [[1, 2, 3]], [[2, 3, 3]])

    def test_xm(self):
        self._check('xm', [[1, 2, 3]], [[1, 1, 2]])

    def test_dXb(self):
        self._check('dXb', [[1, 2, 3]], [[0, 10, 10]])

    def test_dXf(self):
        self._check('dXf', [[1, 2, 3]], [[10, 10, 0]])

    def test_dXc(self):
        self._check('dXc', [[1, 2, 3]], [[0, 10, 0]])

    def test_ddXc(self):
        self._check('ddXc', [[1, 0, 3]], [[-0, 400, -0]])

    def test_lap(self):
        self._check('lap', [[1, 0, 3]], [[-0, 400, -0]])
class Test_finite_difference_2d_neumann_numpy(unittest.TestCase):
    """2-D finite-difference operators (numpy backend, zero-Neumann boundary)."""

    def setUp(self):
        self.spacing = np.array([0.1, 0.2])
        self.fd_np = FD.FD_np(self.spacing, mode='neumann_zero')
        # Common 1x3x3 test input: the values 1..9, row-major.
        self.f = np.array([[[1, 2, 3], [4, 5, 6], [7, 8, 9]]])

    def tearDown(self):
        pass

    def _check(self, op_name, expected):
        # Apply the named operator to the shared input and compare element-wise.
        npt.assert_almost_equal(getattr(self.fd_np, op_name)(self.f), expected)

    def test_xp(self):
        self._check('xp', [[[4, 5, 6], [7, 8, 9], [7, 8, 9]]])

    def test_xm(self):
        self._check('xm', [[[1, 2, 3], [1, 2, 3], [4, 5, 6]]])

    def test_yp(self):
        self._check('yp', [[[2, 3, 3], [5, 6, 6], [8, 9, 9]]])

    def test_ym(self):
        self._check('ym', [[[1, 1, 2], [4, 4, 5], [7, 7, 8]]])

    def test_dXb(self):
        self._check('dXb', [[[0, 0, 0], [30, 30, 30], [30, 30, 30]]])

    def test_dXf(self):
        self._check('dXf', [[[30, 30, 30], [30, 30, 30], [0, 0, 0]]])

    def test_dXc(self):
        self._check('dXc', [[[0, 0, 0], [30, 30, 30], [0, 0, 0]]])

    def test_dYb(self):
        self._check('dYb', [[[0, 5, 5], [0, 5, 5], [0, 5, 5]]])

    def test_dYf(self):
        self._check('dYf', [[[5, 5, 0], [5, 5, 0], [5, 5, 0]]])

    def test_dYc(self):
        self._check('dYc', [[[0., 5, 0.], [0., 5, 0.], [0., 5, 0.]]])

    def test_ddXc(self):
        self._check('ddXc', [[[0., 0., 0.], [0, 0, 0], [-0., -0., -0.]]])

    def test_ddYc(self):
        self._check('ddYc', [[[0., 0, -0.], [0., 0, -0.], [0., 0, -0.]]])

    def test_lap(self):
        self._check('lap', [[[0, 0, -0], [0, 0, -0], [0, 0, -0]]])
class Test_finite_difference_3d_neumann_numpy(unittest.TestCase):
    """3-D finite-difference operators (numpy backend, zero-Neumann boundary)."""

    def setUp(self):
        self.spacing = np.array([0.1, 0.2, 0.5])
        self.fd_np = FD.FD_np(self.spacing, mode='neumann_zero')
        # 1x3x3x3 input holding the values 0..26 in row-major order
        # (identical to the original literal array).
        self.inArray = np.arange(27, dtype=np.float64).reshape(1, 3, 3, 3)

    def tearDown(self):
        pass

    def _check(self, op_name, expected):
        # Apply the named operator to self.inArray and compare element-wise.
        npt.assert_almost_equal(getattr(self.fd_np, op_name)(self.inArray), expected)

    # --- neighbour shifts: +/- one cell per axis, clamped at the boundary ---
    # The expected arrays are the input re-indexed along the shifted axis:
    # [1, 2, 2] is a +1 shift with the last slice repeated, [0, 0, 1] a -1
    # shift with the first slice repeated.

    def test_xp(self):
        self._check('xp', self.inArray[:, [1, 2, 2]])

    def test_xm(self):
        self._check('xm', self.inArray[:, [0, 0, 1]])

    def test_yp(self):
        self._check('yp', self.inArray[:, :, [1, 2, 2]])

    def test_ym(self):
        self._check('ym', self.inArray[:, :, [0, 0, 1]])

    def test_zp(self):
        self._check('zp', self.inArray[:, :, :, [1, 2, 2]])

    def test_zm(self):
        self._check('zm', self.inArray[:, :, :, [0, 0, 1]])

    # --- first derivatives ---
    # Interior slopes: 9/0.1 = 90 along x, 3/0.2 = 15 along y, 1/0.5 = 2
    # along z; zero wherever the one-sided/central stencil hits the clamped
    # boundary (Neumann).

    def test_dXb(self):
        zero, ninety = np.zeros((3, 3)), np.full((3, 3), 90.)
        self._check('dXb', [np.stack((zero, ninety, ninety))])

    def test_dXf(self):
        zero, ninety = np.zeros((3, 3)), np.full((3, 3), 90.)
        self._check('dXf', [np.stack((ninety, ninety, zero))])

    def test_dXc(self):
        zero, ninety = np.zeros((3, 3)), np.full((3, 3), 90.)
        self._check('dXc', [np.stack((zero, ninety, zero))])

    def test_dYb(self):
        plane = np.array([[0.] * 3, [15.] * 3, [15.] * 3])
        self._check('dYb', [np.stack((plane,) * 3)])

    def test_dYf(self):
        plane = np.array([[15.] * 3, [15.] * 3, [0.] * 3])
        self._check('dYf', [np.stack((plane,) * 3)])

    def test_dYc(self):
        plane = np.array([[0.] * 3, [15.] * 3, [0.] * 3])
        self._check('dYc', [np.stack((plane,) * 3)])

    def test_dZb(self):
        plane = np.array([[0., 2., 2.]] * 3)
        self._check('dZb', [np.stack((plane,) * 3)])

    def test_dZf(self):
        plane = np.array([[2., 2., 0.]] * 3)
        self._check('dZf', [np.stack((plane,) * 3)])

    def test_dZc(self):
        plane = np.array([[0., 2., 0.]] * 3)
        self._check('dZc', [np.stack((plane,) * 3)])

    # --- second derivatives and Laplacian: the input is linear in each
    # coordinate, so every second derivative (and their sum) vanishes ---

    def test_ddXc(self):
        self._check('ddXc', np.zeros((1, 3, 3, 3)))

    def test_ddYc(self):
        self._check('ddYc', np.zeros((1, 3, 3, 3)))

    def test_ddZc(self):
        self._check('ddZc', np.zeros((1, 3, 3, 3)))

    def test_lap(self):
        self._check('lap', np.zeros((1, 3, 3, 3)))
class Test_finite_difference_1d_neumann_torch(unittest.TestCase):
    """1-D finite-difference operators (torch backend, zero-Neumann boundary)."""

    def setUp(self):
        self.spacing = np.array([0.1])
        self.fd_torch = FD.FD_torch(self.spacing, mode='neumann_zero')

    def tearDown(self):
        pass

    def _check(self, op_name, data, expected):
        # Run the named operator on `data` (as a float tensor) and compare the
        # result element-wise after moving it back to numpy on the CPU.
        out = getattr(self.fd_torch, op_name)(torch.FloatTensor(data))
        npt.assert_almost_equal(out.detach().cpu().numpy(), expected)

    def test_xp(self):
        self._check('xp', [[1, 2, 3]], [[2, 3, 3]])

    def test_xm(self):
        self._check('xm', [[1, 2, 3]], [[1, 1, 2]])

    def test_dXb(self):
        self._check('dXb', [[1, 2, 3]], [[0, 10, 10]])

    def test_dXf(self):
        self._check('dXf', [[1, 2, 3]], [[10, 10, 0]])

    def test_dXc(self):
        self._check('dXc', [[1, 2, 3]], [[0, 10, 0]])

    def test_ddXc(self):
        self._check('ddXc', [[1, 0, 3]], [[-0, 400, -0]])

    def test_lap(self):
        self._check('lap', [[1, 0, 3]], [[-0, 400, -0]])
class Test_finite_difference_2d_neumann_torch(unittest.TestCase):
    """2-D finite-difference operators (torch backend, zero-Neumann boundary)."""

    def setUp(self):
        self.spacing = np.array([0.1, 0.2])
        self.fd_torch = FD.FD_torch(self.spacing, mode='neumann_zero')
        # Common 1x3x3 test input: the values 1..9, row-major.
        # (setUp runs before every test, so each test gets a fresh tensor.)
        self.f = torch.FloatTensor([[[1, 2, 3], [4, 5, 6], [7, 8, 9]]])

    def tearDown(self):
        pass

    def _check(self, op_name, expected):
        # Run the named operator on the shared input tensor and compare the
        # result element-wise after moving it back to numpy on the CPU.
        out = getattr(self.fd_torch, op_name)(self.f)
        npt.assert_almost_equal(out.detach().cpu().numpy(), expected)

    def test_xp(self):
        self._check('xp', [[[4, 5, 6], [7, 8, 9], [7, 8, 9]]])

    def test_xm(self):
        self._check('xm', [[[1, 2, 3], [1, 2, 3], [4, 5, 6]]])

    def test_yp(self):
        self._check('yp', [[[2, 3, 3], [5, 6, 6], [8, 9, 9]]])

    def test_ym(self):
        self._check('ym', [[[1, 1, 2], [4, 4, 5], [7, 7, 8]]])

    def test_dXb(self):
        self._check('dXb', [[[0, 0, 0], [30, 30, 30], [30, 30, 30]]])

    def test_dXf(self):
        self._check('dXf', [[[30, 30, 30], [30, 30, 30], [0, 0, 0]]])

    def test_dXc(self):
        self._check('dXc', [[[0, 0, 0], [30, 30, 30], [0, 0, 0]]])

    def test_dYb(self):
        self._check('dYb', [[[0, 5, 5], [0, 5, 5], [0, 5, 5]]])

    def test_dYf(self):
        self._check('dYf', [[[5, 5, 0], [5, 5, 0], [5, 5, 0]]])

    def test_dYc(self):
        self._check('dYc', [[[0, 5, 0], [0, 5, 0], [0, 5, 0]]])

    def test_ddXc(self):
        self._check('ddXc', [[[0, 0, 0], [0, 0, 0], [-0, -0, -0]]])

    def test_ddYc(self):
        self._check('ddYc', [[[0, 0, -0], [0, 0, -0], [0, 0, -0]]])

    def test_lap(self):
        self._check('lap', [[[0, 0, -0], [0, 0, -0], [0, 0, -0]]])
class Test_finite_difference_3d_neumann_torch(unittest.TestCase):
def setUp(self):
self.spacing = np.array([0.1,0.2,0.5])
self.fd_torch = FD.FD_torch(self.spacing, mode='neumann_zero')
self.inArray = torch.FloatTensor([[[[ 0., 1., 2.],
[ 3., 4., 5.],
[ 6., 7., 8.]],
[[ 9., 10., 11.],
[ 12., 13., 14.],
[ 15., 16., 17.]],
[[ 18., 19., 20.],
[ 21., 22., 23.],
[ 24., 25., 26.]]]])
def tearDown(self):
pass
def test_xp(self):
xp = self.fd_torch.xp( self.inArray )
npt.assert_almost_equal(xp.detach().cpu().numpy(), [[[[ 9., 10., 11.],
[ 12., 13., 14.],
[ 15., 16., 17.]],
[[ 18., 19., 20.],
[ 21., 22., 23.],
[ 24., 25., 26.]],
[[18., 19., 20.],
[21., 22., 23.],
[24., 25., 26.]]]
])
def test_xm(self):
xm = self.fd_torch.xm(self.inArray)
npt.assert_almost_equal(xm.detach().cpu().numpy(),[[[[ 0., 1., 2.],
[ 3., 4., 5.],
[ 6., 7., 8.]],
[[0., 1., 2.],
[3., 4., 5.],
[6., 7., 8.]],
[[ 9., 10., 11.],
[ 12., 13., 14.],
[ 15., 16., 17.]]]])
def test_yp(self):
yp = self.fd_torch.yp(self.inArray)
npt.assert_almost_equal(yp.detach().cpu().numpy(), [[[[ 3., 4., 5.],
[ 6., 7., 8.],
[ 6., 7., 8.]],
[[ 12., 13., 14.],
[ 15., 16., 17.],
[ 15., 16., 17.]],
[[ 21., 22., 23.],
[ 24., 25., 26.],
[ 24., 25., 26.]]]])
def test_ym(self):
ym = self.fd_torch.ym(self.inArray)
npt.assert_almost_equal(ym.detach().cpu().numpy(),[[[[ 0., 1., 2.],
[ 0., 1., 2.],
[ 3., 4., 5.]],
[[ 9., 10., 11.],
[ 9., 10., 11.],
[ 12., 13., 14.]],
[[ 18., 19., 20.],
[ 18., 19., 20.],
[ 21., 22., 23.]]]])
def test_zp(self):
zp = self.fd_torch.zp(self.inArray)
npt.assert_almost_equal(zp.detach().cpu().numpy(),[[[[ 1., 2., 2.],
[ 4., 5., 5.],
[ 7., 8., 8.]],
[[ 10., 11., 11.],
[ 13., 14., 14.],
[ 16., 17., 17.]],
[[ 19., 20., 20.],
[ 22., 23., 23.],
[ 25., 26., 26.]]]])
def test_zm(self):
zm = self.fd_torch.zm(self.inArray)
npt.assert_almost_equal(zm.detach().cpu().numpy(),[[[[ 0., 0., 1.],
[ 3., 3., 4.],
[ 6., 6., 7.]],
[[ 9., 9., 10.],
[ 12., 12., 13.],
[ 15., 15., 16.]],
[[ 18., 18., 19.],
[ 21., 21., 22.],
[ 24., 24., 25.]]]])
def test_dXb(self):
dxb = self.fd_torch.dXb(self.inArray)
npt.assert_almost_equal(dxb.detach().cpu().numpy(),[[[[ 0., 0., 0.],
[ 0., 0., 0.],
[ 0., 0., 0.]],
[[ 90., 90., 90.],
[ 90., 90., 90.],
[ 90., 90., 90.]],
[[ 90., 90., 90.],
[ 90., 90., 90.],
[ 90., 90., 90.]]]])
def test_dXf(self):
dxf = self.fd_torch.dXf(self.inArray)
npt.assert_almost_equal(dxf.detach().cpu().numpy(),[[[[ 90., 90., 90.],
[ 90., 90., 90.],
[ 90., 90., 90.]],
[[ 90., 90., 90.],
[ 90., 90., 90.],
[ 90., 90., 90.]],
[[ 0., 0., 0.],
[ 0., 0., 0.],
[ 0., 0., 0.]]]])
def test_dXc(self):
dxc = self.fd_torch.dXc(self.inArray)
npt.assert_almost_equal(dxc.detach().cpu().numpy(),[[[[ 0., 0., 0.],
[ 0., 0., 0.],
[ 0., 0., 0.]],
[[ 90., 90., 90.],
[ 90., 90., 90.],
[ 90., 90., 90.]],
[[ 0., 0., 0.],
[ 0., 0., 0.],
[ 0., 0., 0.]]]])
def test_dYb(self):
dyb = self.fd_torch.dYb(self.inArray)
npt.assert_almost_equal(dyb.detach().cpu().numpy(),[[[[ 0., 0., 0.],
[ 15., 15., 15.],
[ 15., 15., 15.]],
[[ 0., 0., 0.],
[ 15., 15., 15.],
[ 15., 15., 15.]],
[[ 0., 0., 0.],
[ 15., 15., 15.],
[ 15., 15., 15.]]]])
def test_dYf(self):
dyf = self.fd_torch.dYf(self.inArray)
npt.assert_almost_equal(dyf.detach().cpu().numpy(),[[[[ 15., 15., 15.],
[ 15., 15., 15.],
[ 0., 0., 0.]],
[[ 15., 15., 15.],
[ 15., 15., 15.],
[ 0., 0., 0.]],
[[ 15., 15., 15.],
[ 15., 15., 15.],
[ 0., 0., 0.]]]])
def test_dYc(self):
dyc = self.fd_torch.dYc(self.inArray)
npt.assert_almost_equal(dyc.detach().cpu().numpy(),[[[[ 0., 0., 0.],
[ 15. , 15. , 15. ],
[ 0., 0., 0.]],
[[ 0., 0., 0.],
[ 15. , 15. , 15. ],
[ 0., 0., 0.]],
[[ 0., 0., 0.],
[ 15. , 15. , 15. ],
[ 0., 0., 0.]]]])
def test_dZb(self):
dzb = self.fd_torch.dZb(self.inArray)
npt.assert_almost_equal(dzb.detach().cpu().numpy(),[[[[ 0., 2., 2.],
[ 0., 2., 2.],
[ 0., 2., 2.]],
[[ 0., 2., 2.],
[ 0., 2., 2.],
[ 0., 2., 2.]],
[[ 0., 2., 2.],
[ 0., 2., 2.],
[ 0., 2., 2.]]]])
def test_dZf(self):
dzf = self.fd_torch.dZf(self.inArray)
npt.assert_almost_equal(dzf.detach().cpu().numpy(),[[[[ 2., 2., 0.],
[ 2., 2., 0.],
[ 2., 2., 0.]],
[[ 2., 2., 0.],
[ 2., 2., 0.],
[ 2., 2., 0.]],
[[ 2., 2., 0.],
[ 2., 2., 0.],
[ 2., 2., 0.]]]])
def test_dZc(self):
dzc = self.fd_torch.dZc(self.inArray)
npt.assert_almost_equal(dzc.detach().cpu().numpy(),[[[[ 0., 2., 0.],
[ 0., 2., 0.],
[ 0., 2., 0.]],
[[ 0., 2., 0.],
[ 0., 2., 0.],
[ 0., 2., 0.]],
[[ 0., 2., 0.],
[ 0., 2., 0.],
[ 0., 2., 0.]]]])
def test_ddXc(self):
ddxc = self.fd_torch.ddXc(self.inArray)
npt.assert_almost_equal(ddxc.detach().cpu().numpy(),[[[[ 0., 0., 0.],
[ 0., 0., 0.],
[ 0., 0., 0.]],
[[ 0., 0., 0.],
[ 0., 0., 0.],
[ 0., 0., 0.]],
[[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.]]]])
def test_ddYc(self):
ddyc = self.fd_torch.ddYc(self.inArray)
npt.assert_almost_equal(ddyc.detach().cpu().numpy(),[[[[ 0., 0., 0.],
[ 0., 0., 0.],
[-0., -0., -0.]],
[[ 0., 0., 0.],
[ 0., 0., 0.],
[-0., -0., -0.]],
[[ 0., 0., 0.],
[ 0., 0., 0.],
[-0., -0., -0.]]]])
def test_ddZc(self):
ddzc = self.fd_torch.ddZc(self.inArray)
npt.assert_almost_equal(ddzc.detach().cpu().numpy(),[[[[ 0., 0., -0.],
[ 0., 0., -0.],
[ 0., 0., -0.]],
[[ 0., 0., -0.],
[ 0., 0., -0.],
[ 0., 0., -0.]],
[[ 0., 0., -0.],
[ 0., 0., -0.],
[ 0., 0., -0.]]]])
def test_lap(self):
lap = self.fd_torch.lap(self.inArray)
npt.assert_almost_equal(lap.detach().cpu().numpy(),[[[[ 0., 0., 0.],
[ 0., 0., 0.],
[ 0., 0., 0.]],
[[ 0., 0., 0.],
[ 0., 0., -0.],
[ -0., -0., -0.]],
[[-0., -0., -0.],
[-0., -0., -0.],
[-0., -0., -0.]]]])
if __name__ == '__main__':
if foundHTMLTestRunner:
unittest.main(testRunner=HtmlTestRunner.HTMLTestRunner(output='test_output'))
else:
unittest.main()
| 44.141497 | 95 | 0.294631 | 3,333 | 32,444 | 2.756076 | 0.040504 | 0.089266 | 0.110385 | 0.125408 | 0.92924 | 0.923144 | 0.919987 | 0.914653 | 0.766492 | 0.694644 | 0 | 0.135379 | 0.514826 | 32,444 | 734 | 96 | 44.201635 | 0.448193 | 0.006812 | 0 | 0.690438 | 0 | 0 | 0.004874 | 0 | 0 | 0 | 0 | 0.001362 | 0.126418 | 1 | 0.145867 | false | 0.009724 | 0.016207 | 0 | 0.171799 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
5b3ddb3ca2d2b417a64819b2eb13e7ea92530c4a | 2,191 | py | Python | src/tests/test_package.py | ghackebeil/pyomo_kernel_extensions | 8998b334195d4fa9f48e164c223ecd2ccc3e36d3 | [
"MIT"
] | null | null | null | src/tests/test_package.py | ghackebeil/pyomo_kernel_extensions | 8998b334195d4fa9f48e164c223ecd2ccc3e36d3 | [
"MIT"
] | null | null | null | src/tests/test_package.py | ghackebeil/pyomo_kernel_extensions | 8998b334195d4fa9f48e164c223ecd2ccc3e36d3 | [
"MIT"
] | null | null | null | import sys
import pyomo_kernel_extensions
is_pypy = False
try:
import __pypy__ # noqa: F401
is_pypy = True
except ImportError:
is_pypy = False
class Test:
# See what Python versions the combined
# coverage report includes
def test_show_coverage(self):
if sys.platform.startswith("linux"):
assert sys.version_info.major == 3
if not is_pypy:
if sys.version_info.minor == 6:
print(sys.version_info)
elif sys.version_info.minor == 7:
print(sys.version_info)
elif sys.version_info.minor == 8:
print(sys.version_info)
elif sys.version_info.minor == 9:
print(sys.version_info)
if is_pypy:
if sys.version_info.minor == 6:
print(sys.version_info)
elif sys.platform.startswith("darwin"):
assert sys.version_info.major == 3
if not is_pypy:
if sys.version_info.minor == 6:
print(sys.version_info)
elif sys.version_info.minor == 7:
print(sys.version_info)
elif sys.version_info.minor == 8:
print(sys.version_info)
elif sys.version_info.minor == 9:
print(sys.version_info)
if is_pypy:
if sys.version_info.minor == 6:
print(sys.version_info)
elif sys.platform.startswith("win32"):
assert sys.version_info.major == 3
if not is_pypy:
if sys.version_info.minor == 6:
print(sys.version_info)
elif sys.version_info.minor == 7:
print(sys.version_info)
elif sys.version_info.minor == 8:
print(sys.version_info)
elif sys.version_info.minor == 9:
print(sys.version_info)
if is_pypy:
if sys.version_info.minor == 6:
print(sys.version_info)
def test_version(self):
pyomo_kernel_extensions.__version__
| 34.234375 | 49 | 0.525787 | 250 | 2,191 | 4.38 | 0.188 | 0.30137 | 0.421918 | 0.260274 | 0.747032 | 0.747032 | 0.747032 | 0.747032 | 0.747032 | 0.747032 | 0 | 0.017319 | 0.393884 | 2,191 | 63 | 50 | 34.777778 | 0.807229 | 0.033318 | 0 | 0.759259 | 0 | 0 | 0.007569 | 0 | 0 | 0 | 0 | 0 | 0.055556 | 1 | 0.037037 | false | 0 | 0.074074 | 0 | 0.12963 | 0.277778 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
5b5d342afcaf67e186f6a0bb3190621d035077dd | 1,103 | py | Python | tests/test_test_utils.py | TeamHG-Memex/extract-html-diff | 812cbff33488da22d185a5d58fc5bd3d592d63bb | [
"MIT"
] | 29 | 2017-02-08T13:59:06.000Z | 2022-02-02T16:00:37.000Z | tests/test_test_utils.py | TeamHG-Memex/extract-html-diff | 812cbff33488da22d185a5d58fc5bd3d592d63bb | [
"MIT"
] | 4 | 2017-02-08T15:03:10.000Z | 2017-11-22T07:36:59.000Z | tests/test_test_utils.py | TeamHG-Memex/extract-html-diff | 812cbff33488da22d185a5d58fc5bd3d592d63bb | [
"MIT"
] | 6 | 2017-05-20T21:34:06.000Z | 2020-05-29T14:12:02.000Z | import pytest
from lxml.html import fragment_fromstring
from .utils import assert_elements_equal
def test_assert_elements_equal():
assert_elements_equal(
fragment_fromstring('<div>foo<a href="bar" class="baz"><br/></div>'),
fragment_fromstring('<div>foo <a class="baz" href="bar"> <br/></div>'))
with pytest.raises(AssertionError):
assert_elements_equal(
fragment_fromstring('<div><div>foo</div></div>'),
fragment_fromstring('<div>foo</div>'))
with pytest.raises(AssertionError):
assert_elements_equal(
fragment_fromstring('<div><div>foo</div></div>'),
fragment_fromstring('<div><div>food</div></div>'))
with pytest.raises(AssertionError):
assert_elements_equal(
fragment_fromstring('<div class="foo">foo</div>'),
fragment_fromstring('<div class="foo" id="bar">foo</div>'))
with pytest.raises(AssertionError):
assert_elements_equal(
fragment_fromstring('<div class="foo">foo</div>'),
fragment_fromstring('<div class="food">foo</div>'))
| 40.851852 | 79 | 0.645512 | 128 | 1,103 | 5.359375 | 0.203125 | 0.28863 | 0.306122 | 0.196793 | 0.75656 | 0.705539 | 0.64723 | 0.64723 | 0.64723 | 0.64723 | 0 | 0 | 0.19311 | 1,103 | 26 | 80 | 42.423077 | 0.770787 | 0 | 0 | 0.565217 | 0 | 0 | 0.268359 | 0.147779 | 0 | 0 | 0 | 0 | 0.478261 | 1 | 0.043478 | true | 0 | 0.130435 | 0 | 0.173913 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5bb440967b97448d004ea847f03745560f5c0fb8 | 16,751 | py | Python | generate_square.py | ashishrao7/Neuromorphic-Sampling | ab921456ebcada361c64f24cd0fd727771d1acd8 | [
"MIT"
] | null | null | null | generate_square.py | ashishrao7/Neuromorphic-Sampling | ab921456ebcada361c64f24cd0fd727771d1acd8 | [
"MIT"
] | null | null | null | generate_square.py | ashishrao7/Neuromorphic-Sampling | ab921456ebcada361c64f24cd0fd727771d1acd8 | [
"MIT"
] | null | null | null | ################################################################################################################################################################
# This program generates the background and elementary patterns . Also saves frames of the pattern moving over the background which are later fed to the simulator
# Author: Ashish Rao M
# email: ashish.rao.m@gmail.com
##########################################################################################################################################################
import numpy as np
import matplotlib.pyplot as plt
import math
import cv2
from cv2 import VideoWriter, VideoWriter_fourcc
def gkern(l=35, sig=2.5):
"""
creates gaussian kernel with side length l and a sigma of sig
"""
ax = np.arange(-l // 2 + 1., l // 2 + 1.)
xx, yy = np.meshgrid(ax, ax)
kernel = np.exp(-0.5 * (np.square(xx) + np.square(yy)) / np.square(sig))
return kernel / np.sum(kernel)
def sampling_square():
square_pattern = [[ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255],
[ 255, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 255],
[ 255, 240, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 240, 255],
[ 255, 240, 225, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 60, 60, 60, 60, 60, 60, 60, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 45, 45, 45, 45, 45, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 30, 30, 30, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 15, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 15, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 30, 30, 30, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 45, 45, 45, 45, 45, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 60, 60, 60, 60, 60, 60, 60, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 225, 240, 255],
[ 255, 240, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 240, 255],
[ 255, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 255],
[ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]]
square_pattern = [[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255],
[ 255, 240, 225, 210, 195, 180, 165, 150, 135, 120, 105, 90, 75, 60, 45, 30, 15, 0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225, 240, 255]]
#square_pattern = gkern()*10000
pattern=np.array(square_pattern, dtype=np.uint8)
plt.imshow(pattern, cmap='gray')
plt.axis('off')
plt.show()
cv2.imwrite("Images/sampling_pattern_single_channel.png", pattern)
cv2.imwrite("Images/sampling_pattern.png", cv2.cvtColor(pattern,cv2.COLOR_GRAY2RGB))
return pattern
def background():
bckgd = 255 * np.ones((260, 346), dtype=np.uint8)
cv2.imwrite("Images/background_single_channel.png", bckgd)
cv2.imwrite("Images/background.png", cv2.cvtColor(bckgd,cv2.COLOR_GRAY2RGB))
return bckgd
def original_image(pattern):
pass
def move_image_on_background(pattern, background):
'''
The function creates a video of the pattern moving horizontally over a given background
Parameters:
-----------
pattern: <np.array, 35x35>
The pattern supposed to move over the background
background: <np.array, 260x346>
A white background of the given size
'''
fourcc = VideoWriter_fourcc(*'HFYU')
video = VideoWriter('./videos/moving_pattern_for_sampling_exp.avi', fourcc, 30, (346, 260))
background[112:147, 0:35] = pattern
frame = background
count = 0
for _ in range(0, 346-34):
cv2.imwrite("video_images/frame%04d.png" % count, cv2.cvtColor(frame,cv2.COLOR_GRAY2RGB))
video.write(cv2.cvtColor(frame,cv2.COLOR_GRAY2RGB))
shifted_frame = np.roll(frame, 1, axis=1)
frame = shifted_frame
video.write(frame)
count+=1
cv2.destroyAllWindows()
video.release()
def main():
pattern = sampling_square()
bckgd = background()
move_image_on_background(pattern, bckgd)
if __name__=='__main__':
main() | 108.070968 | 208 | 0.497881 | 2,783 | 16,751 | 2.982034 | 0.050665 | 0.098325 | 0.073744 | 0.095433 | 0.828052 | 0.813833 | 0.806121 | 0.806121 | 0.806121 | 0.806121 | 0 | 0.587445 | 0.311504 | 16,751 | 155 | 209 | 108.070968 | 0.132143 | 0.034088 | 0 | 0.568966 | 1 | 0 | 0.0136 | 0.012398 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051724 | false | 0.008621 | 0.043103 | 0 | 0.12069 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
5bcca77c2e8267ef6e8ac1c5670f471d74c4f989 | 204 | py | Python | rpython/jit/backend/x86/test/test_rvmprof.py | m4sterchain/mesapy | ed546d59a21b36feb93e2309d5c6b75aa0ad95c9 | [
"Apache-2.0",
"OpenSSL"
] | 381 | 2018-08-18T03:37:22.000Z | 2022-02-06T23:57:36.000Z | rpython/jit/backend/x86/test/test_rvmprof.py | m4sterchain/mesapy | ed546d59a21b36feb93e2309d5c6b75aa0ad95c9 | [
"Apache-2.0",
"OpenSSL"
] | 16 | 2018-09-22T18:12:47.000Z | 2022-02-22T20:03:59.000Z | rpython/jit/backend/x86/test/test_rvmprof.py | m4sterchain/mesapy | ed546d59a21b36feb93e2309d5c6b75aa0ad95c9 | [
"Apache-2.0",
"OpenSSL"
] | 30 | 2018-08-20T03:16:34.000Z | 2022-01-12T17:39:22.000Z |
import py
from rpython.jit.backend.test.test_rvmprof import BaseRVMProfTest
from rpython.jit.backend.x86.test.test_basic import Jit386Mixin
class TestRVMProfCall(Jit386Mixin, BaseRVMProfTest):
pass
| 25.5 | 65 | 0.838235 | 26 | 204 | 6.5 | 0.576923 | 0.130178 | 0.16568 | 0.248521 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.043478 | 0.098039 | 204 | 7 | 66 | 29.142857 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.2 | 0.6 | 0 | 0.8 | 0 | 1 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
5be5a7ac85b92fe9aad01ac736e7709a01c99d63 | 13,902 | py | Python | venv/lib/python3.8/site-packages/spaceone/api/spot_automation/v1/spot_group_pb2_grpc.py | choonho/plugin-prometheus-mon-webhook | afa7d65d12715fd0480fb4f92a9c62da2d6128e0 | [
"Apache-2.0"
] | null | null | null | venv/lib/python3.8/site-packages/spaceone/api/spot_automation/v1/spot_group_pb2_grpc.py | choonho/plugin-prometheus-mon-webhook | afa7d65d12715fd0480fb4f92a9c62da2d6128e0 | [
"Apache-2.0"
] | null | null | null | venv/lib/python3.8/site-packages/spaceone/api/spot_automation/v1/spot_group_pb2_grpc.py | choonho/plugin-prometheus-mon-webhook | afa7d65d12715fd0480fb4f92a9c62da2d6128e0 | [
"Apache-2.0"
] | null | null | null | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
from spaceone.api.spot_automation.v1 import spot_group_pb2 as spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2
class SpotGroupStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.create = channel.unary_unary(
'/spaceone.api.spot_automation.v1.SpotGroup/create',
request_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.CreateSpotGroupRequest.SerializeToString,
response_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupInfo.FromString,
)
self.update = channel.unary_unary(
'/spaceone.api.spot_automation.v1.SpotGroup/update',
request_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.UpdateSpotGroupRequest.SerializeToString,
response_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupInfo.FromString,
)
self.delete = channel.unary_unary(
'/spaceone.api.spot_automation.v1.SpotGroup/delete',
request_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.get = channel.unary_unary(
'/spaceone.api.spot_automation.v1.SpotGroup/get',
request_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.GetSpotGroupRequest.SerializeToString,
response_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupInfo.FromString,
)
self.list = channel.unary_unary(
'/spaceone.api.spot_automation.v1.SpotGroup/list',
request_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.QuerySpotGroupRequest.SerializeToString,
response_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupsInfo.FromString,
)
self.get_candidates = channel.unary_unary(
'/spaceone.api.spot_automation.v1.SpotGroup/get_candidates',
request_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.GetCandidatesRequest.SerializeToString,
response_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.CandidatesInfo.FromString,
)
self.stat = channel.unary_unary(
'/spaceone.api.spot_automation.v1.SpotGroup/stat',
request_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupStatRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_struct__pb2.Struct.FromString,
)
class SpotGroupServicer(object):
"""Missing associated documentation comment in .proto file."""
def create(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def update(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def delete(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def get(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def list(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def get_candidates(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def stat(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_SpotGroupServicer_to_server(servicer, server):
rpc_method_handlers = {
'create': grpc.unary_unary_rpc_method_handler(
servicer.create,
request_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.CreateSpotGroupRequest.FromString,
response_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupInfo.SerializeToString,
),
'update': grpc.unary_unary_rpc_method_handler(
servicer.update,
request_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.UpdateSpotGroupRequest.FromString,
response_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupInfo.SerializeToString,
),
'delete': grpc.unary_unary_rpc_method_handler(
servicer.delete,
request_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'get': grpc.unary_unary_rpc_method_handler(
servicer.get,
request_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.GetSpotGroupRequest.FromString,
response_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupInfo.SerializeToString,
),
'list': grpc.unary_unary_rpc_method_handler(
servicer.list,
request_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.QuerySpotGroupRequest.FromString,
response_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupsInfo.SerializeToString,
),
'get_candidates': grpc.unary_unary_rpc_method_handler(
servicer.get_candidates,
request_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.GetCandidatesRequest.FromString,
response_serializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.CandidatesInfo.SerializeToString,
),
'stat': grpc.unary_unary_rpc_method_handler(
servicer.stat,
request_deserializer=spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupStatRequest.FromString,
response_serializer=google_dot_protobuf_dot_struct__pb2.Struct.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'spaceone.api.spot_automation.v1.SpotGroup', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class SpotGroup(object):
    """Client-side stub methods for the SpotGroup gRPC service.

    Each static method issues a unary-unary call against the experimental
    gRPC API; all of them share the dispatch logic in :meth:`_unary_call`.
    """

    # Fully qualified service prefix shared by every RPC below.
    _SERVICE_PREFIX = '/spaceone.api.spot_automation.v1.SpotGroup/'

    @staticmethod
    def _unary_call(method, serializer, deserializer, request, target,
                    options, channel_credentials, insecure, call_credentials,
                    compression, wait_for_ready, timeout, metadata):
        """Dispatch a unary-unary experimental gRPC call for *method*."""
        return grpc.experimental.unary_unary(
            request, target, SpotGroup._SERVICE_PREFIX + method,
            serializer, deserializer, options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready,
            timeout, metadata)

    @staticmethod
    def create(request, target, options=(), channel_credentials=None,
               call_credentials=None, insecure=False, compression=None,
               wait_for_ready=None, timeout=None, metadata=None):
        """Call the 'create' RPC; the response parses as SpotGroupInfo."""
        return SpotGroup._unary_call(
            'create',
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.CreateSpotGroupRequest.SerializeToString,
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupInfo.FromString,
            request, target, options, channel_credentials, insecure,
            call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def update(request, target, options=(), channel_credentials=None,
               call_credentials=None, insecure=False, compression=None,
               wait_for_ready=None, timeout=None, metadata=None):
        """Call the 'update' RPC; the response parses as SpotGroupInfo."""
        return SpotGroup._unary_call(
            'update',
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.UpdateSpotGroupRequest.SerializeToString,
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupInfo.FromString,
            request, target, options, channel_credentials, insecure,
            call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def delete(request, target, options=(), channel_credentials=None,
               call_credentials=None, insecure=False, compression=None,
               wait_for_ready=None, timeout=None, metadata=None):
        """Call the 'delete' RPC; the response parses as an Empty message."""
        return SpotGroup._unary_call(
            'delete',
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            request, target, options, channel_credentials, insecure,
            call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def get(request, target, options=(), channel_credentials=None,
            call_credentials=None, insecure=False, compression=None,
            wait_for_ready=None, timeout=None, metadata=None):
        """Call the 'get' RPC; the response parses as SpotGroupInfo."""
        return SpotGroup._unary_call(
            'get',
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.GetSpotGroupRequest.SerializeToString,
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupInfo.FromString,
            request, target, options, channel_credentials, insecure,
            call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def list(request, target, options=(), channel_credentials=None,
             call_credentials=None, insecure=False, compression=None,
             wait_for_ready=None, timeout=None, metadata=None):
        """Call the 'list' RPC; the response parses as SpotGroupsInfo."""
        return SpotGroup._unary_call(
            'list',
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.QuerySpotGroupRequest.SerializeToString,
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupsInfo.FromString,
            request, target, options, channel_credentials, insecure,
            call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def get_candidates(request, target, options=(), channel_credentials=None,
                       call_credentials=None, insecure=False, compression=None,
                       wait_for_ready=None, timeout=None, metadata=None):
        """Call the 'get_candidates' RPC; the response parses as CandidatesInfo."""
        return SpotGroup._unary_call(
            'get_candidates',
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.GetCandidatesRequest.SerializeToString,
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.CandidatesInfo.FromString,
            request, target, options, channel_credentials, insecure,
            call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def stat(request, target, options=(), channel_credentials=None,
             call_credentials=None, insecure=False, compression=None,
             wait_for_ready=None, timeout=None, metadata=None):
        """Call the 'stat' RPC; the response parses as a protobuf Struct."""
        return SpotGroup._unary_call(
            'stat',
            spaceone_dot_api_dot_spot__automation_dot_v1_dot_spot__group__pb2.SpotGroupStatRequest.SerializeToString,
            google_dot_protobuf_dot_struct__pb2.Struct.FromString,
            request, target, options, channel_credentials, insecure,
            call_credentials, compression, wait_for_ready, timeout, metadata)
| 52.067416 | 142 | 0.706445 | 1,467 | 13,902 | 6.182004 | 0.07771 | 0.057118 | 0.050281 | 0.069357 | 0.911898 | 0.908921 | 0.897563 | 0.852795 | 0.818392 | 0.776602 | 0 | 0.009401 | 0.227162 | 13,902 | 266 | 143 | 52.263158 | 0.834698 | 0.057546 | 0 | 0.540179 | 1 | 0 | 0.084057 | 0.056012 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.017857 | 0.03125 | 0.133929 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
75143b9f3e623ca63856093e48133c1677ad16b8 | 19,234 | py | Python | pyasdf/tests/test_low_level.py | nden/pyasdf | 6a786587e0731cfbfb8ec1644eeab4f43ec76f08 | [
"BSD-3-Clause"
] | null | null | null | pyasdf/tests/test_low_level.py | nden/pyasdf | 6a786587e0731cfbfb8ec1644eeab4f43ec76f08 | [
"BSD-3-Clause"
] | null | null | null | pyasdf/tests/test_low_level.py | nden/pyasdf | 6a786587e0731cfbfb8ec1644eeab4f43ec76f08 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals, print_function
import io
import os
from astropy.extern import six
from astropy.tests.helper import pytest
import numpy as np
from numpy.testing import assert_array_equal
from .. import asdf
from .. import generic_io
def _get_small_tree():
x = np.arange(0, 10, dtype=np.float)
tree = {
'science_data': x,
'subset': x[3:-3],
'skipping': x[::2],
'not_shared': np.arange(10, 0, -1, dtype=np.uint8)
}
return tree
def test_no_yaml_end_marker(tmpdir):
    """Reading must fail when the YAML document lacks its '...' end marker."""
    content = b"""#ASDF 0.1.0
%YAML 1.2
%TAG ! tag:stsci.edu:asdf/0.1.0/
--- !core/asdf
foo: bar...baz
baz: 42
"""
    path = os.path.join(str(tmpdir), 'test.asdf')
    # The same malformed content must be rejected from a raw BytesIO buffer,
    # a generic_io stream wrapper, and a real file on disk.
    buff = io.BytesIO(content)
    with pytest.raises(ValueError):
        asdf.AsdfFile.read(buff)
    buff.seek(0)
    fd = generic_io.InputStream(buff, 'r')
    with pytest.raises(ValueError):
        asdf.AsdfFile.read(fd)
    with open(path, 'wb') as fd:
        fd.write(content)
    with open(path, 'rb') as fd:
        with pytest.raises(ValueError):
            asdf.AsdfFile.read(fd)
def test_no_final_newline(tmpdir):
    """A document ending in '...' with no trailing newline still parses."""
    content = b"""#ASDF 0.1.0
%YAML 1.2
%TAG ! tag:stsci.edu:asdf/0.1.0/
--- !core/asdf
foo: ...bar...
baz: 42
..."""
    path = os.path.join(str(tmpdir), 'test.asdf')
    # The two-key tree must parse from a BytesIO buffer, a generic_io
    # stream, and a real file on disk.
    buff = io.BytesIO(content)
    with asdf.AsdfFile.read(buff) as ff:
        assert len(ff.tree) == 2
    buff.seek(0)
    fd = generic_io.InputStream(buff, 'r')
    with asdf.AsdfFile.read(fd) as ff:
        assert len(ff.tree) == 2
    with open(path, 'wb') as fd:
        fd.write(content)
    with open(path, 'rb') as fd:
        with asdf.AsdfFile.read(fd) as ff:
            assert len(ff.tree) == 2
def test_no_asdf_header(tmpdir):
    """Input lacking the '#ASDF' magic line is rejected, from memory and disk."""
    content = b"What? This ain't no ASDF file"
    path = os.path.join(str(tmpdir), 'test.asdf')
    with pytest.raises(ValueError):
        asdf.AsdfFile.read(io.BytesIO(content))
    with open(path, 'wb') as fd:
        fd.write(content)
    with open(path, 'rb') as fd:
        with pytest.raises(ValueError):
            asdf.AsdfFile.read(fd)
def test_no_asdf_blocks(tmpdir):
    """A valid tree followed by non-block garbage yields zero binary blocks."""
    content = b"""#ASDF 0.1.0
%YAML 1.2
%TAG ! tag:stsci.edu:asdf/0.1.0/
--- !core/asdf
foo: bar
...
XXXXXXXX
"""
    path = os.path.join(str(tmpdir), 'test.asdf')
    # Trailing bytes that are not a block header are ignored, whichever way
    # the file is opened.
    buff = io.BytesIO(content)
    ff = asdf.AsdfFile.read(buff)
    assert len(ff.blocks) == 0
    buff.seek(0)
    fd = generic_io.InputStream(buff, 'r')
    ff = asdf.AsdfFile.read(fd)
    assert len(ff.blocks) == 0
    with open(path, 'wb') as fd:
        fd.write(content)
    with open(path, 'rb') as fd:
        ff = asdf.AsdfFile.read(fd)
        assert len(ff.blocks) == 0
def test_invalid_source():
    """blocks.get_block/get_source must validate their source argument."""
    buff = io.BytesIO()
    ff = asdf.AsdfFile(_get_small_tree())
    ff.write_to(buff)
    buff.seek(0)
    ff2 = asdf.AsdfFile.read(buff)
    ff2.blocks.get_block(0)
    # Only block 0 exists, so index 2 is out of range.
    with pytest.raises(ValueError):
        ff2.blocks.get_block(2)
    # A URL source is accepted as a type but fails with IOError here
    # (presumably nothing is serving on that address — see get_block impl).
    with pytest.raises(IOError):
        ff2.blocks.get_block("http://127.0.0.1/")
    # A float is neither an index, a URL, nor a block object.
    with pytest.raises(TypeError):
        ff2.blocks.get_block(42.0)
    with pytest.raises(ValueError):
        ff2.blocks.get_source(42.0)
    # Round-trip: the source of the first block is its index, 0.
    block = ff2.blocks.get_block(0)
    assert ff2.blocks.get_source(block) == 0
def test_empty_file():
    """A header-only ASDF file parses into an empty tree with no blocks."""
    stream = io.BytesIO(b"#ASDF 0.1.0\n")
    stream.seek(0)
    parsed = asdf.AsdfFile.read(stream)
    assert parsed.tree == {}
    assert len(parsed.blocks) == 0
def test_not_asdf_file():
    """Content starting with a FITS-like 'SIMPLE' marker is rejected."""
    for payload in (b"SIMPLE", b"SIMPLE\n"):
        stream = io.BytesIO(payload)
        stream.seek(0)
        with pytest.raises(ValueError):
            asdf.AsdfFile.read(stream)
def test_junk_file():
    """Garbage bytes following a valid ASDF header raise IOError."""
    stream = io.BytesIO(b"#ASDF 0.1.0\nFOO")
    stream.seek(0)
    with pytest.raises(IOError):
        asdf.AsdfFile.read(stream)
def test_block_mismatch():
    """A block followed by an invalid block magic number is rejected."""
    # This is a file with a single small block, followed by something
    # that has an invalid block magic number.
    buff = io.BytesIO(
        b'#ASDF 0.1.0\n\xd3BLK\x00\x28\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0FOOBAR')
    buff.seek(0)
    with pytest.raises(ValueError):
        asdf.AsdfFile.read(buff)
def test_block_header_too_small():
    """A truncated block header (below the 40-byte minimum) is rejected."""
    stream = io.BytesIO(b'#ASDF 0.1.0\n\xd3BLK\0\0')
    stream.seek(0)
    with pytest.raises(ValueError):
        asdf.AsdfFile.read(stream)
# Only registered under Python 2, where this close-then-access behavior
# raises IOError.
if six.PY2:
    def test_file_already_closed(tmpdir):
        """Accessing array data after the backing file is closed raises IOError."""
        tree = _get_small_tree()
        path = os.path.join(str(tmpdir), 'test.asdf')
        ff = asdf.AsdfFile(tree)
        ff.write_to(path)
        # Read through an explicit file object, then let the 'with' close it.
        with open(path, 'rb') as fd:
            ff2 = asdf.AsdfFile.read(fd)
        with pytest.raises(IOError):
            str(ff2.tree['science_data'][:])
def test_external_block(tmpdir):
    """Arrays marked 'external' are written to a sibling exploded file."""
    tmpdir = str(tmpdir)
    my_array = np.random.rand(8, 8)
    tree = {'my_array': my_array}
    ff = asdf.AsdfFile(tree)
    ff.set_array_storage(my_array, 'external')
    assert ff.get_array_storage(my_array) == 'external'
    with ff.write_to(os.path.join(tmpdir, "test.asdf")):
        pass
    # The external block lands in a separate, auto-numbered companion file.
    assert 'test0000.asdf' in os.listdir(tmpdir)
def test_external_block_non_url():
    """Writing external blocks to a non-URL target (a memory buffer) fails."""
    data = np.random.rand(8, 8)
    ff = asdf.AsdfFile({'my_array': data})
    ff.set_array_storage(data, 'external')
    assert ff.get_array_storage(data) == 'external'
    sink = io.BytesIO()
    with pytest.raises(ValueError):
        with ff.write_to(sink):
            pass
def test_invalid_array_storage():
    """set_array_storage rejects unknown storage policy names."""
    data = np.random.rand(8, 8)
    ff = asdf.AsdfFile({'my_array': data})
    with pytest.raises(ValueError):
        ff.set_array_storage(data, 'foo')
def test_transfer_array_sources(tmpdir):
    """Block data stays reachable after re-serializing to a second file."""
    tmpdir = str(tmpdir)
    my_array = np.random.rand(8, 8)
    tree = {'my_array': my_array}
    ff = asdf.AsdfFile(tree)
    with ff.write_to(os.path.join(tmpdir, "test.asdf")):
        pass
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        original_fd = ff._fd
        assert_array_equal(my_array, ff.tree['my_array'])
        ff.write_to(os.path.join(tmpdir, "test2.asdf"))
        # Assert that the original file is closed
        assert original_fd._fd.closed
        # ...but when we access the data it is magically opened from
        # the new file.
        assert_array_equal(my_array, ff.tree['my_array'])
        assert ff._fd is None
def test_write_to_same(tmpdir):
    """A file opened 'rw' can be re-serialized (with additions) to a new path."""
    tmpdir = str(tmpdir)
    my_array = np.random.rand(8, 8)
    tree = {'my_array': my_array}
    ff = asdf.AsdfFile(tree)
    with ff.write_to(os.path.join(tmpdir, "test.asdf")):
        pass
    with asdf.AsdfFile.read(
            os.path.join(tmpdir, "test.asdf"), mode='rw') as ff:
        assert_array_equal(my_array, ff.tree['my_array'])
        # Grow the tree, then write the open file out to a second path.
        ff.tree['extra'] = [0] * 1000
        ff.write_to(os.path.join(tmpdir, "test2.asdf"))
    with asdf.AsdfFile.read(
            os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff:
        assert_array_equal(my_array, ff.tree['my_array'])
def test_pad_blocks(tmpdir):
    """Files written with pad_blocks=True still round-trip all arrays."""
    tmpdir = str(tmpdir)
    # Two differently shaped blocks, written with padding after each.
    my_array = np.ones((8, 8)) * 1
    my_array2 = np.ones((42, 5)) * 2
    tree = {
        'my_array': my_array,
        'my_array2': my_array2
    }
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(os.path.join(tmpdir, "test.asdf"), pad_blocks=True)
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        assert_array_equal(ff.tree['my_array'], my_array)
        assert_array_equal(ff.tree['my_array2'], my_array2)
def test_update_expand_tree(tmpdir):
    """update() relocates blocks when the tree outgrows its padding, and
    reuses the existing layout when the growth still fits."""
    tmpdir = str(tmpdir)
    # This is the case where the new tree can't fit in the available space
    my_array = np.arange(64) * 1
    my_array2 = np.arange(64) * 2
    tree = {
        'my_array': my_array,
        'my_array2': my_array2,
        'my_array3': np.arange(3)
    }
    with asdf.AsdfFile(tree) as ff:
        ff.blocks[tree['my_array3']].array_storage = 'inline'
        ff.write_to(os.path.join(tmpdir, "test.asdf"), pad_blocks=True)
        orig_offset = ff.blocks[ff.tree['my_array']].offset
        orig_offset2 = ff.blocks[ff.tree['my_array2']].offset
        ff.tree['extra'] = [0] * 6000
        ff.update()
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        # Blocks had to move to make room for the larger tree.
        assert orig_offset != ff.blocks[ff.tree['my_array']].offset
        assert orig_offset2 != ff.blocks[ff.tree['my_array2']].offset
        assert ff.blocks[ff.tree['my_array3']].array_storage == 'inline'
        assert_array_equal(ff.tree['my_array'], my_array)
        assert_array_equal(ff.tree['my_array2'], my_array2)
    # Now, we expand the header only by a little bit
    with asdf.AsdfFile(tree) as ff:
        ff.blocks[tree['my_array3']].array_storage = 'inline'
        ff.write_to(os.path.join(tmpdir, "test.asdf"), pad_blocks=True)
        orig_offset = ff.blocks[ff.tree['my_array']].offset
        ff.tree['extra'] = [0] * 2
        ff.update()
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        # Small growth fits inside the pad: block offsets are unchanged.
        assert orig_offset == ff.blocks[ff.tree['my_array']].offset
        assert ff.blocks[ff.tree['my_array3']].array_storage == 'inline'
        assert_array_equal(ff.tree['my_array'], my_array)
        assert_array_equal(ff.tree['my_array2'], my_array2)
def _get_update_tree():
return {
'arrays': [
np.arange(64) * 1,
np.arange(64) * 2,
np.arange(64) * 3
]
}
def test_update_delete_first_array(tmpdir):
    """Deleting the first array and updating rewrites the file in place."""
    tmpdir = str(tmpdir)
    path = os.path.join(tmpdir, 'test.asdf')
    # pad_blocks reserves space so update() can rewrite without growing.
    tree = _get_update_tree()
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(path, pad_blocks=True)
    original_size = os.stat(path).st_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff:
        del ff.tree['arrays'][0]
        ff.update()
    # In-place update: the file size must not change.
    assert os.stat(path).st_size == original_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        assert_array_equal(ff.tree['arrays'][0], tree['arrays'][1])
        assert_array_equal(ff.tree['arrays'][1], tree['arrays'][2])
def test_update_delete_last_array(tmpdir):
    """Deleting the last array and updating rewrites the file in place."""
    tmpdir = str(tmpdir)
    path = os.path.join(tmpdir, 'test.asdf')
    # pad_blocks reserves space so update() can rewrite without growing.
    tree = _get_update_tree()
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(path, pad_blocks=True)
    original_size = os.stat(path).st_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff:
        del ff.tree['arrays'][-1]
        ff.update()
    # In-place update: the file size must not change.
    assert os.stat(path).st_size == original_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0])
        assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1])
def test_update_delete_middle_array(tmpdir):
    """Deleting a middle array and updating rewrites the file in place."""
    tmpdir = str(tmpdir)
    path = os.path.join(tmpdir, 'test.asdf')
    # pad_blocks reserves space so update() can rewrite without growing.
    tree = _get_update_tree()
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(path, pad_blocks=True)
    original_size = os.stat(path).st_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff:
        del ff.tree['arrays'][1]
        ff.update()
    # In-place update: the file size must not change.
    assert os.stat(path).st_size == original_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0])
        assert_array_equal(ff.tree['arrays'][1], tree['arrays'][2])
def test_update_replace_first_array(tmpdir):
    """Replacing the first array with a smaller one updates in place."""
    tmpdir = str(tmpdir)
    path = os.path.join(tmpdir, 'test.asdf')
    # pad_blocks reserves space so update() can rewrite without growing.
    tree = _get_update_tree()
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(path, pad_blocks=True)
    original_size = os.stat(path).st_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff:
        ff.tree['arrays'][0] = np.arange(32)
        ff.update()
    # In-place update: the file size must not change.
    assert os.stat(path).st_size == original_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        assert_array_equal(ff.tree['arrays'][0], np.arange(32))
        assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1])
        assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2])
def test_update_replace_last_array(tmpdir):
    """Replacing the last array with a smaller one updates in place."""
    tmpdir = str(tmpdir)
    path = os.path.join(tmpdir, 'test.asdf')
    # pad_blocks reserves space so update() can rewrite without growing.
    tree = _get_update_tree()
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(path, pad_blocks=True)
    original_size = os.stat(path).st_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff:
        ff.tree['arrays'][2] = np.arange(32)
        ff.update()
    # In-place update: the file size must not change.
    assert os.stat(path).st_size == original_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0])
        assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1])
        assert_array_equal(ff.tree['arrays'][2], np.arange(32))
def test_update_replace_middle_array(tmpdir):
    """Replacing a middle array with a smaller one updates in place."""
    tmpdir = str(tmpdir)
    path = os.path.join(tmpdir, 'test.asdf')
    # pad_blocks reserves space so update() can rewrite without growing.
    tree = _get_update_tree()
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(path, pad_blocks=True)
    original_size = os.stat(path).st_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff:
        ff.tree['arrays'][1] = np.arange(32)
        ff.update()
    # In-place update: the file size must not change.
    assert os.stat(path).st_size == original_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0])
        assert_array_equal(ff.tree['arrays'][1], np.arange(32))
        assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2])
def test_update_add_array(tmpdir):
    """A small appended array fits into the padding; file size is unchanged."""
    tmpdir = str(tmpdir)
    path = os.path.join(tmpdir, 'test.asdf')
    # pad_blocks reserves space so the new small block can be absorbed.
    tree = _get_update_tree()
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(path, pad_blocks=True)
    original_size = os.stat(path).st_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff:
        ff.tree['arrays'].append(np.arange(32))
        ff.update()
    # The 32-element addition fits in the reserved padding.
    assert os.stat(path).st_size == original_size
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0])
        assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1])
        assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2])
        assert_array_equal(ff.tree['arrays'][3], np.arange(32))
def test_update_add_array_at_end(tmpdir):
    """Appending an array too large for the reserved padding grows the file."""
    tmpdir = str(tmpdir)
    path = os.path.join(tmpdir, 'test.asdf')
    tree = _get_update_tree()
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(path, pad_blocks=True)
    original_size = os.stat(path).st_size
    with asdf.AsdfFile.read(path, mode="rw") as ff:
        # 2048 elements cannot fit in the padding, so the new block must be
        # appended at the end of the file.
        ff.tree['arrays'].append(np.arange(2048))
        ff.update()
    assert os.stat(path).st_size > original_size
    with asdf.AsdfFile.read(path) as ff:
        assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0])
        assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1])
        assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2])
        assert_array_equal(ff.tree['arrays'][3], np.arange(2048))
        # (removed leftover debug print of block offsets)
def test_update_replace_all_arrays(tmpdir):
    """Replacing the only array with an equal-sized one survives update()."""
    tmpdir = str(tmpdir)
    # Replace the array wholesale (new object, same shape), then persist.
    my_array = np.ones((64, 64)) * 1
    tree = {
        'my_array': my_array,
    }
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(os.path.join(tmpdir, "test.asdf"), pad_blocks=True)
        ff.tree['my_array'] = np.ones((64, 64)) * 2
        ff.update()
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        assert_array_equal(ff.tree['my_array'], np.ones((64, 64)) * 2)
def test_update_array_in_place(tmpdir):
    """Mutating the memory-mapped array in place persists via update()."""
    tmpdir = str(tmpdir)
    # Mutate the existing array object (no replacement), then persist.
    my_array = np.ones((64, 64)) * 1
    tree = {
        'my_array': my_array,
    }
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(os.path.join(tmpdir, "test.asdf"), pad_blocks=True)
        ff.tree['my_array'] *= 2
        ff.update()
    with asdf.AsdfFile.read(os.path.join(tmpdir, "test.asdf")) as ff:
        assert_array_equal(ff.tree['my_array'], np.ones((64, 64)) * 2)
def test_init_from_asdffile(tmpdir):
    """Constructing an AsdfFile from another shares arrays for an in-memory
    source but copies them for a file-backed source; the two files always
    have independent block managers."""
    tmpdir = str(tmpdir)
    my_array = np.random.rand(8, 8)
    tree = {'my_array': my_array}
    with asdf.AsdfFile(tree) as ff:
        ff2 = asdf.AsdfFile(ff)
        # In-memory source: the exact same array object is shared.
        assert ff.tree['my_array'] is ff2.tree['my_array']
        assert_array_equal(ff.tree['my_array'], ff2.tree['my_array'])
        assert ff.blocks[my_array] != ff2.blocks[my_array]
        # Clearing the copy must not affect the original.
        ff2.tree['my_array'] = None
        assert_array_equal(ff.tree['my_array'], my_array)
        ff.write_to(os.path.join(tmpdir, 'test.asdf'))
    with asdf.AsdfFile().read(os.path.join(tmpdir, 'test.asdf')) as ff:
        ff2 = asdf.AsdfFile(ff)
        # File-backed source: the copy gets its own array object.
        # (was `assert not ... is ...` — PEP 8 prefers `is not`)
        assert ff.tree['my_array'] is not ff2.tree['my_array']
        assert_array_equal(ff.tree['my_array'], ff2.tree['my_array'])
        assert ff.blocks[my_array] != ff2.blocks[my_array]
        ff2.tree['my_array'] = None
        assert_array_equal(ff.tree['my_array'], my_array)
def test_update_exceptions(tmpdir):
    """update() needs a writable backing store and an associated file."""
    tmpdir = str(tmpdir)
    path = os.path.join(tmpdir, 'test.asdf')
    my_array = np.random.rand(8, 8)
    tree = {'my_array': my_array}
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(path)
    # Read-only file: update() must fail with IOError.
    with asdf.AsdfFile().read(path) as ff:
        with pytest.raises(IOError):
            ff.update()
    # A writable in-memory buffer is acceptable: update() succeeds.
    with asdf.AsdfFile(tree) as ff:
        buff = io.BytesIO()
        ff.write_to(buff)
        ff.update()
    # No backing store at all: update() must fail with ValueError.
    with pytest.raises(ValueError):
        asdf.AsdfFile().update()
def test_get_data_from_closed_file(tmpdir):
    """Array access after the AsdfFile context has exited raises IOError."""
    tmpdir = str(tmpdir)
    path = os.path.join(tmpdir, 'test.asdf')
    my_array = np.random.rand(8, 8)
    tree = {'my_array': my_array}
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(path)
    with asdf.AsdfFile().read(path) as ff:
        pass
    # ff is now closed; lazily-loaded block data is no longer reachable.
    with pytest.raises(IOError):
        assert_array_equal(my_array, ff.tree['my_array'])
| 28.750374 | 132 | 0.63341 | 2,984 | 19,234 | 3.935992 | 0.072051 | 0.049468 | 0.043423 | 0.062665 | 0.832695 | 0.816433 | 0.797871 | 0.784078 | 0.771392 | 0.73725 | 0 | 0.022215 | 0.215972 | 19,234 | 668 | 133 | 28.793413 | 0.756631 | 0.063481 | 0 | 0.702882 | 0 | 0.002217 | 0.104947 | 0.010506 | 0 | 0 | 0 | 0 | 0.159645 | 1 | 0.073171 | false | 0.011086 | 0.019956 | 0.002217 | 0.097561 | 0.004435 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
75432879ef11cfda72029967d2dfde1a94ae3395 | 195 | py | Python | tradesystem/messages/message_bus_info.py | long20p/SecuritiesExchange | ac5999e4ce173dd6327bef5725689f930a4e5d5d | [
"MIT"
] | null | null | null | tradesystem/messages/message_bus_info.py | long20p/SecuritiesExchange | ac5999e4ce173dd6327bef5725689f930a4e5d5d | [
"MIT"
] | null | null | null | tradesystem/messages/message_bus_info.py | long20p/SecuritiesExchange | ac5999e4ce173dd6327bef5725689f930a4e5d5d | [
"MIT"
class MessageBusInfo:
    """Registry of the message-bus queue names used by the trade system."""

    @staticmethod
    def new_order_queue_name():
        """Name of the queue that receives newly placed orders."""
        return 'new_order_queue'

    @staticmethod
    def cancel_order_queue_name():
        """Name of the queue that receives order-cancellation requests."""
        return 'cancel_order_queue'
f34e252129d52f37b1972486b36718576c2480dd | 23,766 | py | Python | swagger_client/apis/userprofile_api.py | rcbops/qtest-swagger-client | 28220aa95d878922ca4b35c325706932adabea4e | [
"Apache-2.0"
] | 1 | 2019-09-10T17:55:53.000Z | 2019-09-10T17:55:53.000Z | swagger_client/apis/userprofile_api.py | rcbops/qtest-swagger-client | 28220aa95d878922ca4b35c325706932adabea4e | [
"Apache-2.0"
] | null | null | null | swagger_client/apis/userprofile_api.py | rcbops/qtest-swagger-client | 28220aa95d878922ca4b35c325706932adabea4e | [
"Apache-2.0"
] | 2 | 2019-02-12T23:15:10.000Z | 2022-03-11T20:08:28.000Z | # coding: utf-8
"""
qTest Manager API Version 8.6 - 9.1
qTest Manager API Version 8.6 - 9.1
OpenAPI spec version: 8.6 - 9.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class UserprofileApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def batch_assign_user_profile_in_client(self, body, **kwargs):
"""
Assigns multiple Users to a Profile
To batch assign users to a profile (Admin profile, User profile) (as in qTest Manager <em>admin panel</em>). It requires that your qTest Manager profile is a site admin with <em>Manage Client Users</em> permissions <strong>qTest Manager version:</strong> 8.4.2+
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.batch_assign_user_profile_in_client(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param SiteUsersProfile body: An array of user IDs and admin and/or normal user profile (required)
:return: list[UserResourceExtension]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.batch_assign_user_profile_in_client_with_http_info(body, **kwargs)
else:
(data) = self.batch_assign_user_profile_in_client_with_http_info(body, **kwargs)
return data
    def batch_assign_user_profile_in_client_with_http_info(self, body, **kwargs):
        """
        Assigns multiple Users to a Profile
        To batch assign users to a profile (Admin profile, User profile) (as in qTest Manager <em>admin panel</em>). It requires that your qTest Manager profile is a site admin with <em>Manage Client Users</em> permissions <strong>qTest Manager version:</strong> 8.4.2+
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.batch_assign_user_profile_in_client_with_http_info(body, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param SiteUsersProfile body: An array of user IDs and admin and/or normal user profile (required)
        :return: list[UserResourceExtension]
            If the method is called asynchronously,
            returns the request thread.
        """
        all_params = ['body']
        # Internal control kwargs accepted alongside the documented parameters.
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument that is not a known parameter.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method batch_assign_user_profile_in_client" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `batch_assign_user_profile_in_client`")
        collection_formats = {}
        resource_path = '/api/v3/user-profiles/batch-assign-users'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = ['Authorization']
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='list[UserResourceExtension]',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def batch_assign_user_profile_in_project(self, project_id, body, **kwargs):
"""
Assigns multiple Users to a Profile in a Project
To change profile of assigned users (as a project's User Management page). It requires that your qTest Manager profile within the project is Project Admin <strong>qTest Manager version:</strong> 8.4.2+
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.batch_assign_user_profile_in_project(project_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int project_id: ID of the project (required)
:param ProjectUsersProfile body: An array of user IDs and a user profile (required)
:return: list[UserResourceExtension]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.batch_assign_user_profile_in_project_with_http_info(project_id, body, **kwargs)
else:
(data) = self.batch_assign_user_profile_in_project_with_http_info(project_id, body, **kwargs)
return data
    def batch_assign_user_profile_in_project_with_http_info(self, project_id, body, **kwargs):
        """
        Assigns multiple Users to a Profile in a Project
        To change profile of assigned users (as a project's User Management page). It requires that your qTest Manager profile within the project is Project Admin <strong>qTest Manager version:</strong> 8.4.2+
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.batch_assign_user_profile_in_project_with_http_info(project_id, body, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int project_id: ID of the project (required)
        :param ProjectUsersProfile body: An array of user IDs and a user profile (required)
        :return: list[UserResourceExtension]
            If the method is called asynchronously,
            returns the request thread.
        """
        all_params = ['project_id', 'body']
        # Internal control kwargs accepted alongside the documented parameters.
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument that is not a known parameter.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method batch_assign_user_profile_in_project" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'project_id' is set
        if ('project_id' not in params) or (params['project_id'] is None):
            raise ValueError("Missing the required parameter `project_id` when calling `batch_assign_user_profile_in_project`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `batch_assign_user_profile_in_project`")
        collection_formats = {}
        resource_path = '/api/v3/user-profiles/{projectId}/batch-assign-users'.replace('{format}', 'json')
        path_params = {}
        # projectId is substituted into the URL path template by the client.
        if 'project_id' in params:
            path_params['projectId'] = params['project_id']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = ['Authorization']
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='list[UserResourceExtension]',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def get_current(self, **kwargs):
"""
Gets current User's Admin Profile
To retrieve your Admin Profile <strong>qTest Manager version:</strong> 4+
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_current(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: AdminProfile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_current_with_http_info(**kwargs)
else:
(data) = self.get_current_with_http_info(**kwargs)
return data
def get_current_with_http_info(self, **kwargs):
"""
Gets current User's Admin Profile
To retrieve your Admin Profile <strong>qTest Manager version:</strong> 4+
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_current_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: AdminProfile
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_current" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/api/v3/admin-profiles/current'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Authorization']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AdminProfile',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_profiles_of_current_user(self, **kwargs):
"""
Gets current User's Profiles in different Projects
To retrieve your User Profiles in different Projects
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_profiles_of_current_user(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: list[UserProfile]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_profiles_of_current_user_with_http_info(**kwargs)
else:
(data) = self.get_profiles_of_current_user_with_http_info(**kwargs)
return data
def get_profiles_of_current_user_with_http_info(self, **kwargs):
"""
Gets current User's Profiles in different Projects
To retrieve your User Profiles in different Projects
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_profiles_of_current_user_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: list[UserProfile]
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_profiles_of_current_user" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/api/v3/user-profiles/current'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Authorization']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[UserProfile]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_profiles(self, **kwargs):
"""
Gets available Profiles
To retrieve all available profiles in your qTest Manager instance. It requires that your qTest Manager profile is a site admin with <em>View User Profiles</em> permissions <strong>qTest Manager version:</strong> 8.4.2+
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_profiles(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str type: <em>type=admin</em> - to retrieve only admin profiles <em>type=use</em> - to retrieve only normal user profiles Omit this parameter to include both
:return: UserProfileResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_user_profiles_with_http_info(**kwargs)
else:
(data) = self.get_user_profiles_with_http_info(**kwargs)
return data
def get_user_profiles_with_http_info(self, **kwargs):
"""
Gets available Profiles
To retrieve all available profiles in your qTest Manager instance. It requires that your qTest Manager profile is a site admin with <em>View User Profiles</em> permissions <strong>qTest Manager version:</strong> 8.4.2+
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_profiles_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str type: <em>type=admin</em> - to retrieve only admin profiles <em>type=use</em> - to retrieve only normal user profiles Omit this parameter to include both
:return: UserProfileResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['type']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_profiles" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/api/v3/user-profiles'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'type' in params:
query_params['type'] = params['type']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Authorization']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserProfileResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 45.268571 | 271 | 0.569848 | 2,459 | 23,766 | 5.283449 | 0.081334 | 0.061576 | 0.021552 | 0.027709 | 0.949661 | 0.940117 | 0.935191 | 0.920413 | 0.881696 | 0.877001 | 0 | 0.002614 | 0.356097 | 23,766 | 524 | 272 | 45.354962 | 0.84637 | 0.368678 | 0 | 0.74902 | 1 | 0 | 0.153599 | 0.058689 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043137 | false | 0 | 0.027451 | 0 | 0.133333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
34099f2c6e774790af0850dc7f754bf26fd6e7c5 | 2,182 | py | Python | sphinxcontrib_session/test.py | SymbiFlow/sphinxcontrib-session | 8ec81a80cc68aaba76911d803ce49c9bed8fd8e8 | [
"Apache-2.0"
] | null | null | null | sphinxcontrib_session/test.py | SymbiFlow/sphinxcontrib-session | 8ec81a80cc68aaba76911d803ce49c9bed8fd8e8 | [
"Apache-2.0"
] | 1 | 2019-12-27T19:20:33.000Z | 2019-12-27T19:20:33.000Z | sphinxcontrib_session/test.py | mithro/sphinxcontrib-session | f9e959a696ba02d874ffc291a3b83a4458c1bad6 | [
"Apache-2.0"
] | 2 | 2020-02-01T15:43:19.000Z | 2020-02-22T00:11:39.000Z | #!/usr/bin/env python3
import unittest
from sphinxcontrib_session import rewrite_spans
class TestCase(unittest.TestCase):
    """Tests for :func:`sphinxcontrib_session.rewrite_spans`."""

    # The HTML fixtures below are long; show the full diff on failure.
    maxDiff=None

    def testRewrite(self):
        """rewrite_spans() must empty the prompt ("gp") and output ("go")
        spans and move their literal text into a ``data-content`` attribute,
        as shown by the (input, expected output) pairs below."""
        # Each entry is a (before, expected_after) pair of Pygments-style HTML.
        TEST_CASES = [
            ("""
<span class="gp">>>> </span><span class="n">h</span> <span class="o">=</span> <span class="s1">'hello'</span>
<span class="gp">>>> </span><span class="nb">print</span><span class="p">(</span><span class="s1">'hello'</span><span class="p">)</span>
<span class="go">hello</span>
<span class="gp">>>> </span><span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="n">hello</span><span class="p">:</span>
<span class="gp">... </span> <span class="nb">print</span><span class="p">(</span><span class="n">i</span><span class="p">)</span>
<span class="go">h</span>
<span class="go">e</span>
<span class="go">l</span>
<span class="go">l</span>
<span class="go">o</span>
<span class="go">>>></span>
""", """
<span class="gp" data-content=">>> "></span><span class="n">h</span> <span class="o">=</span> <span class="s1">'hello'</span>
<span class="gp" data-content=">>> "></span><span class="nb">print</span><span class="p">(</span><span class="s1">'hello'</span><span class="p">)</span>
<span class="go" data-content="hello
"></span><span class="gp" data-content=">>> "></span><span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="n">hello</span><span class="p">:</span>
<span class="gp" data-content="... "></span> <span class="nb">print</span><span class="p">(</span><span class="n">i</span><span class="p">)</span>
<span class="go" data-content="h
"></span><span class="go" data-content="e
"></span><span class="go" data-content="l
"></span><span class="go" data-content="l
"></span><span class="go" data-content="o
"></span><span class="go" data-content=">>>
"></span>"""),
        ]
        for before, expected_after in TEST_CASES:
            actual_after = rewrite_spans(before)
            self.assertMultiLineEqual(expected_after, actual_after)
# Allow running this test module directly: ``python test.py``.
if __name__ == "__main__":
    unittest.main()
| 47.434783 | 193 | 0.617324 | 348 | 2,182 | 3.821839 | 0.143678 | 0.365414 | 0.518045 | 0.157895 | 0.796241 | 0.761654 | 0.702256 | 0.702256 | 0.702256 | 0.658647 | 0 | 0.010703 | 0.100825 | 2,182 | 45 | 194 | 48.488889 | 0.667176 | 0.009624 | 0 | 0.108108 | 0 | 0.216216 | 0.809259 | 0.311111 | 0 | 0 | 0 | 0 | 0.027027 | 1 | 0.027027 | false | 0 | 0.054054 | 0 | 0.135135 | 0.108108 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
341fbf9163af6834d22378af55840b0fdc1c9824 | 146 | py | Python | ENV/lib/python3.5/site-packages/preprocessor/__init__.py | block1o1/CryptoPredicted | 7f660cdc456fb8252b3125028f31fd6f5a3ceea5 | [
"MIT"
] | 4 | 2021-10-14T21:22:25.000Z | 2022-03-12T19:58:48.000Z | ENV/lib/python3.5/site-packages/preprocessor/__init__.py | inevolin/CryptoPredicted | 7f660cdc456fb8252b3125028f31fd6f5a3ceea5 | [
"MIT"
] | null | null | null | ENV/lib/python3.5/site-packages/preprocessor/__init__.py | inevolin/CryptoPredicted | 7f660cdc456fb8252b3125028f31fd6f5a3ceea5 | [
"MIT"
] | 1 | 2022-03-15T22:52:53.000Z | 2022-03-15T22:52:53.000Z | from .api import clean, tokenize, parse, set_options
from .defines import Options as OPT
__all__ = ['clean', 'tokenize', 'parse', 'set_options']
| 29.2 | 55 | 0.732877 | 20 | 146 | 5.05 | 0.6 | 0.257426 | 0.356436 | 0.415842 | 0.554455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.136986 | 146 | 4 | 56 | 36.5 | 0.801587 | 0 | 0 | 0 | 0 | 0 | 0.19863 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
caeb3602cab5e77d0a85dc9716d36bb94081c8a3 | 5,713 | py | Python | conans/test/integration/generators/markdown_test.py | ShuangLiu1992/conan | b420ec1601febfa97f1f61d8da9ba083928ca7ea | [
"MIT"
] | null | null | null | conans/test/integration/generators/markdown_test.py | ShuangLiu1992/conan | b420ec1601febfa97f1f61d8da9ba083928ca7ea | [
"MIT"
] | null | null | null | conans/test/integration/generators/markdown_test.py | ShuangLiu1992/conan | b420ec1601febfa97f1f61d8da9ba083928ca7ea | [
"MIT"
] | null | null | null | import textwrap
import unittest
from conans.test.utils.tools import TestClient
class MarkDownGeneratorTest(unittest.TestCase):
    """Checks the Markdown produced by the ``markdown`` generator for
    packages that declare cmake/pkg-config properties and components."""

    def _markdown_for(self, conanfile, extra_files=None):
        """Create ``bar/0.1.0@user/testing`` from *conanfile* (plus any
        *extra_files*), install it with the markdown generator and return
        the generated ``bar.md`` content.

        Every test below exercised this exact create/install/load sequence
        inline; it is factored out here to keep the tests focused on their
        assertions.
        """
        files = {"conanfile.py": conanfile}
        if extra_files:
            files.update(extra_files)
        client = TestClient()
        client.save(files)
        client.run("create . bar/0.1.0@user/testing")
        client.run("install bar/0.1.0@user/testing -g markdown")
        return client.load("bar.md")

    def test_cmake_find_filename(self):
        """cmake_file_name/cmake_target_name drive the find_package and
        target_link_libraries usage hints."""
        conanfile = textwrap.dedent("""
            from conans import ConanFile

            class HelloConan(ConanFile):
                def package_info(self):
                    self.cpp_info.set_property("cmake_file_name", "FooBar")
                    self.cpp_info.set_property("cmake_target_name", "foobar")
                    self.cpp_info.set_property("pkg_config_name", "foobar_cfg")
            """)
        content = self._markdown_for(conanfile)

        self.assertIn("find_package(FooBar)", content)
        self.assertIn("target_link_libraries(<target_name> foobar)", content)

    def test_cmake_find_filename_with_namespace(self):
        """A namespaced cmake_target_name is reproduced verbatim in the
        target_link_libraries hint."""
        conanfile = textwrap.dedent("""
            from conans import ConanFile

            class HelloConan(ConanFile):
                def package_info(self):
                    self.cpp_info.set_property("cmake_file_name", "FooBar")
                    self.cpp_info.set_property("cmake_target_name", "foobar::foobar")
                    self.cpp_info.set_property("pkg_config_name", "foobar_cfg")
            """)
        content = self._markdown_for(conanfile)

        self.assertIn("find_package(FooBar)", content)
        self.assertIn("target_link_libraries(<target_name> foobar::foobar)", content)

    def test_with_build_modules(self):
        """Declared cmake_build_modules are listed and their content is
        embedded in the generated document."""
        conanfile = textwrap.dedent("""
            import os
            from conans import ConanFile

            class HelloConan(ConanFile):
                exports_sources = 'bm.cmake'

                def package(self):
                    self.copy('bm.cmake', dst='lib/cmake')

                def package_info(self):
                    self.cpp_info.set_property("cmake_file_name", "FooBar")
                    self.cpp_info.set_property("cmake_target_name", "foobar")
                    self.cpp_info.set_property("pkg_config_name", "foobar_cfg")
                    self.cpp_info.set_property('cmake_build_modules', ['lib/cmake/bm.cmake'])
            """)
        content = self._markdown_for(
            conanfile, extra_files={"bm.cmake": "Content of build_module"})

        self.assertIn("* `lib/cmake/bm.cmake`", content)
        self.assertIn("Content of build_module", content)

    def test_no_components(self):
        """Without declared components, the component sections are omitted."""
        conanfile = textwrap.dedent("""
            import os
            from conans import ConanFile

            class HelloConan(ConanFile):
                def package_info(self):
                    self.cpp_info.set_property("cmake_target_name", "foobar")
            """)
        content = self._markdown_for(conanfile)

        self.assertNotIn("Or link just one of its components", content)
        self.assertNotIn("Declared components", content)

    def test_with_components(self):
        """Declared components produce per-component link hints and a
        component listing."""
        conanfile = textwrap.dedent("""
            import os
            from conans import ConanFile

            class HelloConan(ConanFile):
                def package_info(self):
                    self.cpp_info.set_property("cmake_target_name", "foobar")
                    self.cpp_info.components["component1"].set_property("cmake_target_name", "foobar::component_name")
            """)
        content = self._markdown_for(conanfile)

        self.assertIn("target_link_libraries(<target_name> foobar::component_name)", content)
        self.assertIn("* Component ``foobar::component_name``", content)

    def test_with_components_and_target_namespace(self):
        """Component names keep their namespace in link hints and listing."""
        conanfile = textwrap.dedent("""
            import os
            from conans import ConanFile

            class HelloConan(ConanFile):
                def package_info(self):
                    self.cpp_info.set_property("cmake_target_name", "namespace::name")
                    self.cpp_info.components["component1"].set_property("cmake_target_name", "namespace::component_name")
            """)
        content = self._markdown_for(conanfile)

        self.assertIn("target_link_libraries(<target_name> namespace::component_name)", content)
        self.assertIn("* Component ``namespace::component_name``", content)
| 44.984252 | 129 | 0.566953 | 604 | 5,713 | 5.177152 | 0.124172 | 0.033579 | 0.052766 | 0.058203 | 0.845859 | 0.807163 | 0.764631 | 0.74992 | 0.748001 | 0.748001 | 0 | 0.009726 | 0.316121 | 5,713 | 126 | 130 | 45.34127 | 0.790632 | 0 | 0 | 0.714286 | 0 | 0 | 0.649221 | 0.259408 | 0 | 0 | 0 | 0 | 0.114286 | 1 | 0.057143 | false | 0 | 0.12381 | 0 | 0.190476 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1b16dedc5ebf20cba2b30c8c722fafcb5277b623 | 127 | py | Python | strix/file_formats/__init__.py | HFM3/strix | 94bbc568f614bbb0f525d8ce17de4c64ef3b46d2 | [
"MIT"
] | null | null | null | strix/file_formats/__init__.py | HFM3/strix | 94bbc568f614bbb0f525d8ce17de4c64ef3b46d2 | [
"MIT"
] | null | null | null | strix/file_formats/__init__.py | HFM3/strix | 94bbc568f614bbb0f525d8ce17de4c64ef3b46d2 | [
"MIT"
] | null | null | null | # import strix.file_formats.egf as egf
# import strix.file_formats.csv as csv
# import strix.file_formats.geo_json as geo_json
| 31.75 | 48 | 0.811024 | 23 | 127 | 4.26087 | 0.391304 | 0.336735 | 0.459184 | 0.673469 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.11811 | 127 | 3 | 49 | 42.333333 | 0.875 | 0.944882 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1b256a410d301ce15de16985ebe124cdb786218e | 5,445 | py | Python | interval.py | CatBia/pixelsort | 7504c6baf9cf9fd77381a70696de26f24f9fe7f1 | [
"MIT"
] | 1 | 2019-02-08T15:42:31.000Z | 2019-02-08T15:42:31.000Z | interval.py | CatBia/pixelsort | 7504c6baf9cf9fd77381a70696de26f24f9fe7f1 | [
"MIT"
] | null | null | null | interval.py | CatBia/pixelsort | 7504c6baf9cf9fd77381a70696de26f24f9fe7f1 | [
"MIT"
] | null | null | null | try:
import Image
import ImageFilter
except ImportError:
from PIL import Image, ImageFilter
import random as rand
import constants
import util
def edge(pixels, args):
    """Define sort intervals at detected edges of the input image.

    The source image is rotated by ``args.angle``, run through PIL's
    FIND_EDGES filter, thresholded against ``args.bottom_threshold``,
    and thinned so adjacent edge columns collapse to one.  For every row
    the x positions of the surviving edge pixels (terminated by the row
    width) become the interval boundaries.
    """
    source = Image.open(args.image_input_path).rotate(args.angle, expand=True)
    edge_data = source.filter(ImageFilter.FIND_EDGES).convert('RGBA').load()

    print("Defining edges...")
    filter_pixels = [[edge_data[col, row] for col in range(source.size[0])]
                     for row in range(source.size[1])]

    print("Thresholding...")
    height, width = len(pixels), len(pixels[0])
    edge_pixels = [
        [constants.white_pixel
         if util.lightness(filter_pixels[row][col]) < args.bottom_threshold
         else constants.black_pixel
         for col in range(width)]
        for row in range(height)
    ]

    print("Cleaning up edges...")
    # Thin runs of edge pixels: keep only the leftmost of two neighbours.
    for row in range(height - 1, 1, -1):
        for col in range(width - 1, 1, -1):
            if (edge_pixels[row][col] == constants.black_pixel
                    and edge_pixels[row][col - 1] == constants.black_pixel):
                edge_pixels[row][col] = constants.white_pixel

    print("Defining intervals...")
    intervals = []
    for row in range(height):
        marks = [col for col in range(width)
                 if edge_pixels[row][col] == constants.black_pixel]
        marks.append(width)
        intervals.append(marks)
    return intervals
def threshold(pixels, args):
    """Define sort intervals at pixels whose lightness falls outside
    ``(args.bottom_threshold, args.upper_threshold)``.

    Returns one list of boundary x positions per row, each terminated by
    the row width.
    """
    print("Defining intervals...")
    height, width = len(pixels), len(pixels[0])
    intervals = []
    for row in range(height):
        marks = [col for col in range(width)
                 if util.lightness(pixels[row][col]) < args.bottom_threshold
                 or util.lightness(pixels[row][col]) > args.upper_threshold]
        marks.append(width)
        intervals.append(marks)
    return intervals
def random(pixels, args):
    """Define sort intervals of random widths derived from ``args.clength``.

    Each row is cut at cumulative random offsets (via util.random_width),
    with the final boundary clamped to the row width.
    """
    print("Defining intervals...")
    row_length = len(pixels[0])
    intervals = []
    for _ in range(len(pixels)):
        marks = []
        position = 0
        while True:
            position += util.random_width(args.clength)
            if position > row_length:
                marks.append(row_length)
                break
            marks.append(position)
        intervals.append(marks)
    return intervals
def waves(pixels, args):
intervals = []
print("Defining intervals...")
for y in range(len(pixels)):
intervals.append([])
x = 0
while True:
width = args.clength + rand.randint(0, 10)
x += width
if x > len(pixels[0]):
intervals[y].append(len(pixels[0]))
break
else:
intervals[y].append(x)
return intervals
def file_mask(pixels, args):
    """Define sort intervals from a black/white mask image.

    Black pixels in ``args.interval_file_path`` mark interval boundaries;
    adjacent black columns are thinned to one, and every row's boundary
    list is terminated by the row width.
    """
    mask = Image.open(args.interval_file_path).convert('RGBA')
    data = mask.load()
    file_pixels = [[data[col, row] for col in range(mask.size[0])]
                   for row in range(mask.size[1])]

    print("Cleaning up edges...")
    height, width = len(pixels), len(pixels[0])
    # Thin runs of black pixels: keep only the leftmost of two neighbours.
    for row in range(height - 1, 1, -1):
        for col in range(width - 1, 1, -1):
            if (file_pixels[row][col] == constants.black_pixel
                    and file_pixels[row][col - 1] == constants.black_pixel):
                file_pixels[row][col] = constants.white_pixel

    print("Defining intervals...")
    intervals = []
    for row in range(height):
        marks = [col for col in range(width)
                 if file_pixels[row][col] == constants.black_pixel]
        marks.append(width)
        intervals.append(marks)
    return intervals
def file_edges(pixels, args):
    """Define sort intervals from the edges of a separate interval image.

    The image at ``args.interval_file_path`` is rotated, resized to the
    working image's dimensions, edge-filtered, thresholded against
    ``args.bottom_threshold`` and thinned; per row the surviving edge
    positions (plus the row width) become the interval boundaries.
    """
    guide = Image.open(args.interval_file_path)
    guide = guide.rotate(args.angle, expand=True)
    # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (Image.LANCZOS
    # is the modern equivalent) — confirm the pinned Pillow version.
    guide = guide.resize((len(pixels[0]), len(pixels)), Image.ANTIALIAS)
    edge_data = guide.filter(ImageFilter.FIND_EDGES).convert('RGBA').load()

    print("Defining edges...")
    filter_pixels = [[edge_data[col, row] for col in range(guide.size[0])]
                     for row in range(guide.size[1])]

    print("Thresholding...")
    height, width = len(pixels), len(pixels[0])
    edge_pixels = [
        [constants.white_pixel
         if util.lightness(filter_pixels[row][col]) < args.bottom_threshold
         else constants.black_pixel
         for col in range(width)]
        for row in range(height)
    ]

    print("Cleaning up edges...")
    # Thin runs of edge pixels: keep only the leftmost of two neighbours.
    for row in range(height - 1, 1, -1):
        for col in range(width - 1, 1, -1):
            if (edge_pixels[row][col] == constants.black_pixel
                    and edge_pixels[row][col - 1] == constants.black_pixel):
                edge_pixels[row][col] = constants.white_pixel

    print("Defining intervals...")
    intervals = []
    for row in range(height):
        marks = [col for col in range(width)
                 if edge_pixels[row][col] == constants.black_pixel]
        marks.append(width)
        intervals.append(marks)
    return intervals
def none(pixels, args):
    """Define a single full-width interval per row (no sorting breaks)."""
    return [[len(row)] for row in pixels]
| 30.418994 | 123 | 0.57741 | 714 | 5,445 | 4.310924 | 0.102241 | 0.093567 | 0.068226 | 0.109162 | 0.867771 | 0.866472 | 0.837882 | 0.821637 | 0.790773 | 0.741715 | 0 | 0.012723 | 0.278237 | 5,445 | 178 | 124 | 30.589888 | 0.770483 | 0 | 0 | 0.802721 | 0 | 0 | 0.048118 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0 | 0.047619 | 0 | 0.142857 | 0.088435 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
945cbd89cb1a36ed495ccaa59d212af89ec44942 | 2,438 | py | Python | pynes/tests/cpx_test.py | BmanisKing/Mine | d33fc14c84af336ae16fefda2dcebc9f0e1e12ab | [
"BSD-3-Clause"
] | null | null | null | pynes/tests/cpx_test.py | BmanisKing/Mine | d33fc14c84af336ae16fefda2dcebc9f0e1e12ab | [
"BSD-3-Clause"
] | null | null | null | pynes/tests/cpx_test.py | BmanisKing/Mine | d33fc14c84af336ae16fefda2dcebc9f0e1e12ab | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
'''
CPX, Compare with X Test
'''
import unittest
from pynes.compiler import lexical, syntax, semantic
class CpxTest(unittest.TestCase):
    """Tests assembling the 6502 CPX (compare with X register) instruction
    in its immediate, zero-page and absolute addressing modes.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual`` and was
    removed in Python 3.12; all assertions now use ``assertEqual``.
    """

    def test_cpx_imm(self):
        """CPX with a hex immediate assembles to 0xE0 + operand."""
        tokens = list(lexical('CPX #$10'))
        self.assertEqual(2, len(tokens))
        self.assertEqual('T_INSTRUCTION', tokens[0]['type'])
        self.assertEqual('T_HEX_NUMBER', tokens[1]['type'])
        ast = syntax(tokens)
        self.assertEqual(1, len(ast))
        self.assertEqual('S_IMMEDIATE', ast[0]['type'])
        code = semantic(ast)
        self.assertEqual(code, [0xe0, 0x10])

    def test_cpx_imm_with_decimal(self):
        """CPX with a decimal immediate assembles to 0xE0 + operand."""
        tokens = list(lexical('CPX #10'))
        self.assertEqual(2, len(tokens))
        self.assertEqual('T_INSTRUCTION', tokens[0]['type'])
        self.assertEqual('T_DECIMAL_NUMBER', tokens[1]['type'])
        ast = syntax(tokens)
        self.assertEqual(1, len(ast))
        self.assertEqual('S_IMMEDIATE', ast[0]['type'])
        code = semantic(ast)
        self.assertEqual(code, [0xe0, 0x0a])

    def test_cpx_imm_with_binary(self):
        """CPX with a binary immediate assembles to 0xE0 + operand."""
        tokens = list(lexical('CPX #%00000100'))
        self.assertEqual(2, len(tokens))
        self.assertEqual('T_INSTRUCTION', tokens[0]['type'])
        self.assertEqual('T_BINARY_NUMBER', tokens[1]['type'])
        ast = syntax(tokens)
        self.assertEqual(1, len(ast))
        self.assertEqual('S_IMMEDIATE', ast[0]['type'])
        code = semantic(ast)
        self.assertEqual(code, [0xe0, 0x04])

    def test_cpx_zp(self):
        """CPX with a zero-page address assembles to 0xE4 + operand."""
        tokens = list(lexical('CPX $00'))
        self.assertEqual(2, len(tokens))
        self.assertEqual('T_INSTRUCTION', tokens[0]['type'])
        self.assertEqual('T_ADDRESS', tokens[1]['type'])
        ast = syntax(tokens)
        self.assertEqual(1, len(ast))
        self.assertEqual('S_ZEROPAGE', ast[0]['type'])
        code = semantic(ast)
        self.assertEqual(code, [0xe4, 0x00])

    def test_cpx_abs(self):
        """CPX with a 16-bit address assembles to 0xEC + little-endian operand."""
        tokens = list(lexical('CPX $1234'))
        self.assertEqual(2, len(tokens))
        self.assertEqual('T_INSTRUCTION', tokens[0]['type'])
        self.assertEqual('T_ADDRESS', tokens[1]['type'])
        self.assertEqual('$1234', tokens[1]['value'])
        ast = syntax(tokens)
        self.assertEqual(1, len(ast))
        self.assertEqual('S_ABSOLUTE', ast[0]['type'])
        code = semantic(ast)
        self.assertEqual(code, [0xec, 0x34, 0x12])
| 35.852941 | 64 | 0.611977 | 295 | 2,438 | 4.949153 | 0.2 | 0.339726 | 0.150685 | 0.071918 | 0.810959 | 0.738356 | 0.738356 | 0.738356 | 0.738356 | 0.679452 | 0 | 0.039662 | 0.224364 | 2,438 | 67 | 65 | 36.38806 | 0.732417 | 0.019278 | 0 | 0.555556 | 0 | 0 | 0.123374 | 0 | 0 | 0 | 0.018464 | 0 | 0.574074 | 1 | 0.092593 | false | 0 | 0.037037 | 0 | 0.148148 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
94716b18f3069f4e2e455aebfdc237a9ae7c54e4 | 3,162 | py | Python | tests/test_yen_parser.py | kendimaru/yen-parser | f15d5c6a481beff0e10855204cdd0bbaa7a6040e | [
"MIT"
] | null | null | null | tests/test_yen_parser.py | kendimaru/yen-parser | f15d5c6a481beff0e10855204cdd0bbaa7a6040e | [
"MIT"
] | null | null | null | tests/test_yen_parser.py | kendimaru/yen-parser | f15d5c6a481beff0e10855204cdd0bbaa7a6040e | [
"MIT"
] | null | null | null | import pytest
from yen_parser import __version__
from yen_parser import parse_yen
def test_version():
    """The packaged version string matches the expected release number."""
    expected_version = '0.1.0'
    assert __version__ == expected_version
def test_only_numeric():
    """Plain digit strings of increasing length parse to their integer value."""
    expected_by_text = {
        "0": 0,
        "1": 1,
        "21": 21,
        "321": 321,
        "4321": 4_321,
        "54321": 54_321,
        "654321": 654_321,
        "7654321": 7_654_321,
        "87654321": 87_654_321,
        "987654321": 987_654_321,
        "1987654321": 1_987_654_321,
    }
    for text, expected in expected_by_text.items():
        assert parse_yen(text) == expected
def test_with_comma():
    """Thousands separators in the conventional positions are accepted."""
    expected_by_text = {
        "4,321": 4_321,
        "54,321": 54_321,
        "654,321": 654_321,
        "7,654,321": 7_654_321,
        "87,654,321": 87_654_321,
        "987,654,321": 987_654_321,
        "1,987,654,321": 1_987_654_321,
    }
    for text, expected in expected_by_text.items():
        assert parse_yen(text) == expected
def test_with_currency_symbol():
    """A leading yen sign on a plain digit string is accepted."""
    expected_by_text = {
        "¥1": 1,
        "¥21": 21,
        "¥321": 321,
        "¥4321": 4_321,
        "¥54321": 54_321,
        "¥654321": 654_321,
        "¥7654321": 7_654_321,
        "¥87654321": 87_654_321,
        "¥987654321": 987_654_321,
        "¥1987654321": 1_987_654_321,
    }
    for text, expected in expected_by_text.items():
        assert parse_yen(text) == expected
def test_with_comma_and_currency_symbol():
    """A leading yen sign combined with thousands separators is accepted."""
    expected_by_text = {
        "¥4,321": 4_321,
        "¥54,321": 54_321,
        "¥654,321": 654_321,
        "¥7,654,321": 7_654_321,
        "¥87,654,321": 87_654_321,
        "¥987,654,321": 987_654_321,
        "¥1,987,654,321": 1_987_654_321,
    }
    for text, expected in expected_by_text.items():
        assert parse_yen(text) == expected
def test_non():
    """``None`` is rejected with TypeError."""
    pytest.raises(TypeError, parse_yen, None)
def test_not_str():
    """Non-string inputs (bytes, int) are rejected with TypeError."""
    for not_a_string in (b'1', 1):
        with pytest.raises(TypeError):
            parse_yen(not_a_string)
def test_empty():
    """The empty string is rejected with ValueError."""
    pytest.raises(ValueError, parse_yen, "")
def test_blank():
    """Whitespace-only strings (space, tab) are rejected with ValueError."""
    for blank in (" ", "\t"):
        with pytest.raises(ValueError):
            parse_yen(blank)
def test_with_currency_symbol_not_be_ware_of():
    """A non-yen currency symbol (e.g. '$') is rejected with ValueError."""
    pytest.raises(ValueError, parse_yen, "$4,321")
def test_illegal_position_commas():
    """Commas outside the conventional thousands positions are rejected."""
    for malformed in ("43,21", "5,4321"):
        with pytest.raises(ValueError):
            parse_yen(malformed)
def test_illegal_position_currency_symbol():
    """A trailing yen sign is rejected with ValueError."""
    pytest.raises(ValueError, parse_yen, "4,321¥")
def test_contain_blank_characters():
    """Leading or trailing whitespace around the amount is rejected."""
    for padded in (" ¥4,321", "¥4,321 ", " 4321", "4321 ", "\t4321"):
        with pytest.raises(ValueError):
            parse_yen(padded)
def test_contain_decimal_point_characters():
with pytest.raises(ValueError):
parse_yen("1.1")
with pytest.raises(ValueError):
parse_yen("4.321")
| 23.422222 | 55 | 0.659077 | 478 | 3,162 | 4.087866 | 0.123431 | 0.216991 | 0.250768 | 0.226203 | 0.822416 | 0.753838 | 0.509724 | 0.311157 | 0.252815 | 0.123849 | 0 | 0.184263 | 0.20019 | 3,162 | 134 | 56 | 23.597015 | 0.580467 | 0 | 0 | 0.264368 | 0 | 0 | 0.101234 | 0 | 0 | 0 | 0 | 0 | 0.413793 | 1 | 0.16092 | true | 0 | 0.034483 | 0 | 0.195402 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
84bed359b2b1bd562b2b53cb3ea00143b807755a | 39 | py | Python | generateattckdocs/generateattckdocs/__init__.py | timb-machine/pyattck | 1636c9191a92fa28e2cc03f8f04b85195070f0b9 | [
"MIT"
] | null | null | null | generateattckdocs/generateattckdocs/__init__.py | timb-machine/pyattck | 1636c9191a92fa28e2cc03f8f04b85195070f0b9 | [
"MIT"
] | null | null | null | generateattckdocs/generateattckdocs/__init__.py | timb-machine/pyattck | 1636c9191a92fa28e2cc03f8f04b85195070f0b9 | [
"MIT"
] | null | null | null | from .generatedocs import GenerateDocs
| 19.5 | 38 | 0.871795 | 4 | 39 | 8.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102564 | 39 | 1 | 39 | 39 | 0.971429 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
84c31751d52945c2cfea669190c9b3bd6fd52c8c | 223 | py | Python | src/odyssey_tests/test_data/package_one/module_four_compatibility/module_four_37.py | GodwinneLorayne/odyssey | b5576818d70bea011772b944a4dd947777a5ac2f | [
"MIT"
] | 1 | 2020-06-01T20:52:37.000Z | 2020-06-01T20:52:37.000Z | src/odyssey_tests/test_data/package_one/module_four_compatibility/module_four_37.py | GodwinneLorayne/odyssey | b5576818d70bea011772b944a4dd947777a5ac2f | [
"MIT"
] | 4 | 2020-06-06T04:50:24.000Z | 2021-02-03T07:14:49.000Z | src/odyssey_tests/test_data/package_one/module_four_compatibility/module_four_37.py | python-odyssey/odyssey | b5576818d70bea011772b944a4dd947777a5ac2f | [
"MIT"
] | 1 | 2020-05-30T21:59:11.000Z | 2020-05-30T21:59:11.000Z | def parameter_kind_function(
positional_or_keyword, *var_positional, keyword_only, **var_keyword,
):
return (
positional_or_keyword,
*var_positional,
keyword_only,
var_keyword,
)
| 22.3 | 72 | 0.663677 | 23 | 223 | 5.913043 | 0.434783 | 0.176471 | 0.279412 | 0.323529 | 0.779412 | 0.779412 | 0.779412 | 0.779412 | 0.779412 | 0 | 0 | 0 | 0.26009 | 223 | 9 | 73 | 24.777778 | 0.824242 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0 | 0.111111 | 0.222222 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 9 |
84c4ca137e1b4cd7eb2d5e0ced9da56a1f8c1d37 | 176 | py | Python | app/routes.py | MichielVanthoor/habit-tracker | bfc86cae204047dd8775f6f75c78050cbe0d2390 | [
"Apache-2.0"
] | null | null | null | app/routes.py | MichielVanthoor/habit-tracker | bfc86cae204047dd8775f6f75c78050cbe0d2390 | [
"Apache-2.0"
] | null | null | null | app/routes.py | MichielVanthoor/habit-tracker | bfc86cae204047dd8775f6f75c78050cbe0d2390 | [
"Apache-2.0"
] | null | null | null | from app import app
from flask import render_template
# Overview of all the routes
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
| 17.6 | 40 | 0.721591 | 26 | 176 | 4.807692 | 0.615385 | 0.224 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.147727 | 176 | 9 | 41 | 19.555556 | 0.833333 | 0.147727 | 0 | 0 | 0 | 0 | 0.114865 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | true | 0 | 0.333333 | 0.166667 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
84de3b09b03a247743be16591e5ad877524ae53b | 173 | py | Python | codewars/8kyu/doha22/kata8/double_int/test.py | doha22/Training_one | 0cd7cf86c7da0f6175834146296b763d1841766b | [
"MIT"
] | null | null | null | codewars/8kyu/doha22/kata8/double_int/test.py | doha22/Training_one | 0cd7cf86c7da0f6175834146296b763d1841766b | [
"MIT"
] | 2 | 2019-01-22T10:53:42.000Z | 2019-01-31T08:02:48.000Z | codewars/8kyu/doha22/kata8/double_int/test.py | doha22/Training_one | 0cd7cf86c7da0f6175834146296b763d1841766b | [
"MIT"
] | 13 | 2019-01-22T10:37:42.000Z | 2019-01-25T13:30:43.000Z | import unittest
from double_int import double_integer
def test_DNAtoRNA(benchmark):
assert benchmark(double_integer,2) == 4
assert benchmark(double_integer,3) == 6
| 24.714286 | 43 | 0.780347 | 24 | 173 | 5.416667 | 0.625 | 0.3 | 0.323077 | 0.430769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027027 | 0.144509 | 173 | 6 | 44 | 28.833333 | 0.851351 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.4 | 1 | 0.2 | false | 0 | 0.4 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
84f6d76895522fed245345955541ee829ca3edca | 16,660 | py | Python | model_compiler/tests/model_compiler/compilers/test_keras_model_to_tf_model.py | yuanliya/Adlik | 602074b44064002fc0bb054e17a989a5bcf22e92 | [
"Apache-2.0"
] | 548 | 2019-09-27T07:37:47.000Z | 2022-03-31T05:12:38.000Z | model_compiler/tests/model_compiler/compilers/test_keras_model_to_tf_model.py | yuanliya/Adlik | 602074b44064002fc0bb054e17a989a5bcf22e92 | [
"Apache-2.0"
] | 533 | 2019-09-27T06:30:41.000Z | 2022-03-29T07:34:08.000Z | model_compiler/tests/model_compiler/compilers/test_keras_model_to_tf_model.py | yuanliya/Adlik | 602074b44064002fc0bb054e17a989a5bcf22e92 | [
"Apache-2.0"
] | 54 | 2019-10-10T02:19:31.000Z | 2021-12-28T03:37:45.000Z | # Copyright 2019 ZTE corporation. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
from unittest import TestCase
import tensorflow as tf
from tensorflow import keras
import model_compiler.compilers.keras_model_to_tf_model as compiler
from model_compiler.compilers.keras_model_to_tf_model import KerasModel
from model_compiler.keras_util import Config, NodeSpec
from model_compiler.models.irs.tf_model import DataFormat
class NodeSpecTestCase(TestCase):
def test_name_only(self):
self.assertEqual(NodeSpec.from_str('abc'), NodeSpec(layer_name='abc', node_index=0))
def test_name_and_node_index(self):
self.assertEqual(NodeSpec.from_str('abc:4'), NodeSpec(layer_name='abc', node_index=4))
def test_invalid_name(self):
with self.assertRaises(ValueError):
NodeSpec.from_str('abc:4:3')
class ConfigTestCase(TestCase):
def test_from_json_minimal(self):
config = Config.from_json({})
self.assertEqual(config, Config(input_nodes=None, output_nodes=None))
def test_from_json_input_names_only(self):
config = Config.from_json({'input_layer_names': ['abc', 'def:3']})
self.assertEqual(config, Config(input_nodes=[NodeSpec(layer_name='abc'),
NodeSpec(layer_name='def', node_index=3)],
output_nodes=None))
def test_from_json_output_names_only(self):
config = Config.from_json({'output_layer_names': ['abc', 'def:3']})
self.assertEqual(config, Config(input_nodes=None,
output_nodes=[NodeSpec(layer_name='abc'),
NodeSpec(layer_name='def', node_index=3)]))
def test_from_json_full(self):
config = Config.from_json({'input_layer_names': ['abc', 'def:3'],
'output_layer_names': ['ghi:2', 'jkl']})
self.assertEqual(config, Config(input_nodes=[NodeSpec(layer_name='abc'),
NodeSpec(layer_name='def', node_index=3)],
output_nodes=[NodeSpec(layer_name='ghi', node_index=2),
NodeSpec(layer_name='jkl')]))
def test_from_env_minimal(self):
config = Config.from_env({})
self.assertEqual(config, Config(input_nodes=None, output_nodes=None))
def test_from_env_input_names_only(self):
config = Config.from_env({'INPUT_LAYER_NAMES': 'abc,def:3'})
self.assertEqual(config, Config(input_nodes=[NodeSpec(layer_name='abc'),
NodeSpec(layer_name='def', node_index=3)],
output_nodes=None))
def test_from_env_output_names_only(self):
config = Config.from_env({'OUTPUT_LAYER_NAMES': 'abc,def:3'})
self.assertEqual(config, Config(input_nodes=None,
output_nodes=[NodeSpec(layer_name='abc'),
NodeSpec(layer_name='def', node_index=3)]))
def test_from_env_full(self):
config = Config.from_env({'INPUT_LAYER_NAMES': 'abc,def:3',
'OUTPUT_LAYER_NAMES': 'ghi:2,jkl'})
self.assertEqual(config, Config(input_nodes=[NodeSpec(layer_name='abc'),
NodeSpec(layer_name='def', node_index=3)],
output_nodes=[NodeSpec(layer_name='ghi', node_index=2),
NodeSpec(layer_name='jkl')]))
class CompileSourceTestCase(TestCase):
def test_compile_simple(self):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
model = KerasModel(model=keras.Sequential([keras.layers.Dense(units=4, input_shape=[8])]), session=session)
compiled = compiler.compile_source(source=model, config=Config())
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, model.model.input)
self.assertIsNone(compiled.inputs[0].data_format)
self.assertEqual(len(compiled.outputs), 1)
self.assertIs(compiled.outputs[0], model.model.output)
self.assertIs(compiled.session, session)
def test_compile_simple_with_input_layer(self):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
model = KerasModel(model=keras.Sequential([keras.layers.InputLayer(input_shape=[8]),
keras.layers.Dense(units=4)]),
session=session)
compiled = compiler.compile_source(source=model, config=Config())
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, model.model.input)
self.assertIsNone(compiled.inputs[0].data_format)
self.assertEqual(len(compiled.outputs), 1)
self.assertIs(compiled.outputs[0], model.model.output)
self.assertIs(compiled.session, session)
def test_compile_with_input_name(self):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
origin_model = keras.Sequential()
origin_model.add(keras.layers.Dense(units=8, name='l1', input_shape=(16,)))
origin_model.add(keras.layers.Dense(units=4, name='l2'))
origin_model.add(keras.layers.Dense(units=2, name='l3'))
model = KerasModel(model=origin_model, session=session)
compiled = compiler.compile_source(source=model, config=Config(input_nodes=[NodeSpec(layer_name='l2')]))
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, origin_model.layers[1].input)
self.assertIsNone(compiled.inputs[0].data_format)
self.assertEqual(len(compiled.outputs), 1)
self.assertIs(compiled.outputs[0], model.model.output)
self.assertIs(compiled.session, session)
def test_compile_with_input_name_to_input_layer(self):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
input_tensor = keras.layers.Input(shape=[16], name='l0')
output_tensor = keras.layers.Dense(units=8)(input_tensor)
model = KerasModel(model=keras.Model(inputs=[input_tensor], outputs=[output_tensor]), session=session)
compiled = compiler.compile_source(source=model, config=Config(input_nodes=[NodeSpec(layer_name='l0')]))
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, model.model.input)
self.assertIsNone(compiled.inputs[0].data_format)
self.assertEqual(len(compiled.outputs), 1)
self.assertIs(compiled.outputs[0], model.model.output)
self.assertIs(compiled.session, session)
def test_compile_with_output_name(self):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
origin_model = keras.Sequential()
origin_model.add(keras.layers.Dense(units=8, name='l1', input_shape=(16,)))
origin_model.add(keras.layers.Dense(units=4, name='l2'))
origin_model.add(keras.layers.Dense(units=2, name='l3'))
model = KerasModel(model=origin_model, session=session)
compiled = compiler.compile_source(source=model, config=Config(output_nodes=[NodeSpec(layer_name='l2')]))
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, model.model.input)
self.assertIsNone(compiled.inputs[0].data_format)
self.assertEqual(len(compiled.outputs), 1)
self.assertIs(compiled.outputs[0], origin_model.layers[1].output)
self.assertIs(compiled.session, session)
def test_compile_simple_with_data_format(self):
for (keras_data_format, data_format) in [('channels_first', DataFormat.CHANNELS_FIRST),
('channels_last', DataFormat.CHANNELS_LAST)]:
with self.subTest(data_format=keras_data_format):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
model = KerasModel(model=keras.Sequential([keras.layers.Conv2D(filters=4,
kernel_size=(3, 3),
data_format=keras_data_format,
input_shape=(28, 28, 3))]),
session=session)
compiled = compiler.compile_source(source=model, config=Config())
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, model.model.input)
self.assertEqual(compiled.inputs[0].data_format, data_format)
self.assertEqual(len(compiled.outputs), 1)
self.assertIs(compiled.outputs[0], model.model.output)
self.assertIs(compiled.session, session)
def test_compile_with_direct_layer_data_format(self):
for (keras_data_format, data_format) in [('channels_first', DataFormat.CHANNELS_FIRST),
('channels_last', DataFormat.CHANNELS_LAST)]:
with self.subTest(data_format=keras_data_format):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
input_x = keras.layers.Input(shape=(28, 28, 3), name='l0')
output_y = keras.layers.Conv2D(filters=4,
kernel_size=(3, 3),
data_format=keras_data_format,
name='l1')(input_x)
model = KerasModel(model=keras.Model(inputs=[input_x], outputs=[output_y]), session=session)
compiled = compiler.compile_source(source=model, config=Config(input_nodes=[NodeSpec(layer_name='l0')]))
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, model.model.input)
self.assertEqual(compiled.inputs[0].data_format, data_format)
self.assertEqual(len(compiled.outputs), 1)
self.assertIs(compiled.outputs[0], model.model.output)
self.assertIs(compiled.session, session)
def test_compile_with_input_layer_data_format(self):
for (keras_data_format, data_format) in [('channels_first', DataFormat.CHANNELS_FIRST),
('channels_last', DataFormat.CHANNELS_LAST)]:
with self.subTest(data_format=keras_data_format):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
input_x = keras.layers.Input(shape=(28, 28, 3), name='l0')
output_y = keras.layers.Conv2D(filters=4,
kernel_size=(3, 3),
data_format=keras_data_format,
name='l1')(input_x)
model = KerasModel(model=keras.Model(inputs=[input_x], outputs=[output_y]), session=session)
compiled = compiler.compile_source(source=model, config=Config(input_nodes=[NodeSpec(layer_name='l0')]))
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, model.model.input)
self.assertEqual(compiled.inputs[0].data_format, data_format)
self.assertEqual(len(compiled.outputs), 1)
self.assertIs(compiled.outputs[0], model.model.output)
self.assertIs(compiled.session, session)
def test_compile_with_consistent_data_format(self):
for (keras_data_format, data_format) in [('channels_first', DataFormat.CHANNELS_FIRST),
('channels_last', DataFormat.CHANNELS_LAST)]:
with self.subTest(data_format=keras_data_format):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
input_x = keras.layers.Input(shape=(28, 28, 3), name='l0')
output_y_1 = keras.layers.Conv2D(filters=4,
kernel_size=(3, 3),
data_format=keras_data_format,
name='l1')(input_x)
output_y_2 = keras.layers.Conv2D(filters=4,
kernel_size=(3, 3),
data_format=keras_data_format,
name='l2')(input_x)
model = KerasModel(model=keras.Model(inputs=[input_x], outputs=[output_y_1, output_y_2]),
session=session)
compiled = compiler.compile_source(source=model, config=Config(input_nodes=[NodeSpec(layer_name='l0')]))
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, model.model.input)
self.assertEqual(compiled.inputs[0].data_format, data_format)
self.assertEqual(len(compiled.outputs), 2)
self.assertIs(compiled.outputs[0], model.model.outputs[0])
self.assertIs(compiled.outputs[1], model.model.outputs[1])
self.assertIs(compiled.session, session)
def test_compile_with_conflicting_data_format_1(self):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
input_x = keras.layers.Input(shape=(28, 28, 3), name='l0')
input_y_1 = keras.layers.Conv2D(filters=4,
kernel_size=(3, 3),
data_format='channels_first',
name='l1')(input_x)
input_y_2 = keras.layers.Conv2D(filters=4,
kernel_size=(3, 3),
data_format='channels_last',
name='l2')(input_x)
model = KerasModel(model=keras.Model(inputs=[input_x], outputs=[input_y_1, input_y_2]), session=session)
compiled = compiler.compile_source(source=model, config=Config(input_nodes=[NodeSpec(layer_name='l0')]))
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, model.model.input)
self.assertIsNone(compiled.inputs[0].data_format)
self.assertEqual(len(compiled.outputs), 2)
self.assertIs(compiled.outputs[0], model.model.outputs[0])
self.assertIs(compiled.outputs[1], model.model.outputs[1])
self.assertIs(compiled.session, session)
def test_compile_with_conflicting_data_format_2(self):
with tf.Graph().as_default(), tf.compat.v1.Session().as_default() as session:
input_x = keras.layers.Input(shape=(28, 28, 3), name='l0')
input_y_1 = keras.layers.Conv2D(filters=4,
kernel_size=(3, 3),
data_format='channels_last',
name='l1')(input_x)
input_y_2 = keras.layers.Conv2D(filters=4,
kernel_size=(3, 3),
data_format='channels_first',
name='l2')(input_x)
model = KerasModel(model=keras.Model(inputs=[input_x], outputs=[input_y_1, input_y_2]), session=session)
compiled = compiler.compile_source(source=model, config=Config(input_nodes=[NodeSpec(layer_name='l0')]))
self.assertEqual(len(compiled.inputs), 1)
self.assertIs(compiled.inputs[0].tensor, model.model.input)
self.assertIsNone(compiled.inputs[0].data_format)
self.assertEqual(len(compiled.outputs), 2)
self.assertIs(compiled.outputs[0], model.model.outputs[0])
self.assertIs(compiled.outputs[1], model.model.outputs[1])
self.assertIs(compiled.session, session)
| 50.638298 | 120 | 0.583133 | 1,856 | 16,660 | 5.02694 | 0.060884 | 0.054662 | 0.07717 | 0.061308 | 0.924544 | 0.90686 | 0.900643 | 0.875563 | 0.866774 | 0.866774 | 0 | 0.019628 | 0.2997 | 16,660 | 328 | 121 | 50.792683 | 0.780063 | 0.005282 | 0 | 0.741525 | 0 | 0 | 0.029875 | 0 | 0 | 0 | 0 | 0 | 0.338983 | 1 | 0.09322 | false | 0 | 0.029661 | 0 | 0.135593 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
84fe52d977f885df4d987c50d75534e88592e5ee | 172 | py | Python | anaconda-mode/0.1.13/jedi-0.15.1-py3.7.egg/jedi/evaluate/parser_cache.py | space-scl/emacs.d | 6285c38714023b72a023fe24cbcb5e4fcdcdb949 | [
"Apache-2.0"
] | 6,989 | 2017-07-18T06:23:18.000Z | 2022-03-31T15:58:36.000Z | anaconda-mode/0.1.13/jedi-0.15.1-py3.7.egg/jedi/evaluate/parser_cache.py | space-scl/emacs.d | 6285c38714023b72a023fe24cbcb5e4fcdcdb949 | [
"Apache-2.0"
] | 1,978 | 2017-07-18T09:17:58.000Z | 2022-03-31T14:28:43.000Z | anaconda-mode/0.1.13/jedi-0.15.1-py3.7.egg/jedi/evaluate/parser_cache.py | space-scl/emacs.d | 6285c38714023b72a023fe24cbcb5e4fcdcdb949 | [
"Apache-2.0"
] | 1,228 | 2017-07-18T09:03:13.000Z | 2022-03-29T05:57:40.000Z | from jedi.evaluate.cache import evaluator_function_cache
@evaluator_function_cache()
def get_yield_exprs(evaluator, funcdef):
return list(funcdef.iter_yield_exprs())
| 24.571429 | 56 | 0.825581 | 23 | 172 | 5.826087 | 0.652174 | 0.253731 | 0.328358 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093023 | 172 | 6 | 57 | 28.666667 | 0.858974 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0.25 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
046a48cd20421a4d3b5e19f4ce1e4296162e3abe | 116,013 | py | Python | examples/TWATCH-2020/mono_fonts/inconsolata_64.py | jdtsmith/st7789_mpy | 1fdfc076861eff7bb1c318e0ae335026b3cb9cb1 | [
"MIT"
] | 153 | 2020-02-02T11:03:14.000Z | 2022-03-30T05:47:07.000Z | examples/TWATCH-2020/mono_fonts/inconsolata_64.py | skylin008/st7789_mpy | f304991fc5558be653df5f0de928494b85cbc60d | [
"MIT"
] | 58 | 2020-04-11T23:23:02.000Z | 2022-03-26T20:45:23.000Z | examples/TWATCH-2020/mono_fonts/inconsolata_64.py | skylin008/st7789_mpy | f304991fc5558be653df5f0de928494b85cbc60d | [
"MIT"
] | 50 | 2020-02-02T11:05:23.000Z | 2022-03-22T15:24:42.000Z | # coding=UTF8
# Converted from inconsolata-700.ttf using:
# python3 monofont2bitmap.py inconsolata-700.ttf 64 1 -c 0x20-0x7f -f white -b blue
HEIGHT = 66
WIDTH = 32
COLORS = 2
BITMAPS = 96
MAP = " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
BPP = 1
PALETTE = [0xffff, 0x1f00]
_BITMAP =\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x3f\xff\xff\xf0\x1f\xff'\
b'\xff\xe0\x1f\xff\xff\xe0\x0f\xff\xff\xe0\x0f\xff\xff\xe0\x0f\xff'\
b'\xff\xe0\x0f\xff\xff\xe0\x0f\xff\xff\xe0\x0f\xff\xff\xe0\x0f\xff'\
b'\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x3f\xff\xff\xf0\x3f\xff\xff\xf0\x3f\xff\xff\xf0\x3f\xff'\
b'\xff\xf8\x3f\xff\xff\xf8\x3f\xff\xff\xf8\x3f\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xf8\x3f\xff\xff\xf0\x1f\xff\xff\xe0\x0f\xff\xff\xc0\x0f\xff'\
b'\xff\xc0\x07\xff\xff\xc0\x0f\xff\xff\xe0\x0f\xff\xff\xe0\x1f\xff'\
b'\xff\xf0\x3f\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff'\
b'\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff'\
b'\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff'\
b'\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x87\xe1\xff\xff\x87\xe1\xff'\
b'\xff\x87\xe1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xc0\xf8\x1f\xff\xc0\xf8\x1f\xff\xc0\xf8\x1f'\
b'\xff\x81\xf8\x1f\xff\x81\xf0\x1f\xff\x81\xf0\x1f\xff\x81\xf0\x3f'\
b'\xff\x81\xf0\x3f\xff\x81\xf0\x3f\xff\x81\xf0\x3f\xc0\x00\x00\x01'\
b'\xc0\x00\x00\x01\xc0\x00\x00\x01\xc0\x00\x00\x01\xc0\x00\x00\x01'\
b'\xff\x03\xe0\x7f\xff\x03\xe0\x7f\xff\x03\xe0\x7f\xff\x03\xe0\x7f'\
b'\xff\x03\xe0\x7f\xff\x07\xe0\x7f\xfe\x07\xc0\x7f\xfe\x07\xc0\x7f'\
b'\xfe\x07\xc0\x7f\x80\x00\x00\x03\x80\x00\x00\x03\x80\x00\x00\x03'\
b'\x80\x00\x00\x03\x80\x00\x00\x07\xfc\x0f\xc0\xff\xfc\x0f\xc0\xff'\
b'\xfc\x0f\x80\xff\xfc\x0f\x81\xff\xfc\x0f\x81\xff\xfc\x0f\x81\xff'\
b'\xfc\x0f\x81\xff\xfc\x0f\x81\xff\xf8\x1f\x81\xff\xf8\x1f\x01\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xfe\x0f\xff\xff\xfe\x0f\xff\xff\xfe\x1f\xff'\
b'\xff\xfe\x1f\xff\xff\xf0\x03\xff\xff\xc0\x00\x7f\xff\x00\x00\x3f'\
b'\xfe\x00\x00\x0f\xfc\x00\x00\x07\xf8\x00\x00\x07\xf8\x0e\x0e\x0f'\
b'\xf0\x1e\x0f\x1f\xf0\x3e\x0f\xbf\xf0\x3e\x0f\xff\xf0\x1e\x0f\xff'\
b'\xf0\x1e\x0f\xff\xf8\x0e\x0f\xff\xf8\x02\x0f\xff\xfc\x00\x0f\xff'\
b'\xfc\x00\x0f\xff\xfe\x00\x03\xff\xff\x80\x00\xff\xff\xc0\x00\x7f'\
b'\xff\xf0\x00\x1f\xff\xfc\x00\x0f\xff\xfe\x00\x07\xff\xfe\x00\x07'\
b'\xff\xfe\x0c\x03\xff\xfe\x0e\x03\xff\xfe\x0f\x03\xff\xfe\x0f\x03'\
b'\xff\xfe\x0f\x03\xfc\xfe\x0f\x03\xf8\xfe\x0e\x03\xf8\x3e\x0e\x07'\
b'\xf0\x1e\x0c\x07\xf0\x00\x00\x0f\xf0\x00\x00\x0f\xf8\x00\x00\x1f'\
b'\xfe\x00\x00\x3f\xff\x80\x00\xff\xff\xf0\x03\xff\xff\xfe\x1f\xff'\
b'\xff\xfe\x1f\xff\xff\xfe\x1f\xff\xff\xfe\x1f\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xfc\x0f\xff\xff\xf8\x07\xff\x03\xe0\x03\xff\x07'\
b'\xe0\x01\xfe\x07\xc0\x00\xfe\x0f\xc1\xe0\xfc\x1f\x83\xe0\xf8\x1f'\
b'\x83\xe0\xf8\x3f\x83\xe0\x70\x3f\x83\xe0\xf0\x7f\x83\xe0\xe0\x7f'\
b'\xc1\xe0\xe0\xff\xc0\x00\xc1\xff\xe0\x01\x81\xff\xe0\x03\x83\xff'\
b'\xf0\x07\x03\xff\xfc\x0f\x07\xff\xff\xfe\x07\xff\xff\xfe\x0f\xff'\
b'\xff\xfc\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x3f\xff\xff\xf0\x3f\xff'\
b'\xff\xf0\x7f\xff\xff\xe0\xf8\x3f\xff\xc0\xe0\x0f\xff\xc1\xc0\x07'\
b'\xff\x81\x80\x03\xff\x83\x80\x03\xff\x03\x03\x81\xff\x07\x07\xc1'\
b'\xfe\x0f\x07\xc1\xfc\x0f\x07\xc1\xfc\x1f\x07\xc1\xf8\x1f\x07\xc1'\
b'\xf8\x3f\x03\x81\xf0\x3f\x80\x03\xf0\x7f\x80\x03\xe0\xff\xc0\x07'\
b'\xc0\xff\xe0\x0f\xff\xff\xf8\x3f\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x3f\xff'\
b'\xff\x80\x0f\xff\xff\x00\x07\xff\xfe\x00\x03\xff\xfc\x00\x01\xff'\
b'\xfc\x02\x01\xff\xf8\x0f\x80\xff\xf8\x0f\xc0\xff\xf8\x1f\xc0\xff'\
b'\xf8\x1f\xc0\xff\xf8\x0f\xc0\xff\xf8\x0f\x81\xff\xfc\x0f\x81\xff'\
b'\xfc\x07\x01\xff\xfe\x02\x03\xff\xfe\x00\x03\xff\xff\x00\x07\xff'\
b'\xff\x80\x0f\xff\xff\x80\x1f\xff\xfe\x00\x3f\xff\xfc\x00\x3f\xff'\
b'\xf8\x00\x1f\xdf\xf0\x00\x0f\xcf\xf0\x08\x0f\x83\xe0\x1c\x07\x81'\
b'\xe0\x3c\x03\x01\xc0\x7e\x03\x03\xc0\x7f\x00\x03\xc0\x7f\x00\x07'\
b'\xc0\x7f\x80\x07\xc0\x7f\xc0\x0f\xc0\x7f\xe0\x1f\xc0\x7f\xc0\x1f'\
b'\xc0\x3f\x80\x0f\xe0\x1f\x00\x07\xe0\x00\x00\x07\xf0\x00\x00\x03'\
b'\xf0\x00\x06\x03\xf8\x00\x0f\x0f\xfe\x00\x3f\x9f\xff\xc1\xff\xbf'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xfc\x3f\xff\xff\xfc\x3f\xff\xff\xfc\x3f\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\x7f\xff\xff\xfc\x7f\xff\xff\xf0\x3f\xff\xff\xe0\x3f'\
b'\xff\xff\x80\x3f\xff\xff\x00\x1f\xff\xfe\x00\x3f\xff\xfc\x00\xff'\
b'\xff\xf8\x01\xff\xff\xf0\x03\xff\xff\xf0\x0f\xff\xff\xe0\x1f\xff'\
b'\xff\xc0\x1f\xff\xff\xc0\x3f\xff\xff\x80\x7f\xff\xff\x80\x7f\xff'\
b'\xff\x00\xff\xff\xff\x01\xff\xff\xff\x01\xff\xff\xfe\x01\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x01\xff\xff\xff\x01\xff\xff\xff\x01\xff\xff'\
b'\xff\x00\xff\xff\xff\x80\xff\xff\xff\x80\x7f\xff\xff\x80\x7f\xff'\
b'\xff\xc0\x3f\xff\xff\xe0\x1f\xff\xff\xe0\x0f\xff\xff\xf0\x07\xff'\
b'\xff\xf8\x03\xff\xff\xf8\x01\xff\xff\xfc\x00\xff\xff\xfe\x00\x3f'\
b'\xff\xff\x00\x1f\xff\xff\xc0\x1f\xff\xff\xe0\x1f\xff\xff\xf8\x3f'\
b'\xff\xff\xfc\x3f\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xbf\xff\xff\xff\x0f\xff\xff'\
b'\xff\x03\xff\xff\xff\x01\xff\xff\xfe\x00\xff\xff\xfe\x00\x3f\xff'\
b'\xff\x00\x1f\xff\xff\x80\x0f\xff\xff\xe0\x07\xff\xff\xf0\x07\xff'\
b'\xff\xf8\x03\xff\xff\xfc\x01\xff\xff\xfe\x01\xff\xff\xff\x00\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x3f'\
b'\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x1f\xff\xff\xf0\x1f'\
b'\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f'\
b'\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f'\
b'\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x3f'\
b'\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xc0\x3f\xff\xff\xc0\x7f'\
b'\xff\xff\x80\x7f\xff\xff\x80\xff\xff\xff\x00\xff\xff\xfe\x01\xff'\
b'\xff\xfc\x01\xff\xff\xf8\x03\xff\xff\xf0\x07\xff\xff\xe0\x0f\xff'\
b'\xff\x80\x1f\xff\xff\x00\x3f\xff\xfe\x00\x7f\xff\xfe\x00\xff\xff'\
b'\xfe\x01\xff\xff\xff\x07\xff\xff\xff\x0f\xff\xff\xff\x3f\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xf0\x07\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xf3\xf8\x1f\xcf\xf0\xf8\x1f\x0f'\
b'\xe0\x38\x1c\x0f\xe0\x08\x10\x07\xe0\x00\x00\x07\xc0\x00\x00\x03'\
b'\xe0\x00\x00\x03\xfe\x00\x00\x3f\xff\xc0\x03\xff\xff\xe0\x07\xff'\
b'\xff\xc0\x03\xff\xff\xc0\x03\xff\xff\x80\x01\xff\xff\x01\x80\xff'\
b'\xfe\x01\x80\x7f\xfc\x03\xc0\x3f\xf8\x07\xc0\x1f\xf0\x07\xe0\x1f'\
b'\xf8\x0f\xf0\x3f\xfe\x0f\xf0\x7f\xff\x1f\xf8\xff\xff\xbf\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xe0\x00\x00\x03\xc0\x00\x00\x03\xc0\x00\x00\x03'\
b'\xc0\x00\x00\x03\xc0\x00\x00\x03\xc0\x00\x00\x03\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x3f\xff\xff\xf0\x1f\xff'\
b'\xff\xe0\x0f\xff\xff\xc0\x0f\xff\xff\xc0\x07\xff\xff\xc0\x07\xff'\
b'\xff\xe0\x07\xff\xff\xf0\x07\xff\xff\xf0\x07\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x3f\xff\xff\xe0\x3f\xff\xff\xc0\x7f\xff\xff\xe0\xff\xff'\
b'\xff\xf1\xff\xff\xff\xfb\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x00\x00\x7f\xfe\x00\x00\x7f'\
b'\xfe\x00\x00\x7f\xfe\x00\x00\x7f\xfe\x00\x00\x7f\xfe\x00\x00\x7f'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x3f\xff\xff\xf0\x1f\xff'\
b'\xff\xe0\x0f\xff\xff\xe0\x0f\xff\xff\xc0\x0f\xff\xff\xc0\x0f\xff'\
b'\xff\xe0\x0f\xff\xff\xe0\x1f\xff\xff\xf0\x3f\xff\xff\xfe\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xfc\x3f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf0\x0f'\
b'\xff\xff\xf0\x0f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xc0\x3f'\
b'\xff\xff\xc0\x3f\xff\xff\x80\x3f\xff\xff\x80\x7f\xff\xff\x00\x7f'\
b'\xff\xff\x00\xff\xff\xfe\x00\xff\xff\xfe\x01\xff\xff\xfe\x01\xff'\
b'\xff\xfc\x03\xff\xff\xfc\x03\xff\xff\xf8\x07\xff\xff\xf8\x07\xff'\
b'\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xe0\x0f\xff\xff\xe0\x1f\xff'\
b'\xff\xc0\x1f\xff\xff\xc0\x3f\xff\xff\xc0\x3f\xff\xff\x80\x7f\xff'\
b'\xff\x80\x7f\xff\xff\x00\xff\xff\xff\x00\xff\xff\xfe\x01\xff\xff'\
b'\xfe\x01\xff\xff\xfc\x03\xff\xff\xfc\x03\xff\xff\xf8\x03\xff\xff'\
b'\xf8\x07\xff\xff\xf8\x07\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff'\
b'\xe0\x1f\xff\xff\xf0\x1f\xff\xff\xfc\x3f\xff\xff\xff\x3f\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x0f\xff'\
b'\xff\xc0\x03\xff\xff\x80\x01\xff\xff\x00\x00\xff\xfe\x00\x00\x7f'\
b'\xfc\x00\x00\x3f\xf8\x03\xc0\x1f\xf8\x0f\xf0\x1f\xf0\x0f\xf0\x0f'\
b'\xf0\x1f\xf8\x0f\xf0\x1f\xf8\x0f\xe0\x3f\xf8\x07\xe0\x3f\xf0\x07'\
b'\xe0\x3f\xe0\x07\xe0\x3f\xc0\x07\xe0\x7f\xc0\x07\xe0\x7f\x80\x07'\
b'\xc0\x7f\x06\x03\xc0\x7e\x06\x03\xc0\x7c\x0e\x03\xc0\x78\x1e\x03'\
b'\xc0\x70\x3e\x03\xc0\x70\x7e\x03\xc0\x60\xfe\x03\xc0\x01\xfe\x07'\
b'\xe0\x03\xfe\x07\xe0\x03\xfc\x07\xe0\x07\xfc\x07\xe0\x0f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xf0\x1f\xf8\x0f\xf0\x1f\xf8\x0f\xf0\x0f\xf0\x0f'\
b'\xf8\x0f\xf0\x1f\xf8\x03\xc0\x1f\xfc\x00\x00\x3f\xfe\x00\x00\x7f'\
b'\xff\x00\x00\xff\xff\x80\x01\xff\xff\xc0\x03\xff\xff\xf8\x1f\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xfc\x07\xff\xff\xf0\x07\xff\xff\xc0\x07\xff'\
b'\xff\x00\x07\xff\xfe\x00\x07\xff\xf8\x00\x07\xff\xf8\x00\x07\xff'\
b'\xf8\x00\x07\xff\xf8\x08\x07\xff\xfc\x78\x07\xff\xff\xf8\x07\xff'\
b'\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff'\
b'\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff'\
b'\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff'\
b'\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff'\
b'\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff'\
b'\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff'\
b'\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xf8\x07\xff'\
b'\xff\xf8\x07\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x0f\xff'\
b'\xff\x80\x01\xff\xff\x00\x00\xff\xfc\x00\x00\x7f\xf8\x00\x00\x3f'\
b'\xf0\x00\x00\x1f\xe0\x07\xe0\x0f\xe0\x1f\xf0\x0f\xf0\x3f\xf8\x0f'\
b'\xf8\x7f\xf8\x0f\xfc\x7f\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf0\x0f'\
b'\xff\xff\xf0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x3f\xff\xff\xc0\x3f'\
b'\xff\xff\x80\x7f\xff\xff\x00\xff\xff\xfe\x01\xff\xff\xfc\x01\xff'\
b'\xff\xf8\x03\xff\xff\xf0\x07\xff\xff\xe0\x0f\xff\xff\xc0\x1f\xff'\
b'\xff\x80\x3f\xff\xff\x00\x7f\xff\xfe\x00\xff\xff\xfc\x01\xff\xff'\
b'\xf8\x03\xff\xff\xf0\x00\x00\x07\xf0\x00\x00\x07\xf0\x00\x00\x07'\
b'\xf0\x00\x00\x07\xf0\x00\x00\x07\xf0\x00\x00\x07\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xe0\x1f\xff\xff\x00\x03\xff\xfc\x00\x01\xff'\
b'\xf8\x00\x00\x7f\xf0\x00\x00\x7f\xf0\x00\x00\x3f\xf8\x0f\x80\x1f'\
b'\xfc\x3f\xe0\x1f\xfe\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f'\
b'\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xe0\x3f\xff\xff\xc0\x3f'\
b'\xff\xff\x80\x7f\xff\xe0\x00\xff\xff\xc0\x01\xff\xff\xc0\x03\xff'\
b'\xff\xc0\x01\xff\xff\xc0\x00\xff\xff\xc0\x00\x3f\xff\xff\x80\x3f'\
b'\xff\xff\xe0\x1f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f'\
b'\xfc\xff\xf0\x0f\xf8\xff\xf0\x0f\xf0\x3f\xe0\x1f\xf0\x0f\x80\x1f'\
b'\xe0\x00\x00\x3f\xe0\x00\x00\x3f\xf0\x00\x00\x7f\xfc\x00\x01\xff'\
b'\xff\x00\x03\xff\xff\xe0\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x7f'\
b'\xff\xff\xe0\x7f\xff\xff\xc0\x7f\xff\xff\x80\x7f\xff\xff\x80\x7f'\
b'\xff\xff\x00\x7f\xff\xfe\x00\x7f\xff\xfe\x00\x7f\xff\xfc\x00\x7f'\
b'\xff\xfc\x00\x7f\xff\xf8\x00\x7f\xff\xf0\x00\x7f\xff\xf0\x00\x7f'\
b'\xff\xe0\x40\x7f\xff\xc0\x40\x7f\xff\xc0\xc0\x7f\xff\x81\xc0\x7f'\
b'\xff\x01\xc0\x7f\xff\x03\xc0\x7f\xfe\x03\xc0\x7f\xfc\x07\xc0\x7f'\
b'\xfc\x0f\xc0\x7f\xf8\x0f\xc0\x7f\xf0\x1f\xc0\x7f\xf0\x3f\xc0\x7f'\
b'\xe0\x00\x00\x03\xe0\x00\x00\x01\xc0\x00\x00\x01\xc0\x00\x00\x01'\
b'\xc0\x00\x00\x01\xe0\x00\x00\x01\xff\xff\xc0\x7f\xff\xff\xc0\x7f'\
b'\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x7f'\
b'\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xfc\x00\x00\x0f\xfc\x00\x00\x0f\xfc\x00\x00\x0f'\
b'\xfc\x00\x00\x0f\xfc\x00\x00\x0f\xfc\x00\x00\x0f\xfc\x07\xff\xff'\
b'\xfc\x0f\xff\xff\xfc\x0f\xff\xff\xfc\x0f\xff\xff\xfc\x0f\xff\xff'\
b'\xfc\x0f\xff\xff\xfc\x0f\xff\xff\xf8\x0c\x07\xff\xf8\x00\x00\xff'\
b'\xf8\x00\x00\x7f\xf8\x00\x00\x3f\xf8\x00\x00\x1f\xf8\x00\x00\x0f'\
b'\xf8\x07\xe0\x0f\xf8\x1f\xf0\x07\xfe\x3f\xf8\x07\xff\xff\xfc\x07'\
b'\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfe\x03'\
b'\xff\xff\xfe\x03\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\x7f\xfc\x07'\
b'\xfe\x3f\xf8\x07\xfc\x3f\xf8\x07\xf0\x0f\xf0\x0f\xf0\x03\xc0\x0f'\
b'\xf0\x00\x00\x1f\xf8\x00\x00\x3f\xfc\x00\x00\x7f\xfe\x00\x00\xff'\
b'\xff\x80\x03\xff\xff\xf8\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfc\x03\xff'\
b'\xff\xf0\x00\x7f\xff\xe0\x00\x1f\xff\x80\x00\x0f\xff\x00\x00\x0f'\
b'\xff\x00\x00\x1f\xfe\x00\xf8\x3f\xfc\x03\xfe\x7f\xfc\x07\xff\xff'\
b'\xf8\x07\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1e\x07\xff\xf0\x10\x00\xff\xf0\x00\x00\x7f'\
b'\xe0\x00\x00\x3f\xe0\x00\x00\x1f\xe0\x00\x00\x1f\xe0\x03\xe0\x0f'\
b'\xe0\x07\xf8\x0f\xe0\x0f\xf8\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xf0\x1f\xfc\x07\xf0\x1f\xfc\x07'\
b'\xf0\x1f\xfc\x07\xf0\x1f\xfc\x07\xf8\x0f\xf8\x0f\xf8\x07\xf8\x0f'\
b'\xf8\x07\xf0\x0f\xfc\x01\xc0\x1f\xfe\x00\x00\x1f\xfe\x00\x00\x3f'\
b'\xff\x00\x00\x7f\xff\x80\x00\xff\xff\xe0\x03\xff\xff\xfc\x1f\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xf0\x00\x00\x0f\xf0\x00\x00\x0f\xf0\x00\x00\x0f'\
b'\xf0\x00\x00\x0f\xf0\x00\x00\x0f\xf0\x00\x00\x0f\xff\xff\xf0\x1f'\
b'\xff\xff\xf0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x3f\xff\xff\xc0\x3f'\
b'\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff\x80\x7f\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x01\xff\xff\xff\x01\xff\xff\xfe\x01\xff'\
b'\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfc\x03\xff\xff\xfc\x07\xff'\
b'\xff\xfc\x07\xff\xff\xf8\x07\xff\xff\xf8\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x3f\xff'\
b'\xff\xe0\x3f\xff\xff\xc0\x3f\xff\xff\xc0\x3f\xff\xff\xc0\x7f\xff'\
b'\xff\x80\x7f\xff\xff\x80\x7f\xff\xff\x80\xff\xff\xff\x00\xff\xff'\
b'\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x07\xff'\
b'\xff\xc0\x01\xff\xff\x00\x00\x7f\xfe\x00\x00\x3f\xfc\x00\x00\x3f'\
b'\xfc\x00\x00\x1f\xf8\x03\xe0\x1f\xf8\x0f\xf0\x0f\xf8\x0f\xf8\x0f'\
b'\xf8\x0f\xf8\x0f\xf8\x0f\xf8\x0f\xf8\x0f\xf8\x0f\xf8\x0f\xf8\x0f'\
b'\xf8\x07\xf0\x1f\xfc\x03\xf0\x1f\xfc\x01\xe0\x3f\xfe\x00\x40\x3f'\
b'\xff\x00\x00\x7f\xff\x80\x00\xff\xff\x80\x01\xff\xff\x00\x00\x7f'\
b'\xfe\x00\x00\x3f\xfc\x03\x80\x1f\xf8\x07\xc0\x1f\xf0\x0f\xf0\x0f'\
b'\xf0\x1f\xf8\x07\xe0\x3f\xf8\x07\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07'\
b'\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xf8\x07'\
b'\xe0\x0f\xf0\x07\xf0\x03\xc0\x0f\xf0\x00\x00\x0f\xf8\x00\x00\x1f'\
b'\xfc\x00\x00\x3f\xfe\x00\x00\x7f\xff\x80\x01\xff\xff\xf0\x0f\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xf0\x0f\xff\xff\xc0\x03\xff\xff\x00\x00\xff'\
b'\xfe\x00\x00\x7f\xfc\x00\x00\x7f\xf8\x00\x00\x3f\xf8\x07\xc0\x1f'\
b'\xf0\x0f\xe0\x1f\xf0\x1f\xf0\x1f\xe0\x1f\xf8\x0f\xe0\x3f\xf8\x0f'\
b'\xe0\x3f\xf8\x0f\xe0\x3f\xf8\x0f\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07'\
b'\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07\xe0\x1f\xfc\x07\xf0\x0f\xf8\x07'\
b'\xf0\x07\xf0\x07\xf8\x00\x00\x07\xf8\x00\x00\x07\xfc\x00\x00\x07'\
b'\xfe\x00\x00\x07\xff\x80\x00\x0f\xff\xe0\x18\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf0\x1f\xff\xff\xf0\x1f'\
b'\xff\xff\xe0\x1f\xfe\xff\xe0\x3f\xfc\x7f\xc0\x3f\xf8\x0f\x00\x7f'\
b'\xf0\x00\x00\x7f\xf0\x00\x00\xff\xf0\x00\x01\xff\xf8\x00\x03\xff'\
b'\xfe\x00\x0f\xff\xff\xe0\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x7f\xff'\
b'\xff\xf0\x1f\xff\xff\xe0\x1f\xff\xff\xc0\x0f\xff\xff\xc0\x0f\xff'\
b'\xff\xc0\x0f\xff\xff\xc0\x0f\xff\xff\xe0\x1f\xff\xff\xf0\x3f\xff'\
b'\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x3f\xff\xff\xe0\x1f\xff'\
b'\xff\xe0\x1f\xff\xff\xc0\x0f\xff\xff\xc0\x0f\xff\xff\xc0\x0f\xff'\
b'\xff\xe0\x0f\xff\xff\xe0\x1f\xff\xff\xf0\x3f\xff\xff\xfc\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xf8\x7f\xff\xff\xf0\x1f\xff\xff\xe0\x1f\xff'\
b'\xff\xc0\x0f\xff\xff\xc0\x0f\xff\xff\xc0\x0f\xff\xff\xc0\x0f\xff'\
b'\xff\xe0\x1f\xff\xff\xf0\x3f\xff\xff\xfc\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xf8\x3f\xff\xff\xf0\x1f\xff\xff\xe0\x0f\xff\xff\xc0\x0f\xff'\
b'\xff\xc0\x07\xff\xff\xc0\x07\xff\xff\xe0\x07\xff\xff\xf0\x07\xff'\
b'\xff\xf0\x07\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x3f\xff\xff\xe0\x7f\xff'\
b'\xff\xc0\x7f\xff\xff\xe0\xff\xff\xff\xf1\xff\xff\xff\xfb\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfb\xff\xff\xff\xe3'\
b'\xff\xff\xff\x83\xff\xff\xfe\x03\xff\xff\xf8\x03\xff\xff\xe0\x03'\
b'\xff\xff\x80\x03\xff\xfe\x00\x0f\xff\xfc\x00\x3f\xff\xf0\x00\xff'\
b'\xff\xc0\x03\xff\xff\x00\x0f\xff\xfc\x00\x3f\xff\xf0\x00\xff\xff'\
b'\xc0\x03\xff\xff\xc0\x0f\xff\xff\xc0\x3f\xff\xff\xc0\x1f\xff\xff'\
b'\xc0\x07\xff\xff\xf0\x01\xff\xff\xf8\x00\xff\xff\xfe\x00\x3f\xff'\
b'\xff\x80\x0f\xff\xff\xc0\x03\xff\xff\xf0\x00\xff\xff\xfc\x00\x3f'\
b'\xff\xfe\x00\x1f\xff\xff\x80\x07\xff\xff\xe0\x03\xff\xff\xf0\x03'\
b'\xff\xff\xfc\x03\xff\xff\xff\x03\xff\xff\xff\x83\xff\xff\xff\xe3'\
b'\xff\xff\xff\xfb\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x00\x00\x07'\
b'\xe0\x00\x00\x03\xe0\x00\x00\x03\xe0\x00\x00\x03\xe0\x00\x00\x03'\
b'\xe0\x00\x00\x03\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xe0\x00\x00\x03\xe0\x00\x00\x03\xe0\x00\x00\x03'\
b'\xe0\x00\x00\x03\xe0\x00\x00\x03\xe0\x00\x00\x07\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xef\xff\xff\xff\xc3\xff\xff\xff'\
b'\xc1\xff\xff\xff\xc0\x7f\xff\xff\xc0\x1f\xff\xff\xc0\x07\xff\xff'\
b'\xe0\x01\xff\xff\xf0\x00\x7f\xff\xfc\x00\x1f\xff\xff\x00\x07\xff'\
b'\xff\xc0\x03\xff\xff\xf0\x00\xff\xff\xfc\x00\x3f\xff\xff\x00\x0f'\
b'\xff\xff\xc0\x03\xff\xff\xf0\x03\xff\xff\xfc\x03\xff\xff\xf8\x03'\
b'\xff\xff\xe0\x03\xff\xff\xc0\x07\xff\xff\x00\x1f\xff\xfc\x00\x7f'\
b'\xff\xf0\x00\xff\xff\xe0\x03\xff\xff\x80\x0f\xff\xfe\x00\x1f\xff'\
b'\xf8\x00\x7f\xff\xe0\x01\xff\xff\xc0\x03\xff\xff\xc0\x0f\xff\xff'\
b'\xc0\x3f\xff\xff\xc0\xff\xff\xff\xc1\xff\xff\xff\xc7\xff\xff\xff'\
b'\xdf\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x07\xff\xff\xc0\x01\xff'\
b'\xff\x00\x00\x7f\xfe\x00\x00\x3f\xfc\x00\x00\x1f\xf8\x00\x00\x1f'\
b'\xf0\x07\xe0\x0f\xf0\x1f\xf8\x0f\xfc\x3f\xf8\x0f\xfe\x7f\xfc\x07'\
b'\xff\x7f\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf0\x0f\xff\xff\xe0\x1f\xff\xff\xc0\x3f'\
b'\xff\xff\x80\x7f\xff\xff\x00\xff\xff\xfe\x01\xff\xff\xfe\x03\xff'\
b'\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x0f\xff\xff\xfc\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xfc\x1f\xff\xff\xf8\x0f\xff\xff\xf0\x07\xff\xff\xf0\x07\xff'\
b'\xff\xf0\x03\xff\xff\xf0\x03\xff\xff\xf0\x07\xff\xff\xf8\x07\xff'\
b'\xff\xfc\x0f\xff\xff\xff\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x07\xff'\
b'\xff\xe0\x00\xff\xff\x80\x00\x7f\xff\x00\x00\x1f\xfe\x00\x00\x1f'\
b'\xfc\x03\xf8\x0f\xf8\x0f\xfc\x07\xf0\x1f\xfe\x07\xf0\x3f\xff\x07'\
b'\xe0\x7f\xff\x03\xe0\x7f\xff\x03\xe0\xff\xe0\x03\xc0\xff\x00\x03'\
b'\xc0\xfc\x00\x03\xc0\xf8\x00\x03\xc1\xf8\x00\x03\xc1\xf0\x0f\x83'\
b'\xc1\xf0\x3f\x83\xc1\xe0\x7f\x83\x81\xe0\x7f\x03\x81\xe0\x7f\x03'\
b'\xc1\xe0\x7f\x03\xc1\xe0\x7f\x03\xc1\xe0\x7e\x03\xc0\xe0\x3c\x03'\
b'\xc0\xf0\x00\x03\xc0\xf8\x00\x03\xc0\xf8\x00\x03\xe0\x7e\x01\x83'\
b'\xe0\x7f\x03\xff\xe0\x3f\xff\xff\xf0\x1f\xff\xff\xf0\x0f\xff\xff'\
b'\xf8\x07\xff\xbf\xfc\x01\xff\x1f\xfe\x00\x00\x0f\xff\x00\x00\x0f'\
b'\xff\x80\x00\x07\xff\xc0\x00\x1f\xff\xf0\x00\x7f\xff\xff\x03\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xfc\x3f\xff\xff\xfc\x3f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xe0\x07\xff\xff\xe0\x07\xff\xff\xe0\x07\xff'\
b'\xff\xc0\x03\xff\xff\xc0\x03\xff\xff\xc0\x03\xff\xff\x80\x01\xff'\
b'\xff\x81\x81\xff\xff\x81\x81\xff\xff\x01\x80\xff\xff\x01\x80\xff'\
b'\xff\x03\xc0\xff\xfe\x03\xc0\x7f\xfe\x03\xc0\x7f\xfe\x07\xe0\x7f'\
b'\xfc\x07\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07\xe0\x3f\xf8\x00\x00\x1f'\
b'\xf8\x00\x00\x1f\xf8\x00\x00\x1f\xf0\x00\x00\x0f\xf0\x00\x00\x0f'\
b'\xf0\x1f\xf8\x0f\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07'\
b'\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03\x80\xff\xfe\x01'\
b'\x80\xff\xff\x01\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x00\x0f\xff'\
b'\xc0\x00\x01\xff\xc0\x00\x00\x7f\xc0\x00\x00\x3f\xc0\x00\x00\x1f'\
b'\xc0\x00\x00\x1f\xc0\x3f\xc0\x0f\xc0\x3f\xf0\x0f\xc0\x3f\xf8\x07'\
b'\xc0\x3f\xf8\x07\xc0\x3f\xf8\x07\xc0\x3f\xf8\x07\xc0\x3f\xf8\x0f'\
b'\xc0\x3f\xf8\x0f\xc0\x3f\xf0\x0f\xc0\x3f\xc0\x1f\xc0\x00\x00\x3f'\
b'\xc0\x00\x00\x7f\xc0\x00\x00\xff\xc0\x00\x00\xff\xc0\x00\x00\x3f'\
b'\xc0\x00\x00\x1f\xc0\x3f\xc0\x0f\xc0\x3f\xf0\x07\xc0\x3f\xf8\x07'\
b'\xc0\x3f\xfc\x03\xc0\x3f\xfc\x03\xc0\x3f\xfc\x03\xc0\x3f\xfe\x03'\
b'\xc0\x3f\xfc\x03\xc0\x3f\xfc\x03\xc0\x3f\xfc\x03\xc0\x3f\xf8\x07'\
b'\xc0\x3f\xe0\x07\xc0\x00\x00\x0f\xc0\x00\x00\x0f\xc0\x00\x00\x1f'\
b'\xc0\x00\x00\x7f\xc0\x00\x01\xff\xc0\x00\x0f\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xfc\x03\xff\xff\xe0\x00\x7f\xff\xc0\x00\x1f'\
b'\xff\x00\x00\x0f\xfe\x00\x00\x07\xfc\x00\x00\x07\xfc\x00\xfc\x03'\
b'\xf8\x03\xfe\x01\xf0\x07\xff\x03\xf0\x0f\xff\x87\xe0\x1f\xff\x9f'\
b'\xe0\x1f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xc0\x3f\xff\xff'\
b'\xc0\x3f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff'\
b'\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff'\
b'\xc0\x7f\xff\xff\xc0\x3f\xff\xff\xc0\x3f\xff\xff\xc0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x0f\xff\xff'\
b'\xf0\x0f\xff\x1f\xf0\x07\xff\x0f\xf8\x03\xfc\x03\xf8\x00\xf0\x03'\
b'\xfc\x00\x00\x07\xfe\x00\x00\x07\xff\x00\x00\x0f\xff\xc0\x00\x3f'\
b'\xff\xe0\x00\x7f\xff\xfe\x07\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x00\x1f\xff'\
b'\xe0\x00\x07\xff\xe0\x00\x01\xff\xe0\x00\x00\xff\xe0\x00\x00\x7f'\
b'\xe0\x00\x00\x3f\xe0\x3f\x80\x1f\xe0\x3f\xe0\x1f\xe0\x3f\xf0\x0f'\
b'\xe0\x3f\xf8\x0f\xe0\x3f\xf8\x07\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07'\
b'\xe0\x3f\xfc\x03\xe0\x3f\xfe\x03\xe0\x3f\xfe\x03\xe0\x3f\xfe\x03'\
b'\xe0\x3f\xfe\x03\xe0\x3f\xfe\x03\xe0\x3f\xfe\x03\xe0\x3f\xfe\x03'\
b'\xe0\x3f\xfe\x03\xe0\x3f\xfe\x03\xe0\x3f\xfe\x03\xe0\x3f\xfe\x03'\
b'\xe0\x3f\xfe\x03\xe0\x3f\xfc\x03\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07'\
b'\xe0\x3f\xf8\x07\xe0\x3f\xf8\x0f\xe0\x3f\xf0\x0f\xe0\x3f\xe0\x1f'\
b'\xe0\x3f\x80\x1f\xe0\x00\x00\x3f\xe0\x00\x00\x7f\xe0\x00\x00\xff'\
b'\xe0\x00\x01\xff\xe0\x00\x07\xff\xe0\x00\x3f\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xe0\x00\x00\x07\xe0\x00\x00\x07\xe0\x00\x00\x07'\
b'\xe0\x00\x00\x07\xe0\x00\x00\x07\xe0\x00\x00\x07\xe0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x00\x00\x3f\xe0\x00\x00\x3f\xe0\x00\x00\x3f'\
b'\xe0\x00\x00\x3f\xe0\x00\x00\x3f\xe0\x00\x00\x3f\xe0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x00\x00\x07'\
b'\xe0\x00\x00\x07\xe0\x00\x00\x07\xe0\x00\x00\x07\xe0\x00\x00\x07'\
b'\xe0\x00\x00\x07\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x00\x00\x07'\
b'\xf0\x00\x00\x07\xf0\x00\x00\x07\xf0\x00\x00\x07\xf0\x00\x00\x07'\
b'\xf0\x00\x00\x07\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x00\x00\x3f\xf0\x00\x00\x3f'\
b'\xf0\x00\x00\x3f\xf0\x00\x00\x3f\xf0\x00\x00\x3f\xf0\x00\x00\x3f'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xfc\x03\xff\xff\xe0\x00\xff\xff\x80\x00\x3f'\
b'\xff\x00\x00\x1f\xfe\x00\x00\x0f\xfc\x00\x00\x07\xf8\x01\xf8\x03'\
b'\xf0\x07\xfe\x03\xf0\x0f\xff\x07\xe0\x1f\xff\x0f\xe0\x1f\xff\x9f'\
b'\xe0\x3f\xff\xff\xc0\x3f\xff\xff\xc0\x3f\xff\xff\xc0\x7f\xff\xff'\
b'\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff'\
b'\xc0\x7f\xff\xff\xc0\x7f\x80\x03\xc0\x7f\x80\x03\xc0\x7f\x80\x03'\
b'\xc0\x7f\x80\x03\xc0\x7f\x80\x03\xc0\x7f\x80\x03\xc0\x3f\xff\x03'\
b'\xc0\x3f\xff\x03\xe0\x3f\xff\x03\xe0\x3f\xff\x03\xe0\x1f\xff\x03'\
b'\xf0\x0f\xff\x03\xf0\x0f\xff\x03\xf8\x03\xfe\x03\xf8\x00\xf0\x03'\
b'\xfc\x00\x00\x03\xfe\x00\x00\x03\xff\x00\x00\x07\xff\x80\x00\x0f'\
b'\xff\xe0\x00\x7f\xff\xfc\x07\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x3f\xfc\x03'\
b'\xc0\x3f\xfc\x03\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07'\
b'\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07'\
b'\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07'\
b'\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x00\x00\x07'\
b'\xc0\x00\x00\x07\xc0\x00\x00\x07\xc0\x00\x00\x07\xc0\x00\x00\x07'\
b'\xc0\x00\x00\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07'\
b'\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07'\
b'\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07'\
b'\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07'\
b'\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xc0\x3f\xfc\x07\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xf8\x00\x00\x1f\xf8\x00\x00\x1f\xf8\x00\x00\x1f'\
b'\xf8\x00\x00\x1f\xf8\x00\x00\x1f\xf8\x00\x00\x1f\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xf8\x00\x00\x1f'\
b'\xf0\x00\x00\x1f\xf0\x00\x00\x1f\xf0\x00\x00\x1f\xf0\x00\x00\x1f'\
b'\xf0\x00\x00\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x80\x00\x03'\
b'\xff\x80\x00\x03\xff\x80\x00\x03\xff\x80\x00\x03\xff\x80\x00\x03'\
b'\xff\x80\x00\x03\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x00\xff\xff\xff\x00\xff\xf9\xff\x00\xff\xf0\xfe\x01\xff'\
b'\xf0\x7c\x01\xff\xe0\x00\x03\xff\xc0\x00\x03\xff\xc0\x00\x07\xff'\
b'\xe0\x00\x0f\xff\xf0\x00\x1f\xff\xfc\x00\x7f\xff\xff\x81\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xc0\x3f\xfc\x03\xc0\x3f\xf8\x03\xc0\x3f\xf0\x07'\
b'\xc0\x3f\xf0\x0f\xc0\x3f\xe0\x1f\xc0\x3f\xc0\x3f\xc0\x3f\x80\x3f'\
b'\xc0\x3f\x00\x7f\xc0\x3f\x00\xff\xc0\x3e\x01\xff\xc0\x3c\x03\xff'\
b'\xc0\x38\x03\xff\xc0\x30\x07\xff\xc0\x30\x0f\xff\xc0\x20\x1f\xff'\
b'\xc0\x00\x3f\xff\xc0\x00\x3f\xff\xc0\x00\x3f\xff\xc0\x00\x3f\xff'\
b'\xc0\x00\x1f\xff\xc0\x00\x0f\xff\xc0\x00\x0f\xff\xc0\x00\x07\xff'\
b'\xc0\x18\x07\xff\xc0\x3c\x03\xff\xc0\x3c\x01\xff\xc0\x3e\x01\xff'\
b'\xc0\x3e\x00\xff\xc0\x3f\x00\x7f\xc0\x3f\x80\x7f\xc0\x3f\x80\x3f'\
b'\xc0\x3f\xc0\x3f\xc0\x3f\xc0\x1f\xc0\x3f\xe0\x0f\xc0\x3f\xf0\x0f'\
b'\xc0\x3f\xf0\x07\xc0\x3f\xf8\x07\xc0\x3f\xf8\x03\xc0\x3f\xfc\x01'\
b'\xc0\x3f\xfe\x01\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x0f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x00\x00\x07\xf0\x00\x00\x07\xf0\x00\x00\x07'\
b'\xf0\x00\x00\x07\xf0\x00\x00\x07\xf0\x00\x00\x07\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xc0\x7f\xff\x03\xc0\x7f\xfe\x03\xc0\x3f\xfe\x03'\
b'\xc0\x3f\xfc\x03\xc0\x3f\xfc\x03\xc0\x1f\xf8\x03\xc0\x1f\xf8\x03'\
b'\xc0\x0f\xf8\x03\xc0\x0f\xf0\x03\xc0\x0f\xf0\x03\xc0\x07\xe0\x03'\
b'\xc0\x07\xe0\x03\xc0\x03\xe0\x03\xc0\x03\xc1\x03\xc0\x81\xc1\x03'\
b'\xc0\xc1\x81\x03\xc0\xc1\x83\x03\xc0\xc0\x03\x03\xc0\xe0\x07\x03'\
b'\xc0\xe0\x07\x03\xc0\xf0\x0f\x03\xc0\xf0\x0f\x03\xc0\xf0\x0f\x03'\
b'\xc0\xf8\x1f\x03\xc0\xf8\x1f\x03\xc0\xfc\x3f\x03\xc0\xfc\x3f\x03'\
b'\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03'\
b'\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03'\
b'\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\xff\xff\x03'\
b'\xc0\xff\xff\x03\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x3f\xfe\x03'\
b'\xc0\x1f\xfe\x03\xc0\x1f\xfe\x03\xc0\x0f\xfe\x03\xc0\x0f\xfe\x03'\
b'\xc0\x07\xfe\x03\xc0\x07\xfe\x03\xc0\x03\xfe\x03\xc0\x03\xfe\x03'\
b'\xc0\x01\xfe\x03\xc0\x01\xfe\x03\xc0\x00\xfe\x03\xc0\x00\xfe\x03'\
b'\xc0\x40\x7e\x03\xc0\x40\x7e\x03\xc0\x60\x3e\x03\xc0\x60\x3e\x03'\
b'\xc0\x70\x1e\x03\xc0\x70\x1e\x03\xc0\x78\x0e\x03\xc0\x78\x06\x03'\
b'\xc0\x7c\x06\x03\xc0\x7c\x02\x03\xc0\x7e\x02\x03\xc0\x7e\x00\x03'\
b'\xc0\x7f\x00\x03\xc0\x7f\x00\x03\xc0\x7f\x80\x03\xc0\x7f\x80\x03'\
b'\xc0\x7f\xc0\x03\xc0\x7f\xc0\x03\xc0\x7f\xe0\x03\xc0\x7f\xe0\x03'\
b'\xc0\x7f\xf0\x03\xc0\x7f\xf0\x03\xc0\x7f\xf8\x03\xc0\x7f\xf8\x03'\
b'\xc0\x7f\xfc\x03\xc0\x7f\xfc\x03\xc0\x7f\xfe\x03\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xf0\x0f\xff\xff\x80\x01\xff\xff\x00\x00\xff'\
b'\xfe\x00\x00\x3f\xfc\x00\x00\x3f\xf8\x00\x00\x1f\xf0\x03\xe0\x0f'\
b'\xf0\x0f\xf0\x0f\xe0\x1f\xf8\x07\xe0\x1f\xfc\x07\xc0\x3f\xfc\x03'\
b'\xc0\x3f\xfc\x03\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03\xc0\x7f\xfe\x01'\
b'\x80\x7f\xfe\x01\x80\x7f\xff\x01\x80\x7f\xff\x01\x80\x7f\xff\x01'\
b'\x80\xff\xff\x01\x80\xff\xff\x01\x80\xff\xff\x01\x80\x7f\xff\x01'\
b'\x80\x7f\xff\x01\x80\x7f\xff\x01\x80\x7f\xfe\x01\xc0\x7f\xfe\x01'\
b'\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03\xc0\x3f\xfc\x03\xe0\x3f\xfc\x03'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xf8\x07\xf0\x0f\xf0\x0f\xf0\x03\xc0\x0f'\
b'\xf8\x00\x00\x1f\xfc\x00\x00\x3f\xfe\x00\x00\x7f\xff\x00\x00\xff'\
b'\xff\xc0\x03\xff\xff\xf8\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x00\x07\xff'\
b'\xe0\x00\x00\xff\xe0\x00\x00\x3f\xe0\x00\x00\x1f\xe0\x00\x00\x0f'\
b'\xe0\x00\x00\x07\xe0\x1f\xc0\x07\xe0\x1f\xf8\x03\xe0\x1f\xfc\x03'\
b'\xe0\x1f\xfc\x03\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03'\
b'\xe0\x1f\xfc\x03\xe0\x1f\xfc\x03\xe0\x1f\xf8\x03\xe0\x1f\xe0\x07'\
b'\xe0\x00\x00\x07\xe0\x00\x00\x0f\xe0\x00\x00\x1f\xe0\x00\x00\x3f'\
b'\xe0\x00\x00\xff\xe0\x00\x03\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xf0\x0f\xff\xff\xc0\x01\xff\xff\x00\x00\xff'\
b'\xfe\x00\x00\x7f\xfc\x00\x00\x3f\xf8\x00\x00\x1f\xf0\x03\xc0\x0f'\
b'\xf0\x0f\xf0\x0f\xe0\x1f\xf8\x07\xe0\x1f\xfc\x07\xe0\x3f\xfc\x03'\
b'\xc0\x3f\xfc\x03\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03\xc0\x7f\xfe\x01'\
b'\xc0\x7f\xfe\x01\x80\x7f\xff\x01\x80\x7f\xff\x01\x80\x7f\xff\x01'\
b'\x80\xff\xff\x01\x80\xff\xff\x01\x80\xff\xff\x01\x80\x7f\xff\x01'\
b'\x80\x7f\xff\x01\x80\x7f\xff\x01\xc0\x7f\xff\x01\xc0\x7f\xfe\x01'\
b'\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03\xc0\x3f\xfe\x03\xe0\x3f\xfc\x03'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xf8\x07\xf0\x0f\xf0\x07\xf0\x03\xe0\x0f'\
b'\xf8\x00\x00\x1f\xfc\x00\x00\x1f\xfe\x00\x00\x3f\xff\x00\x00\x7f'\
b'\xff\x80\x01\xff\xff\xf0\x07\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x07\xff\xff\xf8\x00\x0f\xff\xfc\x00\x0f\xff\xfc\x00\x0f'\
b'\xff\xfe\x00\x0f\xff\xff\x00\x0f\xff\xff\xe0\x0f\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x00\x07\xff'\
b'\xe0\x00\x00\xff\xe0\x00\x00\x7f\xe0\x00\x00\x1f\xe0\x00\x00\x1f'\
b'\xe0\x00\x00\x0f\xe0\x3f\x80\x07\xe0\x3f\xf0\x07\xe0\x3f\xf8\x07'\
b'\xe0\x3f\xfc\x07\xe0\x3f\xfc\x03\xe0\x3f\xfc\x03\xe0\x3f\xfc\x03'\
b'\xe0\x3f\xfc\x07\xe0\x3f\xf8\x07\xe0\x3f\xf8\x07\xe0\x3f\xe0\x0f'\
b'\xe0\x00\x00\x0f\xe0\x00\x00\x1f\xe0\x00\x00\x3f\xe0\x00\x00\x7f'\
b'\xe0\x00\x00\xff\xe0\x00\x03\xff\xe0\x3e\x01\xff\xe0\x3f\x01\xff'\
b'\xe0\x3f\x00\xff\xe0\x3f\x80\xff\xe0\x3f\x80\x7f\xe0\x3f\xc0\x7f'\
b'\xe0\x3f\xc0\x3f\xe0\x3f\xc0\x3f\xe0\x3f\xe0\x1f\xe0\x3f\xe0\x1f'\
b'\xe0\x3f\xf0\x0f\xe0\x3f\xf0\x0f\xe0\x3f\xf8\x07\xe0\x3f\xf8\x07'\
b'\xe0\x3f\xfc\x03\xe0\x3f\xfc\x03\xe0\x3f\xfe\x03\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xf0\x07\xff\xff\x80\x00\xff\xff\x00\x00\x3f'\
b'\xfe\x00\x00\x1f\xfc\x00\x00\x0f\xf8\x00\x00\x0f\xf0\x03\xf0\x1f'\
b'\xf0\x0f\xfc\x1f\xf0\x1f\xfe\x3f\xf0\x1f\xfe\x7f\xf0\x1f\xff\xff'\
b'\xf0\x1f\xff\xff\xf0\x0f\xff\xff\xf0\x07\xff\xff\xf8\x03\xff\xff'\
b'\xf8\x00\xff\xff\xfc\x00\x3f\xff\xfe\x00\x0f\xff\xff\x00\x03\xff'\
b'\xff\x80\x00\xff\xff\xe0\x00\x7f\xff\xf8\x00\x1f\xff\xfe\x00\x0f'\
b'\xff\xff\x80\x0f\xff\xff\xe0\x07\xff\xff\xf0\x07\xff\xff\xf8\x07'\
b'\xff\xff\xfc\x03\xff\xff\xfc\x03\xff\xff\xfc\x03\xf9\xff\xfc\x03'\
b'\xf8\xff\xfc\x07\xf0\x7f\xfc\x07\xf0\x1f\xf0\x07\xe0\x03\xc0\x0f'\
b'\xc0\x00\x00\x0f\xe0\x00\x00\x1f\xf0\x00\x00\x3f\xfc\x00\x00\x7f'\
b'\xff\x00\x01\xff\xff\xf0\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x80\x00\x00\x01'\
b'\x80\x00\x00\x01\x80\x00\x00\x01\x80\x00\x00\x01\x80\x00\x00\x01'\
b'\x80\x00\x00\x01\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff'\
b'\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xf0\x1f\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03'\
b'\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03'\
b'\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03'\
b'\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03'\
b'\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03'\
b'\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03'\
b'\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03'\
b'\xc0\x3f\xfe\x03\xc0\x3f\xfc\x03\xe0\x3f\xfc\x03\xe0\x3f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xf8\x07\xf0\x0f\xf0\x0f\xf0\x03\xc0\x0f'\
b'\xf8\x00\x00\x1f\xfc\x00\x00\x1f\xfe\x00\x00\x3f\xff\x00\x00\xff'\
b'\xff\xc0\x01\xff\xff\xf8\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x80\xff\xff\x01'\
b'\x80\x7f\xff\x01\xc0\x7f\xff\x01\xc0\x7f\xfe\x03\xc0\x3f\xfe\x03'\
b'\xe0\x3f\xfe\x03\xe0\x3f\xfe\x07\xe0\x1f\xfc\x07\xf0\x1f\xfc\x07'\
b'\xf0\x1f\xfc\x0f\xf0\x1f\xf8\x0f\xf8\x0f\xf8\x0f\xf8\x0f\xf8\x1f'\
b'\xf8\x0f\xf8\x1f\xfc\x07\xf0\x1f\xfc\x07\xf0\x1f\xfc\x07\xf0\x3f'\
b'\xfe\x07\xe0\x3f\xfe\x03\xe0\x3f\xfe\x03\xe0\x7f\xff\x03\xe0\x7f'\
b'\xff\x01\xc0\x7f\xff\x01\xc0\xff\xff\x01\xc0\xff\xff\x80\x80\xff'\
b'\xff\x80\x81\xff\xff\x80\x81\xff\xff\xc0\x01\xff\xff\xc0\x03\xff'\
b'\xff\xc0\x03\xff\xff\xe0\x03\xff\xff\xe0\x07\xff\xff\xe0\x07\xff'\
b'\xff\xf0\x07\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x1f\xff\xff\xfc\x1f\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\x81\xff\xff\xc0\x81\xff\xff\xc0\x81\xff\xff\xc1'\
b'\x81\xfe\x1f\x81\x81\xfc\x1f\x81\x80\xfc\x1f\x81\x80\xfc\x1f\x81'\
b'\xc0\xfc\x0f\x81\xc0\xf8\x0f\x81\xc0\xf8\x0f\x81\xc0\xf8\x0f\x83'\
b'\xc0\xf8\x07\x83\xc0\xf0\x07\x83\xc0\xf0\x07\x83\xc0\x70\x07\x03'\
b'\xe0\x70\x03\x03\xe0\x70\x03\x03\xe0\x60\x83\x07\xe0\x60\x83\x07'\
b'\xe0\x61\x83\x07\xe0\x61\x81\x07\xe0\x41\xc1\x07\xf0\x41\xc1\x07'\
b'\xf0\x03\xc0\x07\xf0\x03\xc0\x07\xf0\x03\xe0\x0f\xf0\x03\xe0\x0f'\
b'\xf0\x03\xe0\x0f\xf0\x07\xe0\x0f\xf0\x07\xe0\x0f\xf8\x07\xf0\x0f'\
b'\xf8\x07\xf0\x0f\xf8\x0f\xf0\x0f\xf8\x0f\xf0\x1f\xf8\x0f\xf8\x1f'\
b'\xf8\x0f\xf8\x1f\xf8\x1f\xf8\x1f\xf8\x1f\xf8\x1f\xfc\x1f\xfc\x1f'\
b'\xfc\x1f\xfc\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x3f\xfc\x03'\
b'\xe0\x3f\xfc\x07\xe0\x1f\xf8\x07\xf0\x1f\xf8\x0f\xf0\x0f\xf0\x0f'\
b'\xf8\x0f\xf0\x1f\xf8\x07\xe0\x1f\xfc\x07\xe0\x3f\xfc\x03\xc0\x3f'\
b'\xfe\x03\xc0\x7f\xfe\x01\x80\x7f\xff\x01\x80\xff\xff\x00\x00\xff'\
b'\xff\x80\x01\xff\xff\xc0\x01\xff\xff\xc0\x03\xff\xff\xe0\x03\xff'\
b'\xff\xe0\x07\xff\xff\xf0\x07\xff\xff\xf0\x0f\xff\xff\xf0\x07\xff'\
b'\xff\xe0\x07\xff\xff\xe0\x03\xff\xff\xc0\x03\xff\xff\xc0\x01\xff'\
b'\xff\x80\x01\xff\xff\x80\x00\xff\xff\x00\x80\xff\xff\x01\x80\x7f'\
b'\xfe\x01\xc0\x7f\xfe\x03\xc0\x3f\xfc\x03\xe0\x1f\xfc\x07\xe0\x1f'\
b'\xf8\x07\xf0\x0f\xf8\x0f\xf0\x0f\xf0\x0f\xf8\x07\xf0\x1f\xf8\x07'\
b'\xe0\x1f\xfc\x03\xe0\x3f\xfc\x03\xc0\x3f\xfe\x01\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\x80\x7f\xff\x01\xc0\x7f\xfe\x01\xc0\x3f\xfe\x03'\
b'\xe0\x3f\xfc\x03\xe0\x1f\xfc\x07\xf0\x1f\xfc\x07\xf0\x0f\xf8\x0f'\
b'\xf8\x0f\xf8\x0f\xf8\x07\xf0\x1f\xfc\x07\xf0\x1f\xfc\x03\xe0\x1f'\
b'\xfe\x03\xe0\x3f\xfe\x01\xe0\x3f\xff\x01\xc0\x7f\xff\x00\xc0\x7f'\
b'\xff\x80\x80\xff\xff\x80\x00\xff\xff\xc0\x01\xff\xff\xc0\x01\xff'\
b'\xff\xe0\x03\xff\xff\xe0\x03\xff\xff\xf0\x03\xff\xff\xf0\x07\xff'\
b'\xff\xf8\x07\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x00\x00\x03'\
b'\xe0\x00\x00\x03\xe0\x00\x00\x03\xe0\x00\x00\x03\xe0\x00\x00\x03'\
b'\xe0\x00\x00\x07\xff\xff\xf8\x0f\xff\xff\xf0\x0f\xff\xff\xe0\x1f'\
b'\xff\xff\xe0\x1f\xff\xff\xc0\x3f\xff\xff\x80\x7f\xff\xff\x80\x7f'\
b'\xff\xff\x00\xff\xff\xff\x01\xff\xff\xfe\x01\xff\xff\xfc\x03\xff'\
b'\xff\xfc\x03\xff\xff\xf8\x07\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xc0\x3f\xff\xff\x80\x7f\xff'\
b'\xff\x80\x7f\xff\xff\x00\xff\xff\xfe\x01\xff\xff\xfe\x01\xff\xff'\
b'\xfc\x03\xff\xff\xfc\x03\xff\xff\xf8\x07\xff\xff\xf0\x0f\xff\xff'\
b'\xf0\x0f\xff\xff\xe0\x00\x00\x01\xc0\x00\x00\x01\xc0\x00\x00\x01'\
b'\xc0\x00\x00\x01\xc0\x00\x00\x01\xc0\x00\x00\x01\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x00\x00\x0f\xfe\x00\x00\x0f'\
b'\xfe\x00\x00\x0f\xfe\x00\x00\x0f\xfe\x00\x00\x0f\xfe\x00\x00\x1f'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff\xfe\x03\xff\xff'\
b'\xfe\x03\xff\xff\xfe\x00\x00\x1f\xfe\x00\x00\x0f\xfe\x00\x00\x0f'\
b'\xfe\x00\x00\x0f\xfe\x00\x00\x0f\xfe\x00\x00\x0f\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\x3f\xff\xff\xfc\x3f\xff\xff\xf0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x07\xff\xff\xf8\x07\xff\xff'\
b'\xf8\x03\xff\xff\xfc\x03\xff\xff\xfc\x03\xff\xff\xfe\x01\xff\xff'\
b'\xfe\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x80\x7f\xff'\
b'\xff\x80\x7f\xff\xff\xc0\x3f\xff\xff\xc0\x3f\xff\xff\xc0\x1f\xff'\
b'\xff\xe0\x1f\xff\xff\xe0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xfc\x03\xff\xff\xfc\x03\xff'\
b'\xff\xfe\x01\xff\xff\xfe\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff'\
b'\xff\xff\x00\x7f\xff\xff\x80\x7f\xff\xff\x80\x7f\xff\xff\xc0\x3f'\
b'\xff\xff\xc0\x3f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xf0\x0f'\
b'\xff\xff\xf0\x0f\xff\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xfc\x0f'\
b'\xff\xff\xfc\x3f\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x0f\xff\x00\x00\x0f'\
b'\xff\x00\x00\x0f\xff\x00\x00\x0f\xff\x00\x00\x0f\xff\x00\x00\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f'\
b'\xff\xff\xf8\x0f\xff\x00\x00\x0f\xff\x00\x00\x0f\xff\x00\x00\x0f'\
b'\xff\x00\x00\x0f\xff\x00\x00\x0f\xff\x00\x00\x0f\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfc\x3f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf0\x0f\xff\xff\xf0\x07\xff'\
b'\xff\xe0\x07\xff\xff\xc0\x03\xff\xff\xc0\x03\xff\xff\x80\x01\xff'\
b'\xff\x80\x01\xff\xff\x01\x80\xff\xfe\x01\x80\xff\xfe\x03\xc0\x7f'\
b'\xfc\x07\xc0\x3f\xfc\x07\xe0\x3f\xf8\x0f\xe0\x1f\xf8\x0f\xf0\x1f'\
b'\xfe\x1f\xf8\x7f\xff\x9f\xf9\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xc0\x00\x00\x03\xc0\x00\x00\x03\xc0\x00\x00\x03'\
b'\xc0\x00\x00\x03\xc0\x00\x00\x03\xc0\x00\x00\x03\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf9\xff\xff\xff\xf1\xff\xff'\
b'\xff\xe1\xff\xff\xff\x80\xff\xff\xff\xc0\xff\xff\xff\xc0\x7f\xff'\
b'\xff\xe0\x7f\xff\xff\xf0\x3f\xff\xff\xf8\x3f\xff\xff\xf8\x1f\xff'\
b'\xff\xfc\x1f\xff\xff\xfe\x1f\xff\xff\xff\x7f\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x1f\xff'\
b'\xff\x80\x01\xff\xfc\x00\x00\xff\xf8\x00\x00\x3f\xf0\x00\x00\x3f'\
b'\xf0\x00\x00\x1f\xf8\x0f\xc0\x0f\xfc\x3f\xf0\x0f\xfe\xff\xf8\x0f'\
b'\xff\xff\xf8\x07\xff\xff\xf8\x07\xff\xff\xfc\x07\xff\xf8\x00\x07'\
b'\xff\x80\x00\x07\xfe\x00\x00\x07\xfc\x00\x00\x07\xf8\x00\x00\x07'\
b'\xf0\x03\xfc\x07\xe0\x1f\xf8\x07\xe0\x3f\xf8\x07\xc0\x3f\xf8\x07'\
b'\xc0\x3f\xf0\x07\xc0\x3f\xf0\x07\xc0\x3f\xe0\x07\xe0\x0f\x00\x07'\
b'\xe0\x00\x00\x07\xe0\x00\x00\x07\xf0\x00\x00\x07\xf8\x00\x18\x07'\
b'\xfe\x00\x38\x07\xff\xe3\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x3f\x07\xff\xe0\x38\x00\xff\xe0\x30\x00\x7f'\
b'\xe0\x00\x00\x1f\xe0\x00\x00\x1f\xe0\x00\x00\x0f\xe0\x03\xe0\x07'\
b'\xe0\x07\xf8\x07\xe0\x0f\xf8\x07\xe0\x1f\xfc\x03\xe0\x1f\xfc\x03'\
b'\xe0\x1f\xfc\x03\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03'\
b'\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03'\
b'\xe0\x1f\xfc\x03\xe0\x1f\xfc\x03\xe0\x0f\xfc\x07\xe0\x0f\xf8\x07'\
b'\xe0\x07\xf0\x07\xe0\x01\xc0\x0f\xe0\x00\x00\x1f\xe0\x00\x00\x1f'\
b'\xe0\x20\x00\x3f\xe0\x70\x00\xff\xe0\x78\x01\xff\xff\xff\x9f\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x07\xff'\
b'\xff\xf0\x00\xff\xff\xc0\x00\x3f\xff\x00\x00\x0f\xfe\x00\x00\x07'\
b'\xfc\x00\x00\x03\xf8\x00\xf8\x03\xf8\x03\xfe\x07\xf0\x0f\xff\x0f'\
b'\xf0\x0f\xff\x9f\xe0\x1f\xff\xbf\xe0\x1f\xff\xff\xe0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xc0\x3f\xff\xff\xc0\x3f\xff\xff\xc0\x3f\xff\xff'\
b'\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xf0\x0f\xff\xbf\xf0\x07\xff\x1f\xf8\x03\xfc\x0f\xf8\x00\x20\x07'\
b'\xfc\x00\x00\x07\xfe\x00\x00\x07\xff\x00\x00\x1f\xff\xc0\x00\x3f'\
b'\xff\xf0\x00\xff\xff\xff\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07'\
b'\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07'\
b'\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07'\
b'\xff\xff\xfc\x07\xff\xf0\x7c\x07\xff\x80\x0c\x07\xfe\x00\x04\x07'\
b'\xfc\x00\x00\x07\xf8\x00\x00\x07\xf8\x00\x00\x07\xf0\x07\xc0\x07'\
b'\xf0\x0f\xe0\x07\xe0\x1f\xf0\x07\xe0\x3f\xf8\x07\xe0\x3f\xf8\x07'\
b'\xc0\x3f\xf8\x07\xc0\x7f\xf8\x07\xc0\x7f\xf8\x07\xc0\x7f\xf8\x07'\
b'\xc0\x7f\xf8\x07\xc0\x7f\xf8\x07\xc0\x7f\xf8\x07\xc0\x3f\xf8\x07'\
b'\xc0\x3f\xf8\x07\xe0\x3f\xf8\x07\xe0\x1f\xf0\x07\xe0\x0f\xf0\x07'\
b'\xf0\x07\xe0\x07\xf0\x01\x80\x07\xf8\x00\x00\x07\xfc\x00\x00\x07'\
b'\xfe\x00\x04\x07\xff\x00\x0c\x07\xff\x80\x1c\x07\xff\xfc\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x1f\xff'\
b'\xff\xc0\x03\xff\xff\x80\x00\xff\xfe\x00\x00\x7f\xfc\x00\x00\x3f'\
b'\xfc\x00\x00\x1f\xf8\x07\xe0\x1f\xf0\x0f\xf8\x0f\xf0\x1f\xf8\x0f'\
b'\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07\xe0\x00\x00\x07'\
b'\xe0\x00\x00\x07\xc0\x00\x00\x07\xc0\x00\x00\x07\xc0\x00\x00\x07'\
b'\xe0\x00\x00\x07\xe0\x3f\xff\xff\xe0\x3f\xff\xff\xe0\x3f\xff\xff'\
b'\xe0\x1f\xff\xff\xf0\x0f\xff\x7f\xf0\x07\xfe\x3f\xf8\x01\xf0\x1f'\
b'\xfc\x00\x00\x0f\xfe\x00\x00\x0f\xff\x00\x00\x1f\xff\x80\x00\x7f'\
b'\xff\xe0\x01\xff\xff\xff\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\x80\x7f\xff\xfe\x00\x0f\xff\xf8\x00\x07\xff\xf0\x00\x01'\
b'\xff\xe0\x00\x01\xff\xc0\x00\x03\xff\xc0\x1f\x83\xff\xc0\x7f\xe7'\
b'\xff\x80\x7f\xe7\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xc0\x00\x00\x7f\xc0\x00\x00\x7f'\
b'\xc0\x00\x00\x7f\xc0\x00\x00\x7f\xc0\x00\x00\x7f\xe0\x00\x00\x7f'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x3f\xc3'\
b'\xff\x80\x0f\x01\xff\x00\x00\x01\xfc\x00\x00\x01\xfc\x00\x00\x01'\
b'\xf8\x00\x00\x3f\xf0\x0f\x80\x7f\xf0\x1f\xe0\x7f\xf0\x3f\xe0\x3f'\
b'\xf0\x3f\xe0\x3f\xf0\x3f\xf0\x3f\xf0\x3f\xe0\x3f\xf0\x1f\xe0\x3f'\
b'\xf0\x1f\xc0\x7f\xf8\x07\x00\x7f\xf8\x00\x00\xff\xfc\x00\x01\xff'\
b'\xfe\x00\x03\xff\xfc\x00\x07\xff\xf8\x00\x1f\xff\xf0\x7f\xff\xff'\
b'\xf0\x7f\xff\xff\xf0\x3f\xff\xff\xf0\x00\x1f\xff\xf0\x00\x00\x7f'\
b'\xf0\x00\x00\x1f\xf8\x00\x00\x0f\xfc\x00\x00\x0f\xf0\x00\x00\x07'\
b'\xe0\x7f\x80\x07\xe0\xff\xfe\x07\xc0\xff\xfe\x07\xc0\xff\xfe\x07'\
b'\xc0\xff\xfe\x07\xc0\x3f\xf8\x07\xc0\x00\x00\x0f\xe0\x00\x00\x0f'\
b'\xf0\x00\x00\x1f\xf8\x00\x00\x7f\xfe\x00\x00\xff\xff\xc0\x0f\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\x83\xff\xe0\x1e\x00\x7f\xe0\x18\x00\x3f'\
b'\xe0\x10\x00\x1f\xe0\x00\x00\x1f\xe0\x00\x00\x0f\xe0\x01\xf0\x0f'\
b'\xe0\x07\xf8\x07\xe0\x0f\xf8\x07\xe0\x0f\xf8\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xfc\x7f\xff\xff\xf8\x1f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xe0\x07\xff\xff\xf0\x07\xff\xff\xf0\x0f\xff'\
b'\xff\xf8\x1f\xff\xff\xfc\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xfc\x00\x0f\xff\xfc\x00\x0f\xff\xfc\x00\x0f\xff\xfc\x00\x0f\xff'\
b'\xfc\x00\x0f\xff\xfc\x00\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff\xf8\x00\x00\x1f'\
b'\xf8\x00\x00\x1f\xf8\x00\x00\x1f\xf8\x00\x00\x1f\xf8\x00\x00\x1f'\
b'\xf8\x00\x00\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc3\xff'\
b'\xff\xff\x80\xff\xff\xff\x00\xff\xff\xff\x00\x7f\xff\xff\x00\x7f'\
b'\xff\xff\x00\x7f\xff\xff\x00\xff\xff\xff\x81\xff\xff\xff\xe3\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x00\x00\xff\xfe\x00\x00\xff'\
b'\xfe\x00\x00\xff\xfe\x00\x00\xff\xfe\x00\x00\xff\xfe\x00\x00\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff'\
b'\xfd\xff\x80\xff\xf8\xff\x00\xff\xf0\xff\x00\xff\xf0\x3c\x01\xff'\
b'\xe0\x00\x01\xff\xc0\x00\x03\xff\xe0\x00\x07\xff\xf0\x00\x0f\xff'\
b'\xfc\x00\x1f\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xf8\x07\xe0\x1f\xf0\x0f\xe0\x1f\xe0\x1f\xe0\x1f\xc0\x3f'\
b'\xe0\x1f\x80\x7f\xe0\x1f\x00\xff\xe0\x1e\x01\xff\xe0\x1c\x03\xff'\
b'\xe0\x18\x07\xff\xe0\x10\x0f\xff\xe0\x00\x1f\xff\xe0\x00\x3f\xff'\
b'\xe0\x00\x1f\xff\xe0\x00\x0f\xff\xe0\x00\x07\xff\xe0\x00\x07\xff'\
b'\xe0\x0c\x03\xff\xe0\x1c\x01\xff\xe0\x1e\x00\xff\xe0\x1f\x00\xff'\
b'\xe0\x1f\x80\x7f\xe0\x1f\x80\x3f\xe0\x1f\xc0\x1f\xe0\x1f\xe0\x1f'\
b'\xe0\x1f\xf0\x0f\xe0\x1f\xf0\x07\xe0\x1f\xf8\x03\xe0\x1f\xfc\x03'\
b'\xe0\x1f\xfe\x01\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xf0\x00\x0f\xff\xf0\x00\x0f\xff\xf0\x00\x0f\xff\xf0\x00\x0f\xff'\
b'\xf0\x00\x0f\xff\xf8\x00\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xf0\x00\x00\x0f\xf0\x00\x00\x0f\xf0\x00\x00\x0f'\
b'\xf0\x00\x00\x0f\xf0\x00\x00\x0f\xf0\x00\x00\x0f\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe1\xfc\x1f'\
b'\xc0\xc0\x70\x0f\xc0\x00\x20\x07\xc0\x00\x00\x03\xc0\x00\x00\x03'\
b'\xc0\x00\x00\x03\xc0\x78\x07\x01\xc0\x78\x0f\x01\xc0\x78\x0f\x01'\
b'\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01'\
b'\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01'\
b'\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01'\
b'\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01'\
b'\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01\xc0\xf8\x1f\x01'\
b'\xc0\xf8\x1f\x01\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\x83\xff\xe0\x1e\x00\xff\xe0\x18\x00\x3f'\
b'\xe0\x10\x00\x1f\xe0\x00\x00\x1f\xe0\x00\x00\x0f\xe0\x01\xe0\x0f'\
b'\xe0\x07\xf8\x0f\xe0\x0f\xf8\x07\xe0\x0f\xf8\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07'\
b'\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xe0\x1f\xfc\x07\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x1f\xff'\
b'\xff\xc0\x03\xff\xff\x00\x00\xff\xfe\x00\x00\x7f\xfc\x00\x00\x3f'\
b'\xf8\x00\x00\x1f\xf0\x03\xc0\x0f\xf0\x0f\xf0\x0f\xe0\x1f\xf8\x07'\
b'\xe0\x1f\xf8\x07\xc0\x3f\xfc\x03\xc0\x3f\xfc\x03\xc0\x7f\xfe\x03'\
b'\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03'\
b'\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03\xc0\x3f\xfc\x03\xc0\x3f\xfc\x03'\
b'\xe0\x1f\xf8\x07\xe0\x1f\xf8\x07\xf0\x0f\xe0\x0f\xf0\x01\x80\x0f'\
b'\xf8\x00\x00\x1f\xfc\x00\x00\x3f\xfe\x00\x00\x7f\xff\x00\x01\xff'\
b'\xff\xc0\x03\xff\xff\xfe\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\x07\xff\xe0\x3c\x00\xff\xe0\x30\x00\x7f'\
b'\xe0\x20\x00\x3f\xe0\x00\x00\x1f\xe0\x00\x00\x0f\xe0\x03\xe0\x0f'\
b'\xe0\x07\xf8\x07\xe0\x0f\xf8\x07\xe0\x1f\xfc\x03\xe0\x1f\xfc\x03'\
b'\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03'\
b'\xe0\x3f\xfe\x03\xe0\x3f\xfe\x03\xe0\x1f\xfe\x03\xe0\x1f\xfe\x03'\
b'\xe0\x1f\xfe\x03\xe0\x1f\xfc\x03\xe0\x1f\xfc\x03\xe0\x0f\xf8\x07'\
b'\xe0\x07\xf0\x07\xe0\x01\xc0\x0f\xe0\x00\x00\x0f\xe0\x00\x00\x1f'\
b'\xe0\x00\x00\x3f\xe0\x10\x00\x7f\xe0\x18\x01\xff\xe0\x1f\x9f\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xe0\x1f\xff\xff'\
b'\xe0\x1f\xff\xff\xe0\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x7f\xff'\
b'\xff\x80\x0c\x07\xfe\x00\x04\x07\xfc\x00\x00\x07\xf8\x00\x00\x07'\
b'\xf0\x00\x00\x07\xf0\x07\xc0\x07\xe0\x1f\xf0\x07\xe0\x1f\xf0\x07'\
b'\xc0\x3f\xf8\x07\xc0\x3f\xf8\x07\xc0\x7f\xf8\x07\xc0\x7f\xfc\x07'\
b'\xc0\x7f\xfc\x07\xc0\x7f\xfc\x07\xc0\x7f\xfc\x07\xc0\x7f\xfc\x07'\
b'\xc0\x7f\xfc\x07\xc0\x7f\xf8\x07\xc0\x3f\xf8\x07\xc0\x3f\xf8\x07'\
b'\xc0\x3f\xf8\x07\xe0\x1f\xf0\x07\xe0\x0f\xe0\x07\xf0\x03\xc0\x07'\
b'\xf0\x00\x00\x07\xf8\x00\x00\x07\xfc\x00\x00\x07\xff\x00\x04\x07'\
b'\xff\x80\x1c\x07\xff\xf8\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07'\
b'\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07'\
b'\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xe0\xff\xfc\x07\x80\x1f\xfc\x07\x00\x07'\
b'\xfc\x06\x00\x03\xfc\x04\x00\x03\xfc\x00\x00\x07\xfc\x00\x1c\x07'\
b'\xfc\x00\xff\x0f\xfc\x01\xff\x8f\xfc\x03\xff\xdf\xfc\x03\xff\xff'\
b'\xfc\x03\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff'\
b'\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff'\
b'\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff'\
b'\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff'\
b'\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x0f\xff'\
b'\xff\xc0\x00\xff\xff\x00\x00\x3f\xfe\x00\x00\x1f\xfc\x00\x00\x0f'\
b'\xf8\x00\x00\x0f\xf8\x07\xf0\x1f\xf8\x0f\xfc\x1f\xf8\x1f\xfe\x3f'\
b'\xf8\x0f\xff\x7f\xf8\x07\xff\xff\xf8\x00\xff\xff\xfc\x00\x1f\xff'\
b'\xfe\x00\x03\xff\xff\x00\x00\xff\xff\x80\x00\x3f\xff\xf0\x00\x1f'\
b'\xff\xfe\x00\x0f\xff\xff\xc0\x0f\xff\xff\xf8\x07\xfd\xff\xfc\x07'\
b'\xf8\xff\xfc\x07\xf8\x7f\xfc\x07\xf0\x1f\xf8\x07\xe0\x03\xe0\x0f'\
b'\xe0\x00\x00\x0f\xf0\x00\x00\x1f\xf8\x00\x00\x3f\xfe\x00\x00\x7f'\
b'\xff\x80\x01\xff\xff\xfe\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfc\x3f\xff\xff\xc0\x3f\xff'\
b'\xff\xc0\x3f\xff\xff\xc0\x3f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff'\
b'\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xf0\x00\x00\x1f\xf0\x00\x00\x1f'\
b'\xf0\x00\x00\x1f\xf0\x00\x00\x1f\xf0\x00\x00\x1f\xf0\x00\x00\x1f'\
b'\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x7f\xff'\
b'\xff\x80\x7f\xff\xff\x80\x7f\xff\xff\x80\x7f\xff\xff\x80\x7f\xff'\
b'\xff\x80\x7f\xff\xff\x80\x7f\xff\xff\x80\x7f\xff\xff\x80\x7f\xff'\
b'\xff\x80\x7f\xff\xff\x80\x7f\xff\xff\xc0\x7f\xff\xff\xc0\x3f\xef'\
b'\xff\xc0\x3f\x8f\xff\xc0\x0c\x0f\xff\xe0\x00\x07\xff\xe0\x00\x07'\
b'\xff\xf0\x00\x07\xff\xf8\x00\x1f\xff\xfc\x00\x7f\xff\xff\xcf\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f'\
b'\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f'\
b'\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f'\
b'\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f'\
b'\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f\xe0\x1f\xf8\x0f'\
b'\xe0\x1f\xf0\x0f\xf0\x1f\xf0\x0f\xf0\x0f\xe0\x0f\xf0\x03\x00\x0f'\
b'\xf8\x00\x00\x0f\xf8\x00\x00\x0f\xfc\x00\x04\x07\xfe\x00\x1c\x07'\
b'\xff\x80\x3c\x07\xff\xf1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x3f\xfe\x03\xc0\x3f\xfe\x03'\
b'\xe0\x3f\xfe\x07\xe0\x1f\xfc\x07\xf0\x1f\xfc\x07\xf0\x1f\xfc\x0f'\
b'\xf0\x0f\xfc\x0f\xf8\x0f\xf8\x0f\xf8\x07\xf8\x1f\xf8\x07\xf8\x1f'\
b'\xfc\x07\xf0\x1f\xfc\x03\xf0\x3f\xfe\x03\xe0\x3f\xfe\x03\xe0\x7f'\
b'\xfe\x01\xe0\x7f\xff\x01\xc0\x7f\xff\x00\xc0\xff\xff\x80\x80\xff'\
b'\xff\x80\x81\xff\xff\x80\x01\xff\xff\xc0\x03\xff\xff\xc0\x03\xff'\
b'\xff\xc0\x03\xff\xff\xe0\x07\xff\xff\xe0\x07\xff\xff\xf0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xf0\x1f\xff\xff\xf8\x1f\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\x81\xff\xff\x81\x81\xff\xff\x81\x81\xff\xff\x81\x80\xfc\x3f\x81'\
b'\x80\xfc\x1f\x81\xc0\xfc\x1f\x81\xc0\xfc\x1f\x81\xc0\xf8\x0f\x83'\
b'\xc0\xf8\x0f\x83\xc0\x78\x0f\x03\xc0\x78\x07\x03\xe0\x70\x07\x03'\
b'\xe0\x70\x07\x03\xe0\x70\x07\x03\xe0\x70\x03\x07\xe0\x20\x83\x07'\
b'\xf0\x20\x83\x07\xf0\x21\x80\x07\xf0\x01\xc0\x07\xf0\x01\xc0\x07'\
b'\xf0\x01\xc0\x0f\xf8\x03\xc0\x0f\xf8\x03\xe0\x0f\xf8\x03\xe0\x0f'\
b'\xf8\x03\xe0\x0f\xf8\x07\xe0\x1f\xfc\x07\xf0\x1f\xfc\x07\xf0\x1f'\
b'\xfc\x07\xf0\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x3f\xf8\x07\xe0\x1f\xf8\x0f'\
b'\xf0\x1f\xf0\x0f\xf0\x0f\xe0\x1f\xf8\x07\xe0\x3f\xfc\x03\xc0\x3f'\
b'\xfc\x03\xc0\x7f\xfe\x01\x80\xff\xff\x00\x00\xff\xff\x80\x01\xff'\
b'\xff\x80\x03\xff\xff\xc0\x03\xff\xff\xe0\x07\xff\xff\xe0\x0f\xff'\
b'\xff\xf0\x0f\xff\xff\xe0\x07\xff\xff\xe0\x07\xff\xff\xc0\x03\xff'\
b'\xff\x80\x01\xff\xff\x00\x00\xff\xff\x01\x00\xff\xfe\x01\x80\x7f'\
b'\xfc\x03\xc0\x3f\xfc\x07\xe0\x1f\xf8\x07\xe0\x1f\xf0\x0f\xf0\x0f'\
b'\xe0\x1f\xf8\x07\xe0\x1f\xf8\x07\xc0\x3f\xfc\x03\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xc0\x7f\xfc\x07\xe0\x3f\xfc\x07\xe0\x3f\xfc\x07\xe0\x1f\xfc\x07'\
b'\xf0\x1f\xfc\x0f\xf0\x1f\xf8\x0f\xf8\x0f\xf8\x0f\xf8\x0f\xf8\x1f'\
b'\xf8\x07\xf8\x1f\xfc\x07\xf0\x1f\xfc\x07\xf0\x3f\xfe\x03\xf0\x3f'\
b'\xfe\x03\xe0\x7f\xfe\x01\xe0\x7f\xff\x01\xc0\x7f\xff\x01\xc0\xff'\
b'\xff\x80\xc0\xff\xff\x80\x80\xff\xff\xc0\x01\xff\xff\xc0\x01\xff'\
b'\xff\xc0\x01\xff\xff\xe0\x03\xff\xff\xe0\x03\xff\xff\xf0\x07\xff'\
b'\xff\xf0\x07\xff\xff\xf0\x07\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xf8\x0f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf0\x3f\xff'\
b'\xe7\xe0\x3f\xff\xe7\xc0\x3f\xff\xc0\x00\x7f\xff\xc0\x00\xff\xff'\
b'\x80\x00\xff\xff\x80\x01\xff\xff\xe0\x07\xff\xff\xf8\x1f\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x00\x00\x0f\xf0\x00\x00\x0f'\
b'\xf0\x00\x00\x0f\xf0\x00\x00\x0f\xf0\x00\x00\x0f\xf0\x00\x00\x1f'\
b'\xff\xff\xc0\x3f\xff\xff\xc0\x7f\xff\xff\x80\x7f\xff\xff\x00\xff'\
b'\xff\xfe\x01\xff\xff\xfc\x03\xff\xff\xf8\x07\xff\xff\xf0\x0f\xff'\
b'\xff\xe0\x1f\xff\xff\xe0\x3f\xff\xff\xc0\x3f\xff\xff\x80\x7f\xff'\
b'\xff\x00\xff\xff\xfe\x01\xff\xff\xfc\x03\xff\xff\xf8\x07\xff\xff'\
b'\xf0\x0f\xff\xff\xe0\x00\x00\x03\xe0\x00\x00\x03\xc0\x00\x00\x03'\
b'\xc0\x00\x00\x03\xc0\x00\x00\x03\xc0\x00\x00\x03\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x80\x1f'\
b'\xff\xfc\x00\x1f\xff\xf8\x00\x1f\xff\xf0\x00\x1f\xff\xe0\x00\x1f'\
b'\xff\xc0\x00\x3f\xff\xc0\x3f\xff\xff\x80\x7f\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x01\xff\xff'\
b'\xfe\x01\xff\xff\xc0\x03\xff\xff\xc0\x07\xff\xff\xc0\x0f\xff\xff'\
b'\xc0\x0f\xff\xff\xc0\x07\xff\xff\xc0\x03\xff\xff\xfe\x03\xff\xff'\
b'\xff\x01\xff\xff\xff\x01\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\xff\xff'\
b'\xff\x80\xff\xff\xff\x80\xff\xff\xff\x80\x7f\xff\xff\x80\x3f\xff'\
b'\xff\xc0\x00\x3f\xff\xc0\x00\x3f\xff\xe0\x00\x3f\xff\xf0\x00\x3f'\
b'\xff\xfc\x00\x3f\xff\xff\x80\x3f\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff\xff\xf8\x1f\xff'\
b'\xff\xf8\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x07\xff\xff'\
b'\xe0\x01\xff\xff\xe0\x00\x7f\xff\xe0\x00\x3f\xff\xe0\x00\x1f\xff'\
b'\xe0\x00\x1f\xff\xff\xf0\x0f\xff\xff\xf8\x0f\xff\xff\xf8\x0f\xff'\
b'\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff'\
b'\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff'\
b'\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff'\
b'\xff\xfe\x03\xff\xff\xff\x00\x1f\xff\xff\x00\x1f\xff\xff\xc0\x1f'\
b'\xff\xff\xc0\x1f\xff\xff\x80\x1f\xff\xff\x00\x1f\xff\xfe\x01\xff'\
b'\xff\xfe\x03\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff'\
b'\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff'\
b'\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff\xff\xfc\x07\xff'\
b'\xff\xfc\x07\xff\xff\xf8\x07\xff\xff\xf8\x0f\xff\xff\xf0\x0f\xff'\
b'\xe0\x00\x0f\xff\xe0\x00\x1f\xff\xe0\x00\x3f\xff\xe0\x00\x7f\xff'\
b'\xe0\x00\xff\xff\xe0\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x81\xff\xdf\xfc\x00\x7f\x8f'\
b'\xf8\x00\x1f\x07\xf0\x00\x00\x03\xe0\x00\x00\x03\xf0\x00\x00\x07'\
b'\xf8\x3e\x00\x0f\xfc\xff\x80\x1f\xff\xff\xe0\x7f\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b''
BITMAP = memoryview(_BITMAP)
| 72.508125 | 106 | 0.67127 | 26,974 | 116,013 | 2.887002 | 0.003633 | 0.95824 | 1.180676 | 1.429386 | 0.946092 | 0.920436 | 0.889694 | 0.857436 | 0.822508 | 0.791304 | 0 | 0.113283 | 0.068734 | 116,013 | 1,599 | 107 | 72.553471 | 0.607503 | 0.001164 | 0 | 0.545169 | 0 | 0 | 0.874907 | 0.874881 | 0 | 1 | 0.000104 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
04ba09daa4833acbd93990ac44a67e6868f159b1 | 37 | py | Python | processors/flagging_processor.py | ninastijepovic/MasterThesis | 2579f1e74c0ce404f350a6d441e273b6aef4eadc | [
"MIT"
] | null | null | null | processors/flagging_processor.py | ninastijepovic/MasterThesis | 2579f1e74c0ce404f350a6d441e273b6aef4eadc | [
"MIT"
] | null | null | null | processors/flagging_processor.py | ninastijepovic/MasterThesis | 2579f1e74c0ce404f350a6d441e273b6aef4eadc | [
"MIT"
] | null | null | null | import os
#TODO
print('Hello Flags')
| 9.25 | 20 | 0.72973 | 6 | 37 | 4.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.135135 | 37 | 3 | 21 | 12.333333 | 0.84375 | 0.108108 | 0 | 0 | 0 | 0 | 0.34375 | 0 | 0 | 0 | 0 | 0.333333 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0.5 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 7 |
04ba28a9f7dc5fff73bd97608b78e2d4c11ff423 | 4,741 | py | Python | tests/test_apis.py | imperial-genomics-facility/IGFPortal | 0a61ecbfc1ac71775ad12d7cf13d09512ad71380 | [
"Apache-2.0"
] | null | null | null | tests/test_apis.py | imperial-genomics-facility/IGFPortal | 0a61ecbfc1ac71775ad12d7cf13d09512ad71380 | [
"Apache-2.0"
] | null | null | null | tests/test_apis.py | imperial-genomics-facility/IGFPortal | 0a61ecbfc1ac71775ad12d7cf13d09512ad71380 | [
"Apache-2.0"
] | null | null | null | import unittest, json
from app import appbuilder, db
from app.interop_data_api import search_interop_for_run
from app.interop_data_api import add_interop_data
from app.interop_data_api import edit_interop_data
from app.interop_data_api import add_or_edit_interop_data
from app.pre_demultiplexing_data_api import search_predemultiplexing_data
from app.pre_demultiplexing_data_api import add_predemultiplexing_data
from app.pre_demultiplexing_data_api import edit_predemultiplexing_data
from app.pre_demultiplexing_data_api import add_or_edit_predemultiplexing_data
class TestApiCase(unittest.TestCase):
def setUp(self):
db.create_all()
self.json_file = "data/interop_example.json"
self.demult_file = "data/demultiplexing_example.json"
def tearDown(self):
db.drop_all()
def test_search_interop_for_run(self):
result = \
search_interop_for_run(run_name='AAAA')
self.assertTrue(result is None)
def test_add_interop_data(self):
with open(self.json_file, 'r') as fp:
json_data = json.load(fp)
run_name = json_data.get("run_name")
result = \
search_interop_for_run(run_name=run_name)
self.assertTrue(result is None)
add_interop_data(run_data=json_data)
result = \
search_interop_for_run(run_name=run_name)
self.assertTrue(result is not None)
def test_edit_interop_data(self):
with open(self.json_file, 'r') as fp:
json_data = json.load(fp)
run_name = json_data.get("run_name")
add_interop_data(run_data=json_data)
json_data['table_data'] = "AAAAA"
edit_interop_data(run_data=json_data)
result = \
search_interop_for_run(run_name=run_name)
self.assertEqual(result.table_data, "AAAAA")
def test_add_or_edit_interop_data(self):
with open(self.json_file, 'r') as fp:
json_data = json.load(fp)
run_name = json_data.get("run_name")
add_or_edit_interop_data(run_data=json_data)
result = \
search_interop_for_run(run_name=run_name)
self.assertTrue(result is not None)
json_data['table_data'] = "AAAAA"
add_or_edit_interop_data(run_data=json_data)
result = \
search_interop_for_run(run_name=run_name)
self.assertEqual(result.table_data, "AAAAA")
def test_search_predemultiplexing_data(self):
result = \
search_predemultiplexing_data(
run_name="AAAA",
samplesheet_tag="BBBB")
self.assertTrue(result is None)
def test_add_predemultiplexing_data(self):
result = \
search_predemultiplexing_data(
run_name="AAAA",
samplesheet_tag="BBBB")
self.assertTrue(result is None)
with open(self.demult_file, 'r') as fp:
json_data = json.load(fp)
add_predemultiplexing_data(data=json_data)
result = \
search_predemultiplexing_data(
run_name="AAAA",
samplesheet_tag="BBBB")
self.assertTrue(result is not None)
def test_edit_predemultiplexing_data(self):
with open(self.demult_file, 'r') as fp:
json_data = json.load(fp)
add_predemultiplexing_data(data=json_data)
result = \
search_predemultiplexing_data(
run_name="AAAA",
samplesheet_tag="BBBB")
self.assertTrue(result is not None)
json_data["flowcell_cluster_plot"] = "CCCC"
edit_predemultiplexing_data(data=json_data)
result = \
search_predemultiplexing_data(
run_name="AAAA",
samplesheet_tag="BBBB")
self.assertEqual(result.flowcell_cluster_plot, "CCCC")
def test_add_or_edit_predemultiplexing_data(self):
result = \
search_predemultiplexing_data(
run_name="AAAA",
samplesheet_tag="BBBB")
self.assertTrue(result is None)
with open(self.demult_file, 'r') as fp:
json_data = json.load(fp)
add_or_edit_predemultiplexing_data(data=json_data)
result = \
search_predemultiplexing_data(
run_name="AAAA",
samplesheet_tag="BBBB")
self.assertTrue(result is not None)
json_data["flowcell_cluster_plot"] = "CCCC"
add_or_edit_predemultiplexing_data(data=json_data)
result = \
search_predemultiplexing_data(
run_name="AAAA",
samplesheet_tag="BBBB")
self.assertEqual(result.flowcell_cluster_plot, "CCCC")
if __name__ == '__main__':
unittest.main() | 37.928 | 78 | 0.652605 | 585 | 4,741 | 4.904274 | 0.100855 | 0.060997 | 0.046009 | 0.076682 | 0.892994 | 0.85779 | 0.844197 | 0.811433 | 0.75183 | 0.72534 | 0 | 0 | 0.265134 | 4,741 | 125 | 79 | 37.928 | 0.823479 | 0 | 0 | 0.736842 | 0 | 0 | 0.05504 | 0.020877 | 0 | 0 | 0 | 0 | 0.122807 | 1 | 0.087719 | false | 0 | 0.087719 | 0 | 0.184211 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
04ffb4fc46967cb82298cd2b01ee742954c7faac | 145 | py | Python | MLPcluster/expBuildLr.py | ryan75195/Deep-Learning | 899e593cc8e8a7857b93b8d2885250e4b0cb76b6 | [
"MIT"
] | null | null | null | MLPcluster/expBuildLr.py | ryan75195/Deep-Learning | 899e593cc8e8a7857b93b8d2885250e4b0cb76b6 | [
"MIT"
] | null | null | null | MLPcluster/expBuildLr.py | ryan75195/Deep-Learning | 899e593cc8e8a7857b93b8d2885250e4b0cb76b6 | [
"MIT"
] | null | null | null | import train_model
#resnet18
train_model.run_experiment(34,1e-3,0)
train_model.run_experiment(34,1e-4,0)
train_model.run_experiment(34,1e-5,0)
| 18.125 | 37 | 0.813793 | 28 | 145 | 3.964286 | 0.428571 | 0.36036 | 0.351351 | 0.621622 | 0.747748 | 0.747748 | 0.504505 | 0 | 0 | 0 | 0 | 0.124088 | 0.055172 | 145 | 7 | 38 | 20.714286 | 0.686131 | 0.055172 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8e4103c8a3283ea4881e66a807c0380d744da8f3 | 5,351 | py | Python | basketball_reference_web_scraper/client.py | Yotamho/basketball_reference_web_scraper | 53a1b4fa4a2e8e23c3be3045956dffc80b395b0b | [
"MIT"
] | null | null | null | basketball_reference_web_scraper/client.py | Yotamho/basketball_reference_web_scraper | 53a1b4fa4a2e8e23c3be3045956dffc80b395b0b | [
"MIT"
] | null | null | null | basketball_reference_web_scraper/client.py | Yotamho/basketball_reference_web_scraper | 53a1b4fa4a2e8e23c3be3045956dffc80b395b0b | [
"MIT"
] | 1 | 2020-03-18T07:16:43.000Z | 2020-03-18T07:16:43.000Z | import os
import requests
from basketball_reference_web_scraper import http_client
from basketball_reference_web_scraper.errors import InvalidSeason, InvalidDate
from basketball_reference_web_scraper.json_encoders import BasketballReferenceJSONEncoder
from basketball_reference_web_scraper.output import box_scores_to_csv, schedule_to_csv, players_season_totals_to_csv, \
players_advanced_season_totals_to_csv, team_box_scores_to_csv, play_by_play_to_csv
from basketball_reference_web_scraper.output import output
from basketball_reference_web_scraper.data import TEAM_TO_TEAM_ABBREVIATION, OutputType
def player_box_scores(day, month, year, output_type=None, output_file_path=None, output_write_option=None,
json_options=None):
try:
values = http_client.player_box_scores(day=day, month=month, year=year)
except requests.exceptions.HTTPError as http_error:
if http_error.response.status_code == requests.codes.not_found:
raise InvalidDate(day=day, month=month, year=year)
else:
raise http_error
return output(
values=values,
output_type=output_type,
output_file_path=output_file_path,
output_write_option=output_write_option,
csv_writer=box_scores_to_csv,
encoder=BasketballReferenceJSONEncoder,
json_options=json_options,
)
def season_schedule(season_end_year, output_type=None, output_file_path=None, output_write_option=None,
json_options=None):
try:
values = http_client.season_schedule(season_end_year)
except requests.exceptions.HTTPError as http_error:
# https://github.com/requests/requests/blob/master/requests/status_codes.py#L58
if http_error.response.status_code == requests.codes.not_found:
raise InvalidSeason(season_end_year=season_end_year)
else:
raise http_error
return output(
values=values,
output_type=output_type,
output_file_path=output_file_path,
output_write_option=output_write_option,
csv_writer=schedule_to_csv,
encoder=BasketballReferenceJSONEncoder,
json_options=json_options,
)
def players_season_totals(season_end_year, output_type=None, output_file_path=None, output_write_option=None,
json_options=None):
try:
values = http_client.players_season_totals(season_end_year)
except requests.exceptions.HTTPError as http_error:
if http_error.response.status_code == requests.codes.not_found:
raise InvalidSeason(season_end_year=season_end_year)
else:
raise http_error
return output(
values=values,
output_type=output_type,
output_file_path=output_file_path,
output_write_option=output_write_option,
csv_writer=players_season_totals_to_csv,
encoder=BasketballReferenceJSONEncoder,
json_options=json_options,
)
def players_advanced_season_totals(season_end_year, output_type=None, output_file_path=None, output_write_option=None,
json_options=None):
try:
values = http_client.players_advanced_season_totals(season_end_year)
except requests.exceptions.HTTPError as http_error:
if http_error.response.status_code == requests.codes.not_found:
raise InvalidSeason(season_end_year=season_end_year)
else:
raise http_error
return output(
values=values,
output_type=output_type,
output_file_path=output_file_path,
output_write_option=output_write_option,
csv_writer=players_advanced_season_totals_to_csv,
encoder=BasketballReferenceJSONEncoder,
json_options=json_options,
)
def team_box_scores(day, month, year, output_type=None, output_file_path=None, output_write_option=None,
json_options=None):
try:
values = http_client.team_box_scores(day=day, month=month, year=year)
except requests.exceptions.HTTPError as http_error:
if http_error.response.status_code == requests.codes.not_found:
raise InvalidDate(day=day, month=month, year=year)
else:
raise http_error
return output(
values=values,
output_type=output_type,
output_file_path=output_file_path,
output_write_option=output_write_option,
csv_writer=team_box_scores_to_csv,
encoder=BasketballReferenceJSONEncoder,
json_options=json_options,
)
def play_by_play(home_team, day, month, year, output_type=None, output_file_path=None, output_write_option=None,
json_options=None):
try:
values = http_client.play_by_play(home_team=home_team, day=day, month=month, year=year)
except requests.exceptions.HTTPError as http_error:
if http_error.response.status_code == requests.codes.not_found:
raise InvalidDate(day=day, month=month, year=year)
else:
raise http_error
return output(
values=values,
output_type=output_type,
output_file_path=output_file_path,
output_write_option=output_write_option,
csv_writer=play_by_play_to_csv,
encoder=BasketballReferenceJSONEncoder,
json_options=json_options,
)
| 40.233083 | 119 | 0.723416 | 668 | 5,351 | 5.392216 | 0.107784 | 0.049972 | 0.069961 | 0.06663 | 0.916713 | 0.842587 | 0.831483 | 0.802054 | 0.784287 | 0.784287 | 0 | 0.000475 | 0.213418 | 5,351 | 132 | 120 | 40.537879 | 0.85531 | 0.014203 | 0 | 0.717949 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051282 | false | 0 | 0.068376 | 0 | 0.17094 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f3e6d91370c3c748a4ebc0cd0d9c98cb5c746548 | 4,652 | py | Python | myapp/migrations/0002_paper_student_data_teacher_edu_teachers_data.py | sharmin6630/Project-Distribution | 32692653c309b417187ab0299f074a38d1a5bd3e | [
"MIT"
] | null | null | null | myapp/migrations/0002_paper_student_data_teacher_edu_teachers_data.py | sharmin6630/Project-Distribution | 32692653c309b417187ab0299f074a38d1a5bd3e | [
"MIT"
] | 1 | 2021-08-04T15:41:05.000Z | 2021-08-04T15:41:05.000Z | myapp/migrations/0002_paper_student_data_teacher_edu_teachers_data.py | sharmin6630/Project-Distribution | 32692653c309b417187ab0299f074a38d1a5bd3e | [
"MIT"
] | null | null | null | # Generated by Django 3.0.5 on 2021-06-24 08:35
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('myapp', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Teachers_data',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_of_birth', models.DateField(blank=True, default=None, null=True)),
('gender', models.CharField(blank=True, default=None, max_length=20, null=True)),
('address', models.TextField(blank=True, default=None, null=True)),
('mobile_no', models.CharField(blank=True, default=None, max_length=20, null=True)),
('designation', models.TextField(blank=True, default=None, null=True)),
('photos', models.FileField(blank=True, default=None, null=True, upload_to='teacher/')),
('linkedin', models.CharField(blank=True, default=None, max_length=55, null=True)),
('github', models.CharField(blank=True, default=None, max_length=55, null=True)),
('user_id', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Teacher_edu',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('MSc_Institute_name', models.CharField(blank=True, default=None, max_length=55, null=True)),
('MSc_Institute_Country', models.CharField(blank=True, default=None, max_length=55, null=True)),
('MSc_start_date', models.CharField(blank=True, default=None, max_length=55, null=True)),
('MSc_end_date', models.CharField(blank=True, default=None, max_length=55, null=True)),
('Phd_Institute_name', models.CharField(blank=True, default=None, max_length=55, null=True)),
('Phd_Institute_Country', models.CharField(blank=True, default=None, max_length=55, null=True)),
('Phd_start_date', models.CharField(blank=True, default=None, max_length=55, null=True)),
('Phd_end_date', models.CharField(blank=True, default=None, max_length=55, null=True)),
('user_id', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Student_data',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_of_birth', models.DateField(blank=True, default=None, null=True)),
('address', models.TextField(blank=True, default=None, null=True)),
('gender', models.CharField(blank=True, default=None, max_length=20, null=True)),
('mobile_no', models.CharField(blank=True, default=None, max_length=20, null=True)),
('blood_group', models.CharField(blank=True, default=None, max_length=20, null=True)),
('photos', models.FileField(blank=True, default=None, null=True, upload_to='images/')),
('major_cgpa', models.FloatField(blank=True, default=None, max_length=5, null=True)),
('total_cgpa', models.FloatField(blank=True, default=None, max_length=5, null=True)),
('linkedin', models.CharField(blank=True, default=None, max_length=55, null=True)),
('github', models.CharField(blank=True, default=None, max_length=55, null=True)),
('user_id', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Paper',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Research_area', models.CharField(blank=True, default=None, max_length=55, null=True)),
('Published_Paper', models.CharField(blank=True, default=None, max_length=55, null=True)),
('Published_Journal', models.CharField(blank=True, default=None, max_length=55, null=True)),
('user_id', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| 63.726027 | 132 | 0.625537 | 549 | 4,652 | 5.147541 | 0.173042 | 0.092357 | 0.16419 | 0.205237 | 0.886412 | 0.886412 | 0.886412 | 0.886412 | 0.874027 | 0.874027 | 0 | 0.018076 | 0.226999 | 4,652 | 72 | 133 | 64.611111 | 0.767798 | 0.009673 | 0 | 0.545455 | 1 | 0 | 0.096417 | 0.009121 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.045455 | 0 | 0.090909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6d03b368d1fbc5f24308d646cc718adef41907e2 | 188 | py | Python | via/requests_tools/__init__.py | hypothesis/via | 942dea9d829223464896a97f7030872d34e6f071 | [
"BSD-2-Clause"
] | 113 | 2015-02-13T23:22:29.000Z | 2021-09-09T19:42:10.000Z | via/requests_tools/__init__.py | hypothesis/via | 942dea9d829223464896a97f7030872d34e6f071 | [
"BSD-2-Clause"
] | 265 | 2015-02-05T15:32:47.000Z | 2022-03-31T17:05:43.000Z | via/requests_tools/__init__.py | hypothesis/via | 942dea9d829223464896a97f7030872d34e6f071 | [
"BSD-2-Clause"
] | 70 | 2015-04-17T23:52:08.000Z | 2022-03-14T16:50:31.000Z | from via.requests_tools.error_handling import handle_errors
from via.requests_tools.headers import add_request_headers, clean_headers
from via.requests_tools.streaming import stream_bytes
| 47 | 73 | 0.893617 | 28 | 188 | 5.678571 | 0.571429 | 0.132075 | 0.283019 | 0.377358 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.069149 | 188 | 3 | 74 | 62.666667 | 0.908571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6d08135d3766b4ee0155ceeb69f67a07413b8b13 | 117 | py | Python | mmdet/version.py | trisct/DNL-Object-Detection | 136619a555932ebd2051a3de222fe2ac006e6034 | [
"Apache-2.0"
] | null | null | null | mmdet/version.py | trisct/DNL-Object-Detection | 136619a555932ebd2051a3de222fe2ac006e6034 | [
"Apache-2.0"
] | null | null | null | mmdet/version.py | trisct/DNL-Object-Detection | 136619a555932ebd2051a3de222fe2ac006e6034 | [
"Apache-2.0"
] | null | null | null | # GENERATED VERSION FILE
# TIME: Sun Mar 28 15:21:08 2021
__version__ = '1.0.rc0+6ae8884'
short_version = '1.0.rc0'
| 19.5 | 32 | 0.709402 | 21 | 117 | 3.714286 | 0.761905 | 0.205128 | 0.230769 | 0.307692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.232323 | 0.153846 | 117 | 5 | 33 | 23.4 | 0.555556 | 0.452991 | 0 | 0 | 1 | 0 | 0.360656 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6d24bbf38390d0aa7121c0932f7d869a7a0289bb | 3,250 | py | Python | tests/test_episode.py | benlansdell/agent-trainer | 8ae3b77df58d676a4517b48f24f8959a00ba0aa8 | [
"MIT"
] | 71 | 2016-10-06T13:50:28.000Z | 2022-03-04T06:44:38.000Z | tests/test_episode.py | benlansdell/agent-trainer | 8ae3b77df58d676a4517b48f24f8959a00ba0aa8 | [
"MIT"
] | 3 | 2016-10-07T09:55:12.000Z | 2018-01-26T15:20:20.000Z | tests/test_episode.py | benlansdell/agent-trainer | 8ae3b77df58d676a4517b48f24f8959a00ba0aa8 | [
"MIT"
] | 32 | 2016-10-06T21:27:50.000Z | 2021-11-09T11:01:30.000Z | from unittest import TestCase
from mock import Mock
from agent.game.action import Action
from agent.trainer.episode import EpisodeActionlessBegginingPreventer
class TestEpisodeActionlessBegginingPreventer(TestCase):
def test_prevent(self):
no_action_preventer = EpisodeActionlessBegginingPreventer(Mock(MAXIMUM_NO_ACTIONS_BEGGINING_EPISODE=3))
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction, random_action_seed=45), Action.TurnLeft)
def test_keep_preventing_until_different_action_is_issued(self):
no_action_preventer = EpisodeActionlessBegginingPreventer(Mock(MAXIMUM_NO_ACTIONS_BEGGINING_EPISODE=3))
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction, random_action_seed=45), Action.TurnLeft)
self.assertEqual(no_action_preventer.prevent(Action.NoAction, random_action_seed=20), Action.BrakeAndTurnRight)
self.assertEqual(no_action_preventer.prevent(Action.Brake), Action.Brake)
def test_do_not_prevent_if_not_in_beggining_sequence(self):
no_action_preventer = EpisodeActionlessBegginingPreventer(Mock(MAXIMUM_NO_ACTIONS_BEGGINING_EPISODE=3))
self.assertEqual(no_action_preventer.prevent(Action.Brake), Action.Brake)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
def test_do_not_prevent_if_beggining_sequence_has_action_before_limit_is_reached(self):
no_action_preventer = EpisodeActionlessBegginingPreventer(Mock(MAXIMUM_NO_ACTIONS_BEGGINING_EPISODE=3))
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.Accelerate), Action.Accelerate)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
self.assertEqual(no_action_preventer.prevent(Action.NoAction), Action.NoAction)
| 70.652174 | 121 | 0.749538 | 349 | 3,250 | 6.681948 | 0.137536 | 0.222127 | 0.196827 | 0.226844 | 0.840051 | 0.840051 | 0.822041 | 0.822041 | 0.822041 | 0.822041 | 0 | 0.003715 | 0.171692 | 3,250 | 45 | 122 | 72.222222 | 0.862556 | 0 | 0 | 0.694444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.638889 | 1 | 0.111111 | false | 0 | 0.111111 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
6d49ee4ce9a6086da0f88bd9f0c2ad35c4b4195e | 66,516 | py | Python | swagger_client/api/lists__segments_api.py | klaviyo/klaviyo-python | 8f95cdaf1469711ab99ecfbfb64ce743451c490d | [
"MIT"
] | 10 | 2021-12-21T02:08:00.000Z | 2022-02-24T05:37:20.000Z | swagger_client/api/lists__segments_api.py | klaviyo/klaviyo-python | 8f95cdaf1469711ab99ecfbfb64ce743451c490d | [
"MIT"
] | 3 | 2022-02-02T09:07:40.000Z | 2022-03-04T15:31:11.000Z | swagger_client/api/lists__segments_api.py | klaviyo/klaviyo-python | 8f95cdaf1469711ab99ecfbfb64ce743451c490d | [
"MIT"
] | 2 | 2021-12-21T02:07:53.000Z | 2022-02-22T08:05:41.000Z | # coding: utf-8
"""
Klaviyo API
Empowering creators to own their destiny # noqa: E501
OpenAPI spec version: 2022.03.29
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class ListsSegmentsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.warned = []
def add_members(self, list_id, **kwargs): # noqa: E501
"""Add Members to a List # noqa: E501
Adds profiles to a list. This endpoint is functionally equivalent to adding profiles to a list via a CSV upload and will immediately add profiles to the list. If you would like to subscribe profiles to a list and use the double opt-in settings for the list, please use the subscribe endpoint. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_members(list_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str list_id: (required)
:param ListIdMembersBody body:
:return: list[InlineResponse2006]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_members_with_http_info(list_id, **kwargs) # noqa: E501
else:
(data) = self.add_members_with_http_info(list_id, **kwargs) # noqa: E501
return data
def add_members_with_http_info(self, list_id, **kwargs): # noqa: E501
"""Add Members to a List # noqa: E501
Adds profiles to a list. This endpoint is functionally equivalent to adding profiles to a list via a CSV upload and will immediately add profiles to the list. If you would like to subscribe profiles to a list and use the double opt-in settings for the list, please use the subscribe endpoint. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_members_with_http_info(list_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str list_id: (required)
:param ListIdMembersBody body:
:return: list[InlineResponse2006]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['list_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_members" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'list_id' is set
if ('list_id' not in params or
params['list_id'] is None):
raise ValueError("Missing the required parameter `list_id` when calling `add_members`") # noqa: E501
collection_formats = {}
path_params = {}
if 'list_id' in params:
path_params['list_id'] = params['list_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/v2/list/{list_id}/members', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[InlineResponse2006]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_list(self, **kwargs): # noqa: E501
"""Create List # noqa: E501
Creates a new list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str list_name:
:return: InlineResponse2004
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_list_with_http_info(**kwargs) # noqa: E501
return data
def create_list_with_http_info(self, **kwargs): # noqa: E501
"""Create List # noqa: E501
Creates a new list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str list_name:
:return: InlineResponse2004
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['list_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'list_name' in params:
form_params.append(('list_name', params['list_name'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/v2/lists', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2004', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_list(self, list_id, **kwargs): # noqa: E501
"""Delete List # noqa: E501
Deletes a list from an account. This is a destructive operation and cannot be undone. It will also remove flow triggers associated with the list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_list(list_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str list_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_list_with_http_info(list_id, **kwargs) # noqa: E501
else:
(data) = self.delete_list_with_http_info(list_id, **kwargs) # noqa: E501
return data
def delete_list_with_http_info(self, list_id, **kwargs):  # noqa: E501
    """Delete List.

    Deletes a list from an account. This is a destructive operation and
    cannot be undone. It will also remove flow triggers associated with
    the list.

    >>> thread = api.delete_list_with_http_info(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :return: None
        If the method is called asynchronously, returns the request
        thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'list_id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_list" % name
            )
    # verify the required parameter 'list_id' is set
    if list_id is None:
        raise ValueError("Missing the required parameter `list_id` when calling `delete_list`")  # noqa: E501

    path_params = {'list_id': list_id}  # noqa: E501
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/v2/list/{list_id}', 'DELETE',
        path_params,
        [],    # no query params
        {},    # no header params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def exclude_globally(self, **kwargs):  # noqa: E501
    """Exclude Profile From All Email.

    Marks a person as excluded from all email. This works the same way
    as manually excluding someone via the excluded people page. Someone
    who is excluded will no longer receive any campaigns or flow emails.

    The call is synchronous by default; pass ``async_req=True`` to get
    the request thread instead.

    >>> thread = api.exclude_globally(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str email:
    :return: InlineResponse2002
        If the method is called asynchronously, returns the request
        thread.
    """
    # Callers of this wrapper always want just the response data.
    kwargs['_return_http_data_only'] = True
    # The underlying call returns the thread itself when async_req is
    # set, so a single return covers both the sync and async paths.
    return self.exclude_globally_with_http_info(**kwargs)  # noqa: E501
def exclude_globally_with_http_info(self, **kwargs):  # noqa: E501
    """Exclude Profile From All Email.

    Marks a person as excluded from all email. This works the same way
    as manually excluding someone via the excluded people page. Someone
    who is excluded will no longer receive any campaigns or flow emails.

    >>> thread = api.exclude_globally_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str email:
    :return: InlineResponse2002
        If the method is called asynchronously, returns the request
        thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'email', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method exclude_globally" % name
            )

    # The email travels as a form field, not as a JSON body.
    form_params = []
    if 'email' in kwargs:
        form_params.append(('email', kwargs['email']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/x-www-form-urlencoded']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/v1/people/exclusions', 'POST',
        {},    # no path params
        [],    # no query params
        header_params,
        body=None,
        post_params=form_params,
        files={},
        response_type='InlineResponse2002',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_global_exclusions(self, **kwargs):  # noqa: E501
    """Get Global Exclusions & Unsubscribes.

    Returns global exclusions/unsubscribes. Global exclusions are
    distinct from list exclusions in that these email addresses will not
    receive any emails from any list. Typically, when someone
    unsubscribes from a campaign, they are only unsubscribed from that
    list and are not globally unsubscribed.

    The call is synchronous by default; pass ``async_req=True`` to get
    the request thread instead.

    >>> thread = api.get_global_exclusions(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str reason: Filter results based on the reason for someone being excluded. The possible values are `unsubscribed`, `bounced`, `invalid_email`, `reported_spam` and `manually_excluded`. Only a single value may be specified at a time. Defaults to return all profiles regardless of reason.
    :param str sort: Sort order to apply to results, either ascending or descending. Valid values are `asc` or `desc`. Defaults to `asc`.
    :param int count: For pagination, the number of results to return. Default = 500
    :param int page: For pagination, which page of results to return. Default = 0
    :return: GlobalExclusionResponseData
        If the method is called asynchronously, returns the request
        thread.
    """
    # Callers of this wrapper always want just the response data.
    kwargs['_return_http_data_only'] = True
    # Single return: the underlying call yields the thread when
    # async_req is set and the data otherwise.
    return self.get_global_exclusions_with_http_info(**kwargs)  # noqa: E501
def get_global_exclusions_with_http_info(self, **kwargs):  # noqa: E501
    """Get Global Exclusions & Unsubscribes.

    Returns global exclusions/unsubscribes. Global exclusions are
    distinct from list exclusions in that these email addresses will not
    receive any emails from any list.

    >>> thread = api.get_global_exclusions_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str reason: Filter results based on the reason for someone being excluded. The possible values are `unsubscribed`, `bounced`, `invalid_email`, `reported_spam` and `manually_excluded`. Only a single value may be specified at a time. Defaults to return all profiles regardless of reason.
    :param str sort: Sort order to apply to results, either ascending or descending. Valid values are `asc` or `desc`. Defaults to `asc`.
    :param int count: For pagination, the number of results to return. Default = 500
    :param int page: For pagination, which page of results to return. Default = 0
    :return: GlobalExclusionResponseData
        If the method is called asynchronously, returns the request
        thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'reason', 'sort', 'count', 'page', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_global_exclusions" % name
            )

    # Build the query string in the endpoint's canonical field order.
    query_params = [(q, kwargs[q])
                    for q in ('reason', 'sort', 'count', 'page')
                    if q in kwargs]  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/v1/people/exclusions', 'GET',
        {},    # no path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='GlobalExclusionResponseData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_list_exclusions(self, list_id, **kwargs):  # noqa: E501
    """Get All Exclusions for a List.

    Gets all of the emails and phone numbers that have been excluded
    from a list along with the exclusion reasons and exclusion time.
    This endpoint uses batching to return the records, so for a large
    list multiple calls will need to be made to get all of the records.

    The call is synchronous by default; pass ``async_req=True`` to get
    the request thread instead.

    >>> thread = api.get_list_exclusions(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param int marker: A marker value returned by a previous GET call. Use this to grab the next batch of records.
    :return: InlineResponse2007
        If the method is called asynchronously, returns the request
        thread.
    """
    # Callers of this wrapper always want just the response data.
    kwargs['_return_http_data_only'] = True
    # Single return: the underlying call yields the thread when
    # async_req is set and the data otherwise.
    return self.get_list_exclusions_with_http_info(list_id, **kwargs)  # noqa: E501
def get_list_exclusions_with_http_info(self, list_id, **kwargs):  # noqa: E501
    """Get All Exclusions for a List.

    Gets all of the emails and phone numbers that have been excluded
    from a list along with the exclusion reasons and exclusion time.
    Records are returned in batches; pass ``marker`` from a previous
    response to fetch the next batch.

    >>> thread = api.get_list_exclusions_with_http_info(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param int marker: A marker value returned by a previous GET call. Use this to grab the next batch of records.
    :return: InlineResponse2007
        If the method is called asynchronously, returns the request
        thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'list_id', 'marker', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_list_exclusions" % name
            )
    # verify the required parameter 'list_id' is set
    if list_id is None:
        raise ValueError("Missing the required parameter `list_id` when calling `get_list_exclusions`")  # noqa: E501

    path_params = {'list_id': list_id}  # noqa: E501
    query_params = []
    if 'marker' in kwargs:
        query_params.append(('marker', kwargs['marker']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/v2/list/{list_id}/exclusions/all', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='InlineResponse2007',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_list_info(self, list_id, **kwargs):  # noqa: E501
    """Get List Info.

    Returns information about a list.

    The call is synchronous by default; pass ``async_req=True`` to get
    the request thread instead.

    >>> thread = api.get_list_info(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :return: InlineResponse2005
        If the method is called asynchronously, returns the request
        thread.
    """
    # Callers of this wrapper always want just the response data.
    kwargs['_return_http_data_only'] = True
    # Single return: the underlying call yields the thread when
    # async_req is set and the data otherwise.
    return self.get_list_info_with_http_info(list_id, **kwargs)  # noqa: E501
def get_list_info_with_http_info(self, list_id, **kwargs):  # noqa: E501
    """Get List Info.

    Returns information about a list.

    >>> thread = api.get_list_info_with_http_info(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :return: InlineResponse2005
        If the method is called asynchronously, returns the request
        thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'list_id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_list_info" % name
            )
    # verify the required parameter 'list_id' is set
    if list_id is None:
        raise ValueError("Missing the required parameter `list_id` when calling `get_list_info`")  # noqa: E501

    path_params = {'list_id': list_id}  # noqa: E501
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/v2/list/{list_id}', 'GET',
        path_params,
        [],    # no query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='InlineResponse2005',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_list_members(self, list_id, **kwargs):  # noqa: E501
    """Check if Profiles Are in a List.

    Checks if one or more emails, phone numbers, or push tokens are in a
    given list. No distinction is made between a person not being in a
    given list, and not being present in Klaviyo at all. Can check up to
    a maximum of 100 emails at a time.

    The call is synchronous by default; pass ``async_req=True`` to get
    the request thread instead.

    >>> thread = api.get_list_members(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param CheckMembershipRequest body:
    :return: CheckMembershipResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Callers of this wrapper always want just the response data.
    kwargs['_return_http_data_only'] = True
    # Single return: the underlying call yields the thread when
    # async_req is set and the data otherwise.
    return self.get_list_members_with_http_info(list_id, **kwargs)  # noqa: E501
def get_list_members_with_http_info(self, list_id, **kwargs):  # noqa: E501
    """Check if Profiles Are in a List.

    Checks if one or more emails, phone numbers, or push tokens are in a
    given list. No distinction is made between a person not being in a
    given list, and not being present in Klaviyo at all. Can check up to
    a maximum of 100 emails at a time.

    >>> thread = api.get_list_members_with_http_info(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param CheckMembershipRequest body:
    :return: CheckMembershipResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'list_id', 'body', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_list_members" % name
            )
    # verify the required parameter 'list_id' is set
    if list_id is None:
        raise ValueError("Missing the required parameter `list_id` when calling `get_list_members`")  # noqa: E501

    path_params = {'list_id': list_id}  # noqa: E501
    # The membership request travels as a JSON body (absent -> None).
    body_params = kwargs.get('body')

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/v2/list/{list_id}/get-members', 'POST',
        path_params,
        [],    # no query params
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='CheckMembershipResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_list_subscriptions(self, list_id, **kwargs):  # noqa: E501
    """Check if Profiles Are in a List and not Suppressed.

    Checks if one or more emails are in a given list and not suppressed.
    No distinction is made between a person not being in a given list,
    and not being present in Klaviyo at all. Can check up to a maximum
    of 100 emails at a time.

    The call is synchronous by default; pass ``async_req=True`` to get
    the request thread instead.

    >>> thread = api.get_list_subscriptions(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param CheckMembershipRequest body:
    :return: CheckMembershipResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Callers of this wrapper always want just the response data.
    kwargs['_return_http_data_only'] = True
    # Single return: the underlying call yields the thread when
    # async_req is set and the data otherwise.
    return self.get_list_subscriptions_with_http_info(list_id, **kwargs)  # noqa: E501
def get_list_subscriptions_with_http_info(self, list_id, **kwargs):  # noqa: E501
    """Check if Profiles Are in a List and not Suppressed.

    Checks if one or more emails are in a given list and not suppressed.
    No distinction is made between a person not being in a given list,
    and not being present in Klaviyo at all. Can check up to a maximum
    of 100 emails at a time.

    >>> thread = api.get_list_subscriptions_with_http_info(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param CheckMembershipRequest body:
    :return: CheckMembershipResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'list_id', 'body', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_list_subscriptions" % name
            )
    # verify the required parameter 'list_id' is set
    if list_id is None:
        raise ValueError("Missing the required parameter `list_id` when calling `get_list_subscriptions`")  # noqa: E501

    path_params = {'list_id': list_id}  # noqa: E501
    # The membership request travels as a JSON body (absent -> None).
    body_params = kwargs.get('body')

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/v2/list/{list_id}/get-list-subscriptions', 'POST',
        path_params,
        [],    # no query params
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='CheckMembershipResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_lists(self, **kwargs):  # noqa: E501
    """Get Lists.

    Returns a listing of all of the lists in an account.

    The call is synchronous by default; pass ``async_req=True`` to get
    the request thread instead.

    >>> thread = api.get_lists(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[InlineResponse2003]
        If the method is called asynchronously, returns the request
        thread.
    """
    # Callers of this wrapper always want just the response data.
    kwargs['_return_http_data_only'] = True
    # Single return: the underlying call yields the thread when
    # async_req is set and the data otherwise.
    return self.get_lists_with_http_info(**kwargs)  # noqa: E501
def get_lists_with_http_info(self, **kwargs):  # noqa: E501
    """Get Lists.

    Returns a listing of all of the lists in an account.

    >>> thread = api.get_lists_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[InlineResponse2003]
        If the method is called asynchronously, returns the request
        thread.
    """
    # This endpoint takes no API parameters; only the transport-level
    # keyword arguments are allowed.
    accepted = {'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_lists" % name
            )

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/v2/lists', 'GET',
        {},    # no path params
        [],    # no query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[InlineResponse2003]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_members(self, list_or_segment_id, **kwargs):  # noqa: E501
    """Get List and Segment Members.

    Gets all of the emails, phone numbers, and push tokens for profiles
    in a given list or segment.

    The call is synchronous by default; pass ``async_req=True`` to get
    the request thread instead.

    >>> thread = api.get_members(list_or_segment_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_or_segment_id: (required)
    :param int marker: A marker value returned by a previous GET call. Use this to grab the next batch of records.
    :return: InlineResponse2008
        If the method is called asynchronously, returns the request
        thread.
    """
    # Callers of this wrapper always want just the response data.
    kwargs['_return_http_data_only'] = True
    # Single return: the underlying call yields the thread when
    # async_req is set and the data otherwise.
    return self.get_members_with_http_info(list_or_segment_id, **kwargs)  # noqa: E501
def get_members_with_http_info(self, list_or_segment_id, **kwargs):  # noqa: E501
    """Get List and Segment Members.

    Gets all of the emails, phone numbers, and push tokens for profiles
    in a given list or segment. Records are returned in batches; pass
    ``marker`` from a previous response to fetch the next batch.

    >>> thread = api.get_members_with_http_info(list_or_segment_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_or_segment_id: (required)
    :param int marker: A marker value returned by a previous GET call. Use this to grab the next batch of records.
    :return: InlineResponse2008
        If the method is called asynchronously, returns the request
        thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'list_or_segment_id', 'marker', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_members" % name
            )
    # verify the required parameter 'list_or_segment_id' is set
    if list_or_segment_id is None:
        raise ValueError("Missing the required parameter `list_or_segment_id` when calling `get_members`")  # noqa: E501

    path_params = {'list_or_segment_id': list_or_segment_id}  # noqa: E501
    query_params = []
    if 'marker' in kwargs:
        query_params.append(('marker', kwargs['marker']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/v2/group/{list_or_segment_id}/members/all', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='InlineResponse2008',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_segment_members(self, segment_id, **kwargs):  # noqa: E501
    """Check if Profiles Are in a Segment.

    Checks if one or more emails, phone numbers, or push tokens are in a
    given segment. No distinction is made between a person not being in
    a given segment, and not being present in Klaviyo at all. Can check
    up to a maximum of 100 emails at a time.

    The call is synchronous by default; pass ``async_req=True`` to get
    the request thread instead.

    >>> thread = api.get_segment_members(segment_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str segment_id: (required)
    :param CheckMembershipRequest body:
    :return: CheckMembershipResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Callers of this wrapper always want just the response data.
    kwargs['_return_http_data_only'] = True
    # Single return: the underlying call yields the thread when
    # async_req is set and the data otherwise.
    return self.get_segment_members_with_http_info(segment_id, **kwargs)  # noqa: E501
def get_segment_members_with_http_info(self, segment_id, **kwargs):  # noqa: E501
    """Check if Profiles Are in a Segment.

    Checks if one or more emails, phone numbers, or push tokens are in a
    given segment. No distinction is made between a person not being in
    a given segment, and not being present in Klaviyo at all. Can check
    up to a maximum of 100 emails at a time.

    >>> thread = api.get_segment_members_with_http_info(segment_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str segment_id: (required)
    :param CheckMembershipRequest body:
    :return: CheckMembershipResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = {'segment_id', 'body', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_segment_members" % name
            )
    # verify the required parameter 'segment_id' is set
    if segment_id is None:
        raise ValueError("Missing the required parameter `segment_id` when calling `get_segment_members`")  # noqa: E501

    path_params = {'segment_id': segment_id}  # noqa: E501
    # The membership request travels as a JSON body (absent -> None).
    body_params = kwargs.get('body')

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/v2/segment/{segment_id}/get-members', 'POST',
        path_params,
        [],    # no query params
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='CheckMembershipResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def remove_members(self, list_id, **kwargs):  # noqa: E501
    """Remove Profiles From List  # noqa: E501

    Removes profiles from a list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_members(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param ListIdMembersBody1 body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper always get the payload only,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Whether async_req is set or not, the *_with_http_info variant does
    # all of the work; async_req simply rides along inside kwargs.
    return self.remove_members_with_http_info(list_id, **kwargs)  # noqa: E501
def remove_members_with_http_info(self, list_id, **kwargs):  # noqa: E501
    """Remove Profiles From List  # noqa: E501

    Removes profiles from a list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_members_with_http_info(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param ListIdMembersBody1 body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `list_id` is None.
    """
    # Keyword arguments the caller may pass in addition to `list_id`.
    all_params = ['list_id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Build the params mapping explicitly instead of mutating the dict
    # returned by locals() (whose mutation semantics are undefined) and
    # without the `six` compatibility shim: dict.items() is iterable on
    # both Python 2 and Python 3.
    params = {'list_id': list_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_members" % key
            )
        params[key] = val

    # verify the required parameter 'list_id' is set
    if params.get('list_id') is None:
        raise ValueError("Missing the required parameter `list_id` when calling `remove_members`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'list_id' in params:
        path_params['list_id'] = params['list_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request body is optional; it is omitted when not supplied.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/v2/list/{list_id}/members', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def subscribe(self, list_id, **kwargs):  # noqa: E501
    """Subscribe Profiles to List  # noqa: E501

    Subscribes profiles to a list. Profiles will be single or double opted into the specified list in accordance with that list’s settings. **Note:** If you have double opt-in enabled (default behavior), users will not be added to list until they opt-in, and so API will respond with an empty list: `[]`  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.subscribe(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param ListIdSubscribeBody body:
    :return: ListSubscribePostResponseData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always return just the payload rather than the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # async_req is forwarded inside kwargs; the *_with_http_info variant
    # handles both the synchronous and asynchronous cases.
    return self.subscribe_with_http_info(list_id, **kwargs)  # noqa: E501
def subscribe_with_http_info(self, list_id, **kwargs):  # noqa: E501
    """Subscribe Profiles to List  # noqa: E501

    Subscribes profiles to a list. Profiles will be single or double opted into the specified list in accordance with that list’s settings. **Note:** If you have double opt-in enabled (default behavior), users will not be added to list until they opt-in, and so API will respond with an empty list: `[]`  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.subscribe_with_http_info(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param ListIdSubscribeBody body:
    :return: ListSubscribePostResponseData
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `list_id` is None.
    """
    # Keyword arguments the caller may pass in addition to `list_id`.
    all_params = ['list_id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Build the params mapping explicitly instead of mutating the dict
    # returned by locals() (whose mutation semantics are undefined) and
    # without the `six` compatibility shim: dict.items() is iterable on
    # both Python 2 and Python 3.
    params = {'list_id': list_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method subscribe" % key
            )
        params[key] = val

    # verify the required parameter 'list_id' is set
    if params.get('list_id') is None:
        raise ValueError("Missing the required parameter `list_id` when calling `subscribe`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'list_id' in params:
        path_params['list_id'] = params['list_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request body is optional; it is omitted when not supplied.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/v2/list/{list_id}/subscribe', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ListSubscribePostResponseData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def unsubscribe(self, list_id, **kwargs):  # noqa: E501
    """Unsubscribe Profiles From List  # noqa: E501

    Unsubscribes and removes profiles from a list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.unsubscribe(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param dict(str, object) body: Unsubscribe and remove profiles from a list.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always return just the payload rather than the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # async_req is forwarded inside kwargs; the *_with_http_info variant
    # handles both the synchronous and asynchronous cases.
    return self.unsubscribe_with_http_info(list_id, **kwargs)  # noqa: E501
def unsubscribe_with_http_info(self, list_id, **kwargs):  # noqa: E501
    """Unsubscribe Profiles From List  # noqa: E501

    Unsubscribes and removes profiles from a list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.unsubscribe_with_http_info(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param dict(str, object) body: Unsubscribe and remove profiles from a list.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `list_id` is None.
    """
    # Keyword arguments the caller may pass in addition to `list_id`.
    all_params = ['list_id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Build the params mapping explicitly instead of mutating the dict
    # returned by locals() (whose mutation semantics are undefined) and
    # without the `six` compatibility shim: dict.items() is iterable on
    # both Python 2 and Python 3.
    params = {'list_id': list_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unsubscribe" % key
            )
        params[key] = val

    # verify the required parameter 'list_id' is set
    if params.get('list_id') is None:
        raise ValueError("Missing the required parameter `list_id` when calling `unsubscribe`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'list_id' in params:
        path_params['list_id'] = params['list_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request body is optional; it is omitted when not supplied.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/v2/list/{list_id}/subscribe', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_list_name(self, list_id, **kwargs):  # noqa: E501
    """Update List Name  # noqa: E501

    Updates a list's name.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_list_name(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param str list_name:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always return just the payload rather than the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # async_req is forwarded inside kwargs; the *_with_http_info variant
    # handles both the synchronous and asynchronous cases.
    return self.update_list_name_with_http_info(list_id, **kwargs)  # noqa: E501
def update_list_name_with_http_info(self, list_id, **kwargs):  # noqa: E501
    """Update List Name  # noqa: E501

    Updates a list's name.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_list_name_with_http_info(list_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: (required)
    :param str list_name:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `list_id` is None.
    """
    # Keyword arguments the caller may pass in addition to `list_id`.
    all_params = ['list_id', 'list_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Build the params mapping explicitly instead of mutating the dict
    # returned by locals() (whose mutation semantics are undefined) and
    # without the `six` compatibility shim: dict.items() is iterable on
    # both Python 2 and Python 3.
    params = {'list_id': list_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_list_name" % key
            )
        params[key] = val

    # verify the required parameter 'list_id' is set
    if params.get('list_id') is None:
        raise ValueError("Missing the required parameter `list_id` when calling `update_list_name`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'list_id' in params:
        path_params['list_id'] = params['list_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}
    # `list_name` travels as a form field, not as a JSON body.
    if 'list_name' in params:
        form_params.append(('list_name', params['list_name']))  # noqa: E501

    body_params = None
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/x-www-form-urlencoded'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/v2/list/{list_id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 40.48448 | 321 | 0.614724 | 7,995 | 66,516 | 4.884053 | 0.042402 | 0.04958 | 0.022946 | 0.029502 | 0.968884 | 0.961381 | 0.956925 | 0.951726 | 0.94896 | 0.939485 | 0 | 0.018601 | 0.29847 | 66,516 | 1,642 | 322 | 40.509135 | 0.818207 | 0.373038 | 0 | 0.80485 | 1 | 0 | 0.182196 | 0.046275 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038106 | false | 0 | 0.004619 | 0 | 0.099307 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
edf7f4c60733e86830c214fb03aa20a4d1623585 | 60 | py | Python | pytorch/eval.py | fujibo/poseHG | e582a6ca1badc9a894b8b7e2a5e0acf9eb348c5c | [
"BSD-3-Clause"
] | null | null | null | pytorch/eval.py | fujibo/poseHG | e582a6ca1badc9a894b8b7e2a5e0acf9eb348c5c | [
"BSD-3-Clause"
] | null | null | null | pytorch/eval.py | fujibo/poseHG | e582a6ca1badc9a894b8b7e2a5e0acf9eb348c5c | [
"BSD-3-Clause"
] | 1 | 2018-06-30T07:07:25.000Z | 2018-06-30T07:07:25.000Z | import torch
from torch import nn
def evaluate():
    """Evaluate the model.

    Stub — no evaluation logic is implemented yet; always returns None.
    """
    return None
| 10 | 20 | 0.716667 | 9 | 60 | 4.777778 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.233333 | 60 | 5 | 21 | 12 | 0.934783 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0.25 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
b62c2af940935b8d6b92572371051d25e0ddad3a | 25 | py | Python | w01/e10.py | Luccifer/PythonCoruseraHSE | 653d6a24325789342f0d033717ba548dc6e90483 | [
"Unlicense"
] | 1 | 2020-01-12T12:55:07.000Z | 2020-01-12T12:55:07.000Z | w01/e10.py | Luccifer/PythonCourseraHSE | 653d6a24325789342f0d033717ba548dc6e90483 | [
"Unlicense"
] | null | null | null | w01/e10.py | Luccifer/PythonCourseraHSE | 653d6a24325789342f0d033717ba548dc6e90483 | [
"Unlicense"
] | null | null | null | # 100A
print('A' * 100)
| 6.25 | 16 | 0.52 | 4 | 25 | 3.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.315789 | 0.24 | 25 | 3 | 17 | 8.333333 | 0.368421 | 0.16 | 0 | 0 | 0 | 0 | 0.052632 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
b666a849976e7a8e0c4af9de3d9145f1f3c0152f | 97,024 | py | Python | eng_vocab.py | TheBigBen1/cleanco | 3f9dfd695a01e90d962631553637d0ed3806dcc8 | [
"MIT"
] | null | null | null | eng_vocab.py | TheBigBen1/cleanco | 3f9dfd695a01e90d962631553637d0ed3806dcc8 | [
"MIT"
] | null | null | null | eng_vocab.py | TheBigBen1/cleanco | 3f9dfd695a01e90d962631553637d0ed3806dcc8 | [
"MIT"
] | null | null | null | eng_words = ['telecommunications', 'responsibilities', 'sublimedirectory', 'characterization', 'recommendations', 'characteristics', 'representatives', 'pharmaceuticals', 'congratulations', 'representations', 'troubleshooting', 'internationally', 'confidentiality', 'instrumentation', 'administration', 'communications', 'responsibility', 'transportation', 'implementation', 'administrative', 'specifications', 'representative', 'infrastructure', 'identification', 'classification', 'representation', 'recommendation', 'interpretation', 'rehabilitation', 'discrimination', 'authentication', 'transformation', 'administrators', 'pharmaceutical', 'organizational', 'qualifications', 'accommodations', 'constitutional', 'correspondence', 'considerations', 'investigations', 'accountability', 'concentrations', 'simultaneously', 'advertisements', 'reconstruction', 'characteristic', 'sustainability', 'knowledgestorm', 'appropriations', 'superintendent', 'cardiovascular', 'configurations', 'international', 'entertainment', 'environmental', 'opportunities', 'communication', 'accommodation', 'organizations', 'understanding', 'documentation', 'announcements', 'manufacturing', 'configuration', 'comprehensive', 'approximately', 'automatically', 'advertisement', 'manufacturers', 'professionals', 'miscellaneous', 'massachusetts', 'relationships', 'administrator', 'participation', 'certification', 'investigation', 'circumstances', 'contributions', 'consideration', 'significantly', 'consolidation', 'accessibility', 'subscriptions', 'specification', 'organisations', 'collaboration', 'corresponding', 'determination', 'unfortunately', 'concentration', 'participating', 'establishment', 'presentations', 'undergraduate', 'effectiveness', 'institutional', 'functionality', 'compatibility', 'informational', 'architectural', 'authorization', 'psychological', 'collaborative', 'instructional', 'biotechnology', 'technological', 'demonstration', 'sophisticated', 'vulnerability', 
'congressional', 'modifications', 'starsmerchant', 'practitioners', 'extraordinary', 'photographers', 'possibilities', 'semiconductor', 'illustrations', 'substantially', 'inappropriate', 'distinguished', 'developmental', 'notifications', 'independently', 'consciousness', 'transcription', 'complications', 'distributions', 'championships', 'precipitation', 'mediterranean', 'revolutionary', 'accreditation', 'parliamentary', 'questionnaire', 'qualification', 'computational', 'characterized', 'conversations', 'bibliographic', 'investigators', 'complimentary', 'contamination', 'alternatively', 'controversial', 'installations', 'entrepreneurs', 'commissioners', 'interventions', 'restructuring', 'liechtenstein', 'strengthening', 'professional', 'applications', 'requirements', 'construction', 'availability', 'organization', 'registration', 'distribution', 'publications', 'introduction', 'technologies', 'manufacturer', 'instructions', 'relationship', 'installation', 'particularly', 'architecture', 'presentation', 'pennsylvania', 'institutions', 'intelligence', 'participants', 'philadelphia', 'collectibles', 'subscription', 'contemporary', 'prescription', 'specifically', 'conservation', 'certificates', 'encyclopedia', 'reservations', 'compensation', 'agricultural', 'transmission', 'neighborhood', 'transactions', 'organisation', 'contribution', 'constitution', 'consultation', 'satisfaction', 'intellectual', 'experimental', 'notification', 'universities', 'improvements', 'arrangements', 'successfully', 'capabilities', 'collectables', 'descriptions', 'commissioner', 'respectively', 'productivity', 'incorporated', 'disabilities', 'optimization', 'restrictions', 'developments', 'destinations', 'championship', 'testimonials', 'observations', 'associations', 'conservative', 'measurements', 'expectations', 'independence', 'announcement', 'implementing', 'conversation', 'reproduction', 'environments', 'consequences', 'implications', 'intermediate', 'jurisdiction', 'alternatives', 
'personalized', 'conditioning', 'partnerships', 'increasingly', 'conventional', 'occupational', 'representing', 'confirmation', 'interactions', 'intervention', 'modification', 'metropolitan', 'verification', 'demonstrated', 'alphabetical', 'photographer', 'bibliography', 'jacksonville', 'headquarters', 'commonwealth', 'mathematical', 'indianapolis', 'corporations', 'confidential', 'establishing', 'broadcasting', 'coordination', 'considerable', 'difficulties', 'presidential', 'surveillance', 'connectivity', 'significance', 'expenditures', 'occasionally', 'consolidated', 'preservation', 'governmental', 'manufactured', 'unemployment', 'performances', 'scholarships', 'recreational', 'downloadable', 'laboratories', 'perspectives', 'contributors', 'temperatures', 'competitions', 'christianity', 'negotiations', 'photographic', 'additionally', 'differential', 'nevertheless', 'distributors', 'illustration', 'contributing', 'transexuales', 'saskatchewan', 'cancellation', 'spirituality', 'calculations', 'accomplished', 'shareholders', 'subcommittee', 'periodically', 'masturbating', 'administered', 'pharmacology', 'consistently', 'appreciation', 'subsequently', 'individually', 'geographical', 'conferencing', 'investigator', 'entrepreneur', 'consequently', 'appointments', 'quantitative', 'screensavers', 'satisfactory', 'instrumental', 'translations', 'thanksgiving', 'stakeholders', 'participated', 'combinations', 'entertaining', 'prerequisite', 'enhancements', 'intersection', 'supplemental', 'interference', 'civilization', 'newfoundland', 'investigated', 'findarticles', 'acknowledged', 'techrepublic', 'unauthorized', 'demonstrates', 'specializing', 'reproductive', 'refrigerator', 'accompanying', 'anthropology', 'humanitarian', 'continuously', 'achievements', 'introductory', 'disciplinary', 'hypothetical', 'dramatically', 'fundamentals', 'masturbation', 'practitioner', 'dictionaries', 'transparency', 'municipality', 'experiencing', 'disappointed', 'acquisitions', 'biodiversity', 
'subsidiaries', 'examinations', 'departmental', 'sufficiently', 'infringement', 'information', 'development', 'description', 'accessories', 'application', 'performance', 'association', 'electronics', 'environment', 'advertising', 'engineering', 'corporation', 'appropriate', 'responsible', 'independent', 'opportunity', 'restaurants', 'significant', 'educational', 'alternative', 'publication', 'photography', 'recommended', 'maintenance', 'phentermine', 'established', 'programming', 'instruments', 'traditional', 'interesting', 'individuals', 'regulations', 'certificate', 'communities', 'temperature', 'classifieds', 'competition', 'immediately', 'improvement', 'interactive', 'newsletters', 'integration', 'discussions', 'destination', 'tripadvisor', 'residential', 'partnership', 'suggestions', 'replacement', 'netherlands', 'translation', 'legislation', 'departments', 'agriculture', 'constitutes', 'combination', 'distributed', 'enterprises', 'marketplace', 'interracial', 'competitive', 'preparation', 'playstation', 'instruction', 'differences', 'mathematics', 'mississippi', 'connecticut', 'collections', 'participate', 'experienced', 'institution', 'proceedings', 'experiences', 'verzeichnis', 'enforcement', 'requirement', 'authorities', 'attractions', 'connections', 'switzerland', 'recognition', 'transaction', 'examination', 'outstanding', 'perspective', 'reservation', 'measurement', 'merchandise', 'preferences', 'photographs', 'comfortable', 'interaction', 'effectively', 'consultants', 'cooperation', 'acquisition', 'implemented', 'directories', 'investments', 'contractors', 'anniversary', 'conferences', 'consumption', 'legislative', 'researchers', 'unsubscribe', 'sustainable', 'philippines', 'statistical', 'definitions', 'recruitment', 'considering', 'governments', 'operational', 'productions', 'possibility', 'reliability', 'appointment', 'represented', 'underground', 'christopher', 'corrections', 'initiatives', 'composition', 'immigration', 'necessarily', 'surrounding', 
'coordinator', 'fundamental', 'substantial', 'progressive', 'memorabilia', 'explanation', 'electricity', 'eligibility', 'involvement', 'demonstrate', 'intelligent', 'experiments', 'afghanistan', 'scholarship', 'supplements', 'contributed', 'ingredients', 'maintaining', 'preliminary', 'promotional', 'specialists', 'communicate', 'probability', 'illustrated', 'continental', 'achievement', 'limitations', 'orientation', 'flexibility', 'declaration', 'determining', 'destruction', 'advertisers', 'observation', 'restoration', 'convenience', 'peripherals', 'bestsellers', 'minneapolis', 'compilation', 'obligations', 'outsourcing', 'identifying', 'constructed', 'arrangement', 'conclusions', 'furthermore', 'cooperative', 'attachments', 'personality', 'methodology', 'transferred', 'distributor', 'specialized', 'populations', 'exploration', 'compression', 'submissions', 'prospective', 'spectacular', 'subscribers', 'potentially', 'constraints', 'dimensional', 'documentary', 'territories', 'palestinian', 'legislature', 'hospitality', 'procurement', 'theoretical', 'assignments', 'termination', 'comparative', 'expenditure', 'investigate', 'transmitted', 'screenshots', 'supervision', 'celebrities', 'syndication', 'celebration', 'permissions', 'calculation', 'exceptional', 'respondents', 'unavailable', 'essentially', 'assessments', 'sensitivity', 'springfield', 'enhancement', 'participant', 'medications', 'manufacture', 'therapeutic', 'accordingly', 'challenging', 'enlargement', 'conjunction', 'cholesterol', 'contracting', 'controlling', 'exclusively', 'inspiration', 'downloading', 'complicated', 'expressions', 'shakespeare', 'furnishings', 'renaissance', 'circulation', 'accompanied', 'proprietary', 'calculators', 'correlation', 'transparent', 'introducing', 'appreciated', 'albuquerque', 'assumptions', 'anticipated', 'copyrighted', 'respiratory', 'journalists', 'foundations', 'liabilities', 'exhibitions', 'competitors', 'resolutions', 'restriction', 'incorporate', 'backgrounds', 
'lightweight', 'generations', 'tournaments', 'sponsorship', 'uncertainty', 'encouraging', 'comparisons', 'viewpicture', 'republicans', 'battlefield', 'disclaimers', 'imagination', 'refurbished', 'politicians', 'accommodate', 'approximate', 'biographies', 'screensaver', 'subdivision', 'distinction', 'nutritional', 'affiliation', 'adjustments', 'controllers', 'consultancy', 'conventions', 'coordinates', 'hydrocodone', 'consecutive', 'acknowledge', 'consequence', 'fundraising', 'innovations', 'transsexual', 'citizenship', 'informative', 'utilization', 'calibration', 'efficiently', 'consistency', 'atmospheric', 'proposition', 'designation', 'encountered', 'differently', 'integrating', 'occupations', 'equilibrium', 'instructors', 'highlighted', 'contributor', 'disciplines', 'motherboard', 'inspections', 'penetration', 'importantly', 'commitments', 'televisions', 'commodities', 'motorcycles', 'concentrate', 'workstation', 'disposition', 'fascinating', 'computation', 'predictions', 'arbitration', 'configuring', 'convergence', 'malpractice', 'specialties', 'invitations', 'functioning', 'convertible', 'secretariat', 'nominations', 'demographic', 'interpreted', 'evaluations', 'elimination', 'commissions', 'powerseller', 'unnecessary', 'negotiation', 'programmers', 'deutschland', 'smithsonian', 'positioning', 'temporarily', 'frequencies', 'reflections', 'endorsement', 'magnificent', 'supervisors', 'westminster', 'transcripts', 'continually', 'collectible', 'southampton', 'advancement', 'replication', 'inexpensive', 'evanescence', 'coordinated', 'controversy', 'beautifully', 'terminology', 'threatening', 'simulations', 'conditional', 'automobiles', 'telecharger', 'groundwater', 'university', 'management', 'technology', 'government', 'department', 'categories', 'conditions', 'experience', 'activities', 'additional', 'washington', 'california', 'discussion', 'collection', 'conference', 'individual', 'everything', 'production', 'commercial', 'newsletter', 'registered', 
'protection', 'employment', 'commission', 'electronic', 'particular', 'facilities', 'statistics', 'investment', 'industrial', 'associated', 'foundation', 'population', 'navigation', 'operations', 'understand', 'connection', 'properties', 'assessment', 'especially', 'considered', 'enterprise', 'processing', 'resolution', 'components', 'assistance', 'disclaimer', 'membership', 'background', 'trademarks', 'television', 'interested', 'throughout', 'associates', 'businesses', 'restaurant', 'procedures', 'evaluation', 'references', 'literature', 'respective', 'definition', 'networking', 'australian', 'guidelines', 'difference', 'directions', 'automotive', 'successful', 'publishing', 'developing', 'historical', 'scientific', 'functional', 'monitoring', 'dictionary', 'accounting', 'techniques', 'permission', 'generation', 'characters', 'apartments', 'designated', 'integrated', 'compliance', 'acceptance', 'strategies', 'affiliates', 'multimedia', 'leadership', 'comparison', 'determined', 'statements', 'completely', 'electrical', 'applicable', 'basketball', 'identified', 'frequently', 'laboratory', 'industries', 'expression', 'provisions', 'principles', 'compatible', 'consulting', 'recreation', 'parameters', 'introduced', 'originally', 'philosophy', 'regulation', 'prevention', 'healthcare', 'maintained', 'increasing', 'containing', 'guaranteed', 'convention', 'previously', 'conversion', 'reasonable', 'importance', 'javascript', 'objectives', 'structures', 'continuing', 'accordance', 'annotation', 'percentage', 'supporting', 'specialist', 'concerning', 'developers', 'equivalent', 'curriculum', 'psychology', 'appliances', 'elementary', 'controlled', 'authorized', 'retirement', 'efficiency', 'commitment', 'interviews', 'classified', 'confidence', 'consistent', 'securities', 'democratic', 'dimensions', 'contribute', 'challenges', 'submission', 'regulatory', 'inspection', 'manchester', 'continuous', 'initiative', 'disability', 'contractor', 'affordable', 'tournament', 
'publishers', 'performing', 'absolutely', 'calculator', 'sufficient', 'resistance', 'candidates', 'biological', 'transition', 'instrument', 'relatively', 'represents', 'pittsburgh', 'revolution', 'mechanical', 'recognized', 'completion', 'accessible', 'birmingham', 'consultant', 'controller', 'committees', 'innovation', 'newspapers', 'programmes', 'eventually', 'agreements', 'innovative', 'conclusion', 'settlement', 'purchasing', 'instructor', 'approaches', 'highlights', 'scientists', 'volunteers', 'attachment', 'calculated', 'appearance', 'parliament', 'situations', 'structural', 'prohibited', 'simulation', 'bankruptcy', 'substances', 'discovered', 'exhibition', 'nationwide', 'definitely', 'commentary', 'limousines', 'apparently', 'popularity', 'postposted', 'sacramento', 'impossible', 'depression', 'cincinnati', 'subsection', 'wallpapers', 'subsequent', 'motorcycle', 'disclosure', 'occupation', 'citysearch', 'atmosphere', 'experiment', 'federation', 'assignment', 'counseling', 'acceptable', 'medication', 'metabolism', 'personally', 'excellence', 'attributes', 'obligation', 'regardless', 'restricted', 'republican', 'attendance', 'adventures', 'appreciate', 'mechanisms', 'indicators', 'physicians', 'governance', 'capability', 'complaints', 'promotions', 'geographic', 'suspension', 'correction', 'supplement', 'admissions', 'convenient', 'displaying', 'encouraged', 'cartridges', 'automation', 'advantages', 'extensions', 'applicants', 'adjustment', 'treatments', 'camcorders', 'difficulty', 'collective', 'enrollment', 'interfaces', 'opposition', 'supervisor', 'attraction', 'customized', 'understood', 'amendments', 'attractive', 'recordings', 'polyphonic', 'adjustable', 'allocation', 'discipline', 'dispatched', 'installing', 'engagement', 'facilitate', 'subscriber', 'priorities', 'incredible', 'portuguese', 'everywhere', 'housewares', 'reputation', 'photograph', 'underlying', 'projection', 'diagnostic', 'automobile', 'downloaded', 'protective', 'sunglasses', 
'preference', 'litigation', 'horizontal', 'ultimately', 'artificial', 'affiliated', 'activation', 'mitsubishi', 'processors', 'complexity', 'constantly', 'substitute', 'households', 'montgomery', 'louisville', 'algorithms', 'suggestion', 'connecting', 'proportion', 'essentials', 'protecting', 'separation', 'boundaries', 'luxembourg', 'deployment', 'colleagues', 'recruiting', 'prescribed', 'reproduced', 'queensland', 'addressing', 'discounted', 'bangladesh', 'constitute', 'graduation', 'variations', 'soundtrack', 'profession', 'separately', 'physiology', 'collecting', 'friendship', 'provincial', 'advertiser', 'encryption', 'possession', 'vegetables', 'thumbnails', 'respondent', 'accredited', 'compressed', 'scheduling', 'christians', 'impressive', 'relocation', 'violations', 'discretion', 'repository', 'generating', 'millennium', 'exceptions', 'macromedia', 'fellowship', 'copyrights', 'mastercard', 'chronicles', 'distribute', 'decorative', 'indigenous', 'validation', 'corruption', 'incentives', 'transcript', 'structured', 'reasonably', 'recommends', 'indicating', 'coordinate', 'limitation', 'widescreen', 'decorating', 'connectors', 'perception', 'infections', 'configured', 'analytical', 'assumption', 'technician', 'executives', 'supporters', 'withdrawal', 'veterinary', 'reflection', 'invitation', 'thumbzilla', 'translated', 'columnists', 'delivering', 'journalism', 'undertaken', 'identifier', 'conducting', 'impression', 'charleston', 'selections', 'projectors', 'vocational', 'pharmacies', 'completing', 'comparable', 'warranties', 'documented', 'paperbacks', 'vulnerable', 'transexual', 'mainstream', 'evaluating', 'volleyball', 'creativity', 'describing', 'quotations', 'behavioral', 'containers', 'screenshot', 'officially', 'consortium', 'recipients', 'traditions', 'humanities', 'britannica', 'visibility', 'strengthen', 'aggressive', 'determines', 'motivation', 'passengers', 'quantities', 'petersburg', 'powerpoint', 'obituaries', 'punishment', 'providence', 
'remembered', 'wilderness', 'headphones', 'proceeding', 'volkswagen', 'subsidiary', 'terrorists', 'beneficial', 'threatened', 'prediction', 'ecological', 'consisting', 'submitting', 'mozambique', 'wellington', 'aboriginal', 'remarkable', 'preventing', 'productive', 'trackbacks', 'programmer', 'incomplete', 'legitimate', 'architects', 'unexpected', 'formatting', 'discussing', 'meaningful', 'blackberry', 'meditation', 'microphone', 'organizing', 'moderators', 'kazakhstan', 'kilometers', 'guarantees', 'indication', 'cigarettes', 'responding', 'physically', 'attempting', 'accurately', 'ministries', 'thoroughly', 'nottingham', 'identifies', 'interstate', 'systematic', 'madagascar', 'presenting', 'uzbekistan', 'richardson', 'fragrances', 'vocabulary', 'earthquake', 'geological', 'introduces', 'webmasters', 'acdbentity', 'conspiracy', 'cumulative', 'occasional', 'explicitly', 'girlfriend', 'influenced', 'complement', 'requesting', 'lauderdale', 'extraction', 'hypothesis', 'regression', 'collectors', 'azerbaijan', 'travelling', 'widespread', 'referenced', 'vietnamese', 'tremendous', 'surrounded', 'accomplish', 'vegetarian', 'ambassador', 'contacting', 'vegetation', 'infectious', 'continuity', 'phenomenon', 'charitable', 'burlington', 'researcher', 'qualifying', 'estimation', 'institutes', 'stationery', 'journalist', 'afterwards', 'signatures', 'simplified', 'housewives', 'influences', 'irrigation', 'conviction', 'explaining', 'nomination', 'dependence', 'suggesting', 'privileges', 'landscapes', 'editorials', 'nationally', 'waterproof', 'alexandria', 'paragraphs', 'adolescent', 'occurrence', 'immigrants', 'helicopter', 'surprising', 'yugoslavia', 'likelihood', 'endangered', 'compromise', 'expiration', 'peripheral', 'greensboro', 'revelation', 'delegation', 'greenhouse', 'currencies', 'descending', 'psychiatry', 'persistent', 'adaptation', 'absorption', 'excitement', 'mysterious', 'indonesian', 'relaxation', 'thereafter', 'forwarding', 'reductions', 'portsmouth', 
'harassment', 'generators', 'huntington', 'internship', 'beastality', 'antarctica', 'chancellor', 'antibodies', 'immunology', 'encourages', 'conceptual', 'translator', 'challenged', 'constraint', 'insulation', 'subjective', 'available', 'copyright', 'education', 'community', 'following', 'resources', 'including', 'directory', 'insurance', 'different', 'september', 'questions', 'financial', 'equipment', 'important', 'something', 'committee', 'reference', 'companies', 'computers', 'president', 'australia', 'agreement', 'marketing', 'solutions', 'technical', 'microsoft', 'statement', 'downloads', 'subscribe', 'treatment', 'knowledge', 'currently', 'published', 'corporate', 'customers', 'materials', 'countries', 'standards', 'political', 'advertise', 'institute', 'sponsored', 'condition', 'effective', 'selection', 'executive', 'necessary', 'according', 'christmas', 'furniture', 'wednesday', 'structure', 'potential', 'documents', 'operating', 'developed', 'telephone', 'therefore', 'christian', 'worldwide', 'publisher', 'excellent', 'interface', 'operation', 'beautiful', 'locations', 'providing', 'authority', 'programme', 'employees', 'relations', 'completed', 'otherwise', 'character', 'functions', 'submitted', 'regarding', 'increased', 'beginning', 'specified', 'sometimes', 'transport', 'galleries', 'presented', 'secretary', 'magazines', 'francisco', 'described', 'attention', 'situation', 'emergency', 'determine', 'ringtones', 'difficult', 'satellite', 'recommend', 'professor', 'generally', 'continued', 'component', 'guarantee', 'processes', 'interests', 'paperback', 'supported', 'providers', 'thousands', 'generated', 'practices', 'reporting', 'essential', 'affiliate', 'universal', 'languages', 'challenge', 'certified', 'computing', 'procedure', 'religious', 'breakfast', 'developer', 'minnesota', 'adventure', 'assistant', 'headlines', 'yesterday', 'wholesale', 'extension', 'classical', 'dedicated', 'direction', 'wisconsin', 'personnel', 'advantage', 'estimated', 
'anonymous', 'interview', 'framework', 'installed', 'associate', 'decisions', 'editorial', 'strategic', 'economics', 'apartment', 'favorites', 'estimates', 'protected', 'officials', 'contained', 'tennessee', 'frequency', 'residents', 'displayed', 'performed', 'addresses', 'permanent', 'portfolio', 'practical', 'delivered', 'exclusive', 'utilities', 'reduction', 'nutrition', 'recording', 'secondary', 'wonderful', 'announced', 'mentioned', 'automatic', 'connected', 'directors', 'amendment', 'libraries', 'singapore', 'principal', 'buildings', 'household', 'batteries', 'positions', 'panasonic', 'permalink', 'signature', 'provision', 'certainly', 'newspaper', 'liability', 'trademark', 'trackback', 'americans', 'promotion', 'broadband', 'influence', 'webmaster', 'represent', 'louisiana', 'evolution', 'suggested', 'concerned', 'suppliers', 'receiving', 'discussed', 'elizabeth', 'establish', 'expressed', 'paragraph', 'conducted', 'requested', 'personals', 'immediate', 'searching', 'extremely', 'contracts', 'chemistry', 'variables', 'continues', 'responses', 'unlimited', 'printable', 'hardcover', 'celebrity', 'chocolate', 'hampshire', 'bluetooth', 'pregnancy', 'biography', 'financing', 'efficient', 'specialty', 'qualified', 'discovery', 'lifestyle', 'clearance', 'inventory', 'converter', 'objective', 'indicated', 'volunteer', 'parameter', 'processor', 'encourage', 'consumers', 'territory', 'resulting', 'cambridge', 'execution', 'increases', 'indicates', 'committed', 'extensive', 'candidate', 'databases', 'messenger', 'discounts', 'catalogue', 'caribbean', 'remaining', 'depending', 'expansion', 'purchased', 'collected', 'featuring', 'implement', 'scheduled', 'temporary', 'awareness', 'vancouver', 'packaging', 'northwest', 'classroom', 'democracy', 'wallpaper', 'baltimore', 'charlotte', 'physician', 'hollywood', 'wikipedia', 'spiritual', 'preferred', 'listening', 'landscape', 'dependent', 'applicant', 'licensing', 'textbooks', 'diversity', 'cleveland', 'sensitive', 
'templates', 'technique', 'calculate', 'alexander', 'broadcast', 'converted', 'accessory', 'typically', 'uniprotkb', 'afternoon', 'ownership', 'molecular', 'residence', 'attorneys', 'operators', 'employers', 'elections', 'stainless', 'hospitals', 'exception', 'indonesia', 'primarily', 'organized', 'improving', 'expensive', 'explained', 'expertise', 'mechanism', 'jewellery', 'disorders', 'detection', 'formation', 'engineers', 'proposals', 'moderator', 'tutorials', 'fantastic', 'appointed', 'animation', 'designers', 'melbourne', 'blackjack', 'argentina', 'dangerous', 'workshops', 'hurricane', 'mortgages', 'carefully', 'investors', 'diagnosis', 'principle', 'vacations', 'notebooks', 'algorithm', 'valentine', 'involving', 'investing', 'admission', 'terrorism', 'allocated', 'municipal', 'describes', 'substance', 'addressed', 'existence', 'attribute', 'mountains', 'obviously', 'cartridge', 'permitted', 'workplace', 'confirmed', 'infection', 'placement', 'vbulletin', 'donations', 'radiation', 'seriously', 'elsewhere', 'pollution', 'guestbook', 'purchases', 'chemicals', 'everybody', 'nashville', 'satisfied', 'milwaukee', 'warehouse', 'shareware', 'violation', 'configure', 'stability', 'southwest', 'somewhere', 'recognize', 'screening', 'thumbnail', 'forgotten', 'edinburgh', 'merchants', 'arguments', 'amsterdam', 'regularly', 'customize', 'tradition', 'emissions', 'complaint', 'camcorder', 'producing', 'precision', 'franchise', 'reception', 'correctly', 'geography', 'appearing', 'integrity', 'processed', 'emotional', 'platforms', 'overnight', 'districts', 'introduce', 'chevrolet', 'generator', 'showtimes', 'promoting', 'machinery', 'bandwidth', 'dimension', 'schedules', 'quarterly', 'alternate', 'automated', 'passenger', 'childhood', 'preparing', 'recipient', 'athletics', 'southeast', 'migration', 'basically', 'coalition', 'producers', 'collector', 'returning', 'container', 'defendant', 'departure', 'assurance', 'happening', 'jefferson', 'gardening', 'orchestra', 
'voluntary', 'messaging', 'additions', 'requiring', 'refinance', 'calendars', 'measuring', 'transfers', 'bookstore', 'parenting', 'manhattan', 'workforce', 'discharge', 'halloween', 'hazardous', 'resistant', 'democrats', 'recycling', 'slideshow', 'variation', 'wrestling', 'photoshop', 'gathering', 'criticism', 'statutory', 'northeast', 'paintings', 'injection', 'yorkshire', 'initially', 'indicator', 'eliminate', 'threshold', 'venezuela', 'sequences', 'astronomy', 'translate', 'announces', 'perfectly', 'instantly', 'terrorist', 'ecommerce', 'suffering', 'connector', 'naturally', 'underwear', 'inclusive', 'selecting', 'pantyhose', 'barcelona', 'exercises', 'protocols', 'highlight', 'inclusion', 'hopefully', 'brilliant', 'evaluated', 'authentic', 'macintosh', 'movements', 'amenities', 'virtually', 'projector', 'surprised', 'genealogy', 'liverpool', 'similarly', 'excluding', 'commander', 'spotlight', 'logistics', 'symposium', 'intensive', 'switching', 'testimony', 'mandatory', 'filtering', 'offensive', 'separated', 'directive', 'governing', 'retailers', 'attending', 'traveling', 'catherine', 'occasions', 'equations', 'musicians', 'composite', 'designing', 'brunswick', 'streaming', 'intensity', 'syndicate', 'antivirus', 'concluded', 'desperate', 'addiction', 'synthesis', 'undefined', 'newcastle', 'societies', 'brazilian', 'identical', 'petroleum', 'norwegian', 'retention', 'exchanges', 'wondering', 'dominican', 'expanding', 'rochester', 'stockings', 'destroyed', 'wordpress', 'celebrate', 'appliance', 'mortality', 'architect', 'microwave', 'accidents', 'cosmetics', 'realistic', 'christina', 'achieving', 'fisheries', 'reactions', 'companion', 'divisions', 'victorian', 'lithuania', 'obtaining', 'campaigns', 'instances', 'scientist', 'arthritis', 'inflation', 'compounds', 'graduates', 'arlington', 'particles', 'construct', 'inquiries', 'affecting', 'aggregate', 'forecasts', 'shopzilla', 'conflicts', 'travelers', 'offerings', 'incorrect', 'guatemala', 'pathology', 
'ordinance', 'festivals', 'jerusalem', 'invention', 'enquiries', 'cognitive', 'exploring', 'registrar', 'predicted', 'ministers', 'prostores', 'relevance', 'incentive', 'butterfly', 'mechanics', 'numerical', 'princeton', 'meanwhile', 'childrens', 'porcelain', 'pichunter', 'consensus', 'intention', 'statewide', 'happiness', 'fragrance', 'neighbors', 'champions', 'comparing', 'projected', 'regulated', 'confusion', 'keyboards', 'reflected', 'maternity', 'decreased', 'tolerance', 'lightning', 'inspector', 'bookmarks', 'riverside', 'bathrooms', 'abilities', 'initiated', 'nonprofit', 'lancaster', 'suspended', 'attitudes', 'integrate', 'sociology', 'confident', 'retrieved', 'delicious', 'hungarian', 'referring', 'transform', 'educators', 'vegetable', 'alignment', 'henderson', 'encounter', 'accessing', 'attempted', 'witnesses', 'frederick', 'disturbed', 'sculpture', 'treasurer', 'scripting', 'synthetic', 'testament', 'specifics', 'partially', 'breakdown', 'strengths', 'furnished', 'discusses', 'responded', 'abstracts', 'thesaurus', 'literally', 'appraisal', 'citations', 'sheffield', 'finishing', 'prospects', 'bulgarian', 'boulevard', 'compliant', 'penalties', 'activated', 'armstrong', 'christine', 'accepting', 'isolation', 'sustained', 'greetings', 'inherited', 'chronicle', 'plaintiff', 'prisoners', 'nightlife', 'freelance', 'excessive', 'valuation', 'cigarette', 'metallica', 'narrative', 'academics', 'livestock', 'exemption', 'nicaragua', 'relatives', 'bracelets', 'telephony', 'breathing', 'thickness', 'graphical', 'aerospace', 'maintains', 'shortcuts', 'voyeurweb', 'extending', 'specifies', 'macedonia', 'combining', 'standings', 'partition', 'invisible', 'commodity', 'competing', 'peninsula', 'considers', 'vacancies', 'modelling', 'miniature', 'perceived', 'centuries', 'magnitude', 'assessing', 'handhelds', 'answering', 'impressed', 'organizer', 'preceding', 'amplifier', 'arbitrary', 'prominent', 'lexington', 'contacted', 'recorders', 'postcards', 'reviewing', 
'bloomberg', 'hierarchy', 'abandoned', 'mauritius', 'checklist', 'scenarios', 'elevation', 'beverages', 'medicines', 'renewable', 'passwords', 'astrology', 'occurring', 'induction', 'precisely', 'spreading', 'provinces', 'incidence', 'incidents', 'enhancing', 'palestine', 'listprice', 'publicity', 'allowance', 'duplicate', 'criterion', 'corrected', 'creatures', 'commented', 'stephanie', 'specially', 'conscious', 'referrals', 'revisions', 'reasoning', 'annotated', 'convinced', 'replacing', 'watershed', 'privilege', 'pediatric', 'brochures', 'traveller', 'suspected', 'benchmark', 'beginners', 'competent', 'gradually', 'desirable', 'religions', 'explosion', 'daughters', 'champagne', 'deviation', 'hydraulic', 'robertson', 'omissions', 'retrieval', 'qualities', 'prototype', 'apparatus', 'empirical', 'sexuality', 'polyester', 'remainder', 'molecules', 'intervals', 'sentences', 'exclusion', 'holocaust', 'receivers', 'navigator', 'marijuana', 'cathedral', 'fairfield', 'lafayette', 'salvation', 'selective', 'sacrifice', 'removable', 'gibraltar', 'reporters', 'necessity', 'rendering', 'hepatitis', 'economies', 'bacterial', 'undertake', 'cancelled', 'reservoir', 'worcester', 'respected', 'portraits', 'performer', 'graduated', 'namespace', 'opponents', 'nominated', 'electoral', 'changelog', 'recovered', 'upgrading', 'frontpage', 'demanding', 'defensive', 'forbidden', 'monitored', 'motivated', 'examining', 'survivors', 'detective', 'fireplace', 'summaries', 'looksmart', 'colleague', 'paintball', 'enclosure', 'distances', 'treasures', 'communist', 'creations', 'presently', 'librarian', 'stockholm', 'therapist', 'promising', 'nightmare', 'organisms', 'telescope', 'generates', 'receptors', 'aluminium', 'snowboard', 'shipments', 'legendary', 'gentleman', 'reproduce', 'convicted', 'roommates', 'spokesman', 'activists', 'frankfurt', 'assembled', 'terminals', 'crossword', 'continent', 'longitude', 'insertion', 'swaziland', 'varieties', 'mediawiki', 'business', 'services', 'products', 
'software', 'research', 'comments', 'national', 'internet', 'shipping', 'reserved', 'security', 'american', 'computer', 'download', 'pictures', 'personal', 'location', 'children', 'students', 'shopping', 'previous', 'property', 'customer', 'december', 'training', 'advanced', 'category', 'register', 'november', 'features', 'industry', 'provided', 'required', 'articles', 'feedback', 'complete', 'standard', 'programs', 'language', 'password', 'question', 'building', 'february', 'analysis', 'possible', 'problems', 'interest', 'learning', 'delivery', 'original', 'includes', 'messages', 'provides', 'specific', 'director', 'planning', 'database', 'official', 'district', 'calendar', 'resource', 'document', 'material', 'together', 'function', 'economic', 'projects', 'included', 'received', 'archives', 'magazine', 'policies', 'position', 'listings', 'wireless', 'purchase', 'response', 'practice', 'hardware', 'designed', 'discount', 'remember', 'increase', 'european', 'activity', 'although', 'contents', 'regional', 'supplies', 'exchange', 'continue', 'benefits', 'anything', 'mortgage', 'solution', 'addition', 'clothing', 'homepage', 'military', 'decision', 'division', 'actually', 'saturday', 'starting', 'thursday', 'consumer', 'contract', 'releases', 'virginia', 'multiple', 'featured', 'friendly', 'schedule', 'everyone', 'approach', 'physical', 'medicine', 'evidence', 'favorite', 'recently', 'probably', 'networks', 'transfer', 'carolina', 'hospital', 'overview', 'distance', 'involved', 'partners', 'existing', 'selected', 'patients', 'directly', 'searches', 'strategy', 'teaching', 'canadian', 'positive', 'football', 'abstract', 'contains', 'republic', 'vacation', 'academic', 'graphics', 'expected', 'mountain', 'consider', 'northern', 'proposed', 'reported', 'politics', 'modified', 'released', 'internal', 'detailed', 'japanese', 'approved', 'southern', 'pressure', 'keywords', 'purposes', 'external', 'teachers', 'subjects', 'capacity', 'requires', 'electric', 'creative', 
'progress', 'families', 'accepted', 'agencies', 'michigan', 'columbia', 'critical', 'employee', 'packages', 'colorado', 'relevant', 'illinois', 'elements', 'facility', 'minister', 'visitors', 'coverage', 'clinical', 'sciences', 'currency', 'commerce', 'accounts', 'settings', 'cultural', 'holidays', 'graduate', 'thinking', 'provider', 'optional', 'sections', 'websites', 'religion', 'measures', 'chemical', 'exercise', 'meetings', 'congress', 'username', 'produced', 'argument', 'creating', 'attorney', 'auctions', 'informed', 'thoughts', 'quantity', 'platform', 'machines', 'recovery', 'merchant', 'vehicles', 'campaign', 'examples', 'motorola', 'intended', 'election', 'requests', 'separate', 'identify', 'domestic', 'extended', 'sequence', 'williams', 'movement', 'printing', 'baseball', 'approval', 'contacts', 'matching', 'offering', 'variable', 'compared', 'workshop', 'lighting', 'portable', 'returned', 'warranty', 'assembly', 'criminal', 'powerful', 'obtained', 'supplied', 'opinions', 'maintain', 'priority', 'payments', 'straight', 'prepared', 'criteria', 'behavior', 'changing', 'festival', 'whatever', 'maryland', 'eligible', 'checkout', 'handling', 'scotland', 'followed', 'protocol', 'designer', 'marriage', 'negative', 'missouri', 'ministry', 'proposal', 'birthday', 'slightly', 'lingerie', 'profiles', 'controls', 'breaking', 'combined', 'ultimate', 'reviewed', 'forecast', 'accuracy', 'pharmacy', 'creation', 'chairman', 'violence', 'oklahoma', 'speakers', 'cleaning', 'concerns', 'officers', 'referred', 'supports', 'presence', 'majority', 'strength', 'daughter', 'standing', 'ordering', 'bookmark', 'specials', 'improved', 'exposure', 'gambling', 'outdoors', 'printers', 'kentucky', 'interior', 'relative', 'identity', 'victoria', 'revision', 'instance', 'licensed', 'recorded', 'finished', 'discover', 'patterns', 'stations', 'greatest', 'operator', 'tracking', 'accurate', 'managing', 'happened', 'lesbians', 'managers', 'aircraft', 'conflict', 'versions', 'employer', 
'describe', 'citizens', 'heritage', 'audience', 'assigned', 'directed', 'sporting', 'affected', 'expenses', 'indicate', 'anderson', 'diseases', 'thailand', 'advisory', 'template', 'anywhere', 'atlantic', 'investor', 'wildlife', 'speaking', 'sponsors', 'checking', 'guidance', 'observed', 'glossary', 'channels', 'ericsson', 'appendix', 'supplier', 'arkansas', 'notebook', 'explorer', 'historic', 'attached', 'disabled', 'upcoming', 'constant', 'portland', 'concepts', 'relating', 'alliance', 'engineer', 'becoming', 'relation', 'colleges', 'brothers', 'presents', 'estimate', 'bulletin', 'epinions', 'painting', 'universe', 'watching', 'sterling', 'sessions', 'journals', 'jennifer', 'terminal', 'nebraska', 'properly', 'hundreds', 'tomorrow', 'visiting', 'downtown', 'keyboard', 'suitable', 'millions', 'findings', 'clicking', 'province', 'catholic', 'governor', 'swimming', 'pakistan', 'reliable', 'symptoms', 'memorial', 'fighting', 'pregnant', 'cellular', 'normally', 'diabetes', 'flexible', 'numerous', 'superior', 'spending', 'magnetic', 'registry', 'employed', 'displays', 'allowing', 'earnings', 'delaware', 'counties', 'occurred', 'concrete', 'accident', 'resident', 'possibly', 'flashing', 'malaysia', 'antiques', 'parallel', 'bathroom', 'drinking', 'reaction', 'enhanced', 'entitled', 'generate', 'monitors', 'duration', 'pursuant', 'contrast', 'adoption', 'measured', 'marshall', 'thousand', 'hamilton', 'tutorial', 'portugal', 'lawrence', 'valuable', 'airlines', 'aviation', 'disaster', 'commands', 'achieved', 'injuries', 'nintendo', 'appeared', 'franklin', 'exciting', 'ringtone', 'pleasure', 'oriented', 'desktops', 'columbus', 'producer', 'semester', 'strongly', 'proteins', 'familiar', 'carrying', 'editions', 'vertical', 'absolute', 'consists', 'soldiers', 'guardian', 'classics', 'bringing', 'evaluate', 'tropical', 'pipeline', 'everyday', 'ethernet', 'handbook', 'navigate', 'somewhat', 'receiver', 'scottish', 'richmond', 'covering', 'platinum', 'judgment', 'bedrooms', 
'modeling', 'spectrum', 'emphasis', 'princess', 'entering', 'thompson', 'memories', 'adequate', 'cartoons', 'entirely', 'replaced', 'reducing', 'shooting', 'launched', 'suggests', 'operated', 'overseas', 'surprise', 'shoppers', 'supposed', 'ordinary', 'applying', 'reporter', 'champion', 'sentence', 'outcomes', 'survival', 'jonathan', 'whenever', 'lifetime', 'athletic', 'campbell', 'traveler', 'aluminum', 'wishlist', 'trailers', 'syndrome', 'expanded', 'bulgaria', 'believed', 'spanking', 'catering', 'incident', 'dynamics', 'decrease', 'revenues', 'emerging', 'churches', 'reserves', 'minority', 'recorder', 'seminars', 'paradise', 'compiled', 'romantic', 'revealed', 'margaret', 'portions', 'equation', 'reviewer', 'involves', 'earrings', 'chapters', 'literary', 'choosing', 'boundary', 'believes', 'deadline', 'equipped', 'broadway', 'acquired', 'entrance', 'attempts', 'answered', 'disorder', 'firewall', 'animated', 'judicial', 'bachelor', 'attitude', 'montreal', 'genetics', 'attended', 'mitchell', 'embedded', 'brochure', 'petition', 'shoulder', 'diameter', 'literacy', 'moderate', 'opposite', 'dealtime', 'mercedes', 'tramadol', 'receives', 'veterans', 'occasion', 'sleeping', 'moreover', 'michelle', 'dialogue', 'declared', 'handheld', 'disposal', 'florists', 'switches', 'blogging', 'midnight', 'commonly', 'pleasant', 'announce', 'sampling', 'inspired', 'weddings', 'suddenly', 'netscape', 'township', 'rankings', 'robinson', 'remained', 'entities', 'roulette', 'medicare', 'explains', 'feelings', 'freeware', 'donation', 'targeted', 'realized', 'gamecube', 'climbing', 'somebody', 'colombia', 'archived', 'courtesy', 'detected', 'bracelet', 'juvenile', 'acoustic', 'cassette', 'steering', 'cemetery', 'contests', 'berkeley', 'adjusted', 'seasonal', 'counters', 'cultures', 'coaching', 'examined', 'encoding', 'cosmetic', 'resulted', 'portrait', 'carriers', 'mobility', 'builders', 'struggle', 'crossing', 'resolved', 'branches', 'holdings', 'zimbabwe', 'browsing', 'bargains', 
'frequent', 'ensuring', 'hispanic', 'diamonds', 'untitled', 'marriott', 'starring', 'referral', 'distinct', 'verified', 'formerly', 'situated', 'strictly', 'retailer', 'vitamins', 'brooklyn', 'phillips', 'interval', 'expansys', 'repeated', 'filename', 'florence', 'analyzes', 'drawings', 'scenario', 'junction', 'weekends', 'produces', 'kingston', 'adapters', 'adjacent', 'reaching', 'receptor', 'surgical', 'citation', 'premises', 'imperial', 'benjamin', 'studying', 'upgrades', 'offshore', 'harrison', 'emission', 'apparent', 'outreach', 'mounting', 'balanced', 'explicit', 'precious', 'annually', 'scanners', 'delivers', 'necklace', 'arranged', 'theaters', 'advocacy', 'threaded', 'footwear', 'licenses', 'removing', 'isolated', 'assisted', 'compound', 'abortion', 'wellness', 'membrane', 'previews', 'exterior', 'greeting', 'botswana', 'velocity', 'composed', 'baseline', 'honolulu', 'electron', 'passport', 'treasury', 'occupied', 'observer', 'sunshine', 'ceremony', 'arrested', 'homework', 'assessed', 'enabling', 'stronger', 'advances', 'darkness', 'stanford', 'rejected', 'gamespot', 'railroad', 'lectures', 'cheapest', 'travesti', 'salvador', 'tanzania', 'preserve', 'unsigned', 'theories', 'executed', 'showcase', 'integral', 'synopsis', 'composer', 'accessed', 'imported', 'contrary', 'focusing', 'admitted', 'equality', 'stickers', 'concerts', 'cambodia', 'updating', 'readings', 'confused', 'compiler', 'airports', 'brunette', 'gathered', 'slovenia', 'notified', 'dramatic', 'surfaces', 'terrible', 'reflects', 'taxation', 'treasure', 'assuming', 'monetary', 'floating', 'plymouth', 'warnings', 'stunning', 'actively', 'cookbook', 'uploaded', 'collapse', 'americas', 'unlikely', 'beverage', 'forestry', 'barriers', 'infected', 'particle', 'minerals', 'humidity', 'operates', 'brisbane', 'manitoba', 'missions', 'costumes', 'nickname', 'staffing', 'playlist', 'statutes', 'enrolled', 'publicly', 'reseller', 'suffered', 'informal', 'swingers', 'mistakes', 'defining', 'counting', 
'medieval', 'captured', 'innocent', 'scanning', 'cordless', 'patricia', 'disagree', 'episodes', 'circular', 'mainland', 'interact', 'auckland', 'olympics', 'trinidad', 'geometry', 'slovakia', 'gorgeous', 'barbados', 'chrysler', 'mcdonald', 'plumbing', 'brussels', 'shanghai', 'davidson', 'organize', 'triangle', 'oriental', 'hydrogen', 'webshots', 'advocate', 'artistic', 'detector', 'colonial', 'proceeds', 'indirect', 'browsers', 'overcome', 'brighton', 'reminder', 'searched', 'insights', 'sullivan', 'exhibits', 'bacteria', 'moisture', 'symantec', 'launches', 'latitude', 'deposits', 'mistress', 'trustees', 'reprints', 'midlands', 'analysts', 'nicholas', 'invasion', 'spelling', 'medicaid', 'infrared', 'quarters', 'naturals', 'fixtures', 'bloggers', 'flooring', 'ethiopia', 'athletes', 'humanity', 'scholars', 'snapshot', 'segments', 'dominant', 'minimize', 'fraction', 'adelaide', 'emirates', 'promised', 'bookings', 'fabulous', 'maritime', 'periodic', 'overhead', 'prospect', 'shipment', 'breeding', 'envelope', 'homeland', 'excluded', 'emotions', 'incoming', 'cleaners', 'cashiers', 'rotation', 'premiere', 'villages', 'symphony', 'rational', 'fighters', 'chambers', 'fountain', 'regarded', 'egyptian', 'outlined', 'headline', 'treating', 'enormous', 'honduras', 'cabinets', 'hartford', 'wrapping', 'timeline', 'infinite', 'civilian', 'realtors', 'wherever', 'democrat', 'retained', 'logitech', 'briefing', 'highland', 'hawaiian', 'consoles', 'cylinder', 'surround', 'finances', 'enjoying', 'italiano', 'carnival', 'promises', 'combines', 'bradford', 'reynolds', 'speeches', 'catalogs', 'savannah', 'pointing', 'metadata', 'circuits', 'handbags', 'somerset', 'incurred', 'roommate', 'failures', 'theology', 'edmonton', 'retrieve', 'worldcat', 'titanium', 'deutsche', 'postings', 'cornwall', 'basement', 'sandwich', 'hearings', 'textbook', 'frontier', 'stopping', 'refugees', 'peaceful', 'doctrine', 'trainers', 'conclude', 'advisors', 'pavilion', 'talented', 'paraguay', 'boutique', 
'peterson', 'homeless', 'horrible', 'metallic', 'warriors', 'cadillac', 'parental', 'marathon', 'pressing', 'gasoline', 'warcraft', 'catalyst', 'analyzed', 'remedies', 'validity', 'weighted', 'performs', 'plastics', 'salaries', 'postcard', 'elephant', 'drainage', 'clearing', 'routines', 'reliance', 'striking', 'podcasts', 'ensemble', 'biblical', 'prostate', 'nitrogen', 'softball', 'firewire', 'musician', 'blocking', 'limiting', 'dispatch', 'restored', 'chargers', 'rendered', 'openings', 'councils', 'cottages', 'develops', 'dressing', 'drilling', 'tomatoes', 'clusters', 'antibody', 'momentum', 'grateful', 'laughing', 'opponent', 'propecia', 'mongolia', 'manually', 'centered', 'writings', 'charging', 'discrete', 'beginner', 'sapphire', 'crawford', 'declined', 'neighbor', 'highways', 'thinkpad', 'intimate', 'dentists', 'variance', 'cameroon', 'adaptive', 'computed', 'invision', 'generous', 'learners', 'aberdeen', 'educated', 'inserted', 'basename', 'suburban', 'survivor', 'cingular', 'impaired', 'ventures', 'stranger', 'tribunal', 'pensions', 'mattress', 'likewise', 'charming', 'annoying', 'disclose', 'restrict', 'springer', 'endorsed', 'maximize', 'senators', 'bleeding', 'optimize', 'caroline', 'engaging', 'deferred', 'polished', 'simpsons', 'flashers', 'arrivals', 'securely', 'fioricet', 'promoted', 'enclosed', 'thriller', 'transmit', 'planners', 'disputes', 'textiles', 'intranet', 'aquarium', 'promptly', 'sexually', 'dividend', 'playback', 'hometown', 'handmade', 'workflow', 'switched', 'richards', 'hardwood', 'temporal', 'airplane', 'istanbul', 'asbestos', 'throwing', 'potatoes', 'thorough', 'creature', 'syracuse', 'maldives', 'firmware', 'shepherd', 'canberra', 'sympathy', 'avoiding', 'surgeons', 'promotes', 'johnston', 'prisoner', 'earliest', 'morrison', 'examines', 'budapest', 'knitting', 'attacked', 'smallest', 'monsters', 'lightbox', 'cocktail', 'contact', 'service', 'product', 'support', 'message', 'privacy', 'company', 'general', 'january', 'reviews', 
'program', 'details', 'results', 'address', 'subject', 'special', 'website', 'project', 'version', 'section', 'related', 'members', 'network', 'systems', 'without', 'current', 'control', 'history', 'account', 'digital', 'profile', 'another', 'quality', 'listing', 'content', 'country', 'private', 'compare', 'include', 'college', 'article', 'provide', 'process', 'science', 'english', 'windows', 'gallery', 'however', 'october', 'library', 'medical', 'looking', 'comment', 'working', 'payment', 'student', 'problem', 'options', 'america', 'example', 'changes', 'release', 'request', 'picture', 'meeting', 'similar', 'schools', 'million', 'popular', 'stories', 'journal', 'reports', 'welcome', 'central', 'council', 'archive', 'society', 'friends', 'edition', 'updated', 'already', 'studies', 'several', 'display', 'limited', 'powered', 'natural', 'whether', 'weather', 'average', 'records', 'present', 'written', 'federal', 'hosting', 'tickets', 'finance', 'minutes', 'reading', 'usually', 'percent', 'getting', 'germany', 'various', 'receive', 'methods', 'chapter', 'manager', 'michael', 'florida', 'license', 'holiday', 'writing', 'effects', 'created', 'kingdom', 'thought', 'storage', 'summary', 'western', 'overall', 'package', 'players', 'started', 'someone', 'printer', 'believe', 'nothing', 'certain', 'running', 'jewelry', 'islands', 'british', 'sellers', 'tuesday', 'lesbian', 'machine', 'returns', 'capital', 'england', 'culture', 'courses', 'airport', 'foreign', 'outside', 'channel', 'located', 'primary', 'numbers', 'browser', 'purpose', 'feature', 'cameras', 'ratings', 'chicago', 'sources', 'regular', 'station', 'rentals', 'improve', 'parents', 'kitchen', 'wedding', 'disease', 'perfect', 'classic', 'command', 'william', 'express', 'success', 'maximum', 'warning', 'forward', 'flowers', 'animals', 'housing', 'catalog', 'traffic', 'ireland', 'testing', 'instead', 'leading', 'fitness', 'chinese', 'opinion', 'greater', 'develop', 'artists', 'session', 'century', 'pacific', 
'mailing', 'vehicle', 'default', 'require', 'outdoor', 'morning', 'protein', 'partner', 'authors', 'faculty', 'parties', 'mission', 'richard', 'ability', 'battery', 'defined', 'playing', 'virtual', 'answers', 'offered', 'surface', 'minimum', 'variety', 'finally', 'updates', 'desktop', 'classes', 'officer', 'respect', 'unknown', 'teacher', 'workers', 'georgia', 'showing', 'benefit', 'funding', 'devices', 'fiction', 'watches', 'careers', 'complex', 'spanish', 'setting', 'economy', 'highest', 'helpful', 'monthly', 'musical', 'angeles', 'changed', 'russian', 'largest', 'african', 'justice', 'connect', 'applied', 'advance', 'auction', 'allowed', 'correct', 'charles', 'selling', 'species', 'pricing', 'fashion', 'monitor', 'trading', 'clients', 'actions', 'discuss', 'markets', 'leather', 'patient', 'perhaps', 'persons', 'village', 'amateur', 'factors', 'zealand', 'balance', 'replies', 'initial', 'fishing', 'fantasy', 'letters', 'context', 'install', 'apparel', 'johnson', 'quickly', 'dollars', 'driving', 'surgery', 'brought', 'diamond', 'servers', 'seconds', 'arizona', 'keyword', 'italian', 'freedom', 'premium', 'upgrade', 'growing', 'hearing', 'eastern', 'therapy', 'entries', 'serious', 'samsung', 'efforts', 'nursing', 'defense', 'covered', 'protect', 'prevent', 'finding', 'affairs', 'towards', 'suggest', 'charges', 'reasons', 'talking', 'element', 'quarter', 'missing', 'sitemap', 'houston', 'centers', 'opening', 'reserve', 'recipes', 'plastic', 'produce', 'counter', 'failure', 'follows', 'weekend', 'ontario', 'readers', 'jackson', 'leaders', 'posters', 'parking', 'seattle', 'brother', 'pattern', 'theatre', 'earlier', 'sponsor', 'indiana', 'objects', 'evening', 'nuclear', 'promote', 'appears', 'decided', 'designs', 'tourism', 'savings', 'graphic', 'binding', 'winning', 'atlanta', 'credits', 'clearly', 'enlarge', 'revenue', 'measure', 'flights', 'experts', 'vintage', 'exactly', 'explore', 'concept', 'reality', 'billion', 'nations', 'speaker', 'offices', 'managed', 
'toronto', 'theater', 'springs', 'perform', 'healthy', 'drivers', 'figures', 'married', 'sharing', 'waiting', 'banking', 'conduct', 'calling', 'serving', 'matters', 'reduced', 'physics', 'extreme', 'samples', 'removed', 'singles', 'amounts', 'contain', 'crystal', 'academy', 'dynamic', 'regions', 'meaning', 'posting', 'instant', 'viewing', 'aspects', 'austria', 'utility', 'preview', 'despite', 'degrees', 'seeking', 'phoenix', 'comfort', 'smoking', 'becomes', 'alabama', 'achieve', 'carried', 'clothes', 'circuit', 'printed', 'removal', 'factory', 'revised', 'optical', 'amazing', 'feeling', 'bedroom', 'orlando', 'lawyers', 'advisor', 'remains', 'generic', 'transit', 'compact', 'keeping', 'attempt', 'matches', 'engines', 'stephen', 'climate', 'alcohol', 'walking', 'explain', 'smaller', 'modules', 'concern', 'holding', 'trouble', 'dealers', 'helping', 'totally', 'organic', 'leaving', 'cooking', 'respond', 'entered', 'belgium', 'highway', 'booking', 'portion', 'biology', 'ancient', 'leisure', 'learned', 'husband', 'britain', 'concert', 'adopted', 'carrier', 'reflect', 'deliver', 'lessons', 'treated', 'confirm', 'neither', 'offline', 'replace', 'reached', 'enabled', 'montana', 'enhance', 'adapter', 'laptops', 'editors', 'threads', 'supreme', 'weapons', 'episode', 'planned', 'antonio', 'charged', 'gourmet', 'orleans', 'prepare', 'illegal', 'lincoln', 'premier', 'consent', 'contest', 'chamber', 'typical', 'chicken', 'sending', 'tonight', 'spyware', 'formula', 'periods', 'attacks', 'resorts', 'biggest', 'visitor', 'gateway', 'drawing', 'ordered', 'happens', 'romance', 'focused', 'bargain', 'vermont', 'hunting', 'cutting', 'writers', 'mapping', 'indexed', 'cartoon', 'granted', 'choices', 'outlook', 'massive', 'denmark', 'poverty', 'patrick', 'mystery', 'journey', 'bidding', 'charter', 'barbara', 'blogger', 'reverse', 'deposit', 'seminar', 'specify', 'formats', 'depends', 'editing', 'notices', 'detroit', 'toshiba', 'airline', 'surveys', 'sitting', 'putting', 'vietnam', 
'trailer', 'gardens', 'antique', 'willing', 'density', 'hundred', 'strange', 'mention', 'amended', 'operate', 'doctors', 'domains', 'siemens', 'capture', 'buffalo', 'camping', 'welfare', 'medline', 'massage', 'closing', 'monster', 'columns', 'cookies', 'cruises', 'forever', 'captain', 'imagine', 'heating', 'scripts', 'dealing', 'liberal', 'livecam', 'matthew', 'hobbies', 'console', 'shipped', 'voltage', 'anthony', 'loading', 'ongoing', 'imaging', 'betting', 'liberty', 'wyoming', 'convert', 'analyst', 'finland', 'derived', 'postage', 'applies', 'casinos', 'filters', 'capable', 'douglas', 'elected', 'victory', 'madison', 'citizen', 'anytime', 'lecture', 'genetic', 'promise', 'cabinet', 'tiffany', 'collect', 'streets', 'turning', 'inquiry', 'checked', 'exhibit', 'visible', 'mercury', 'victims', 'burning', 'coupons', 'russell', 'obvious', 'passing', 'awarded', 'clinton', 'masters', 'alberta', 'commons', 'arrival', 'pottery', 'awesome', 'mexican', 'desired', 'assumes', 'heights', 'firefox', 'expense', 'venture', 'healing', 'studios', 'buttons', 'winners', 'rolling', 'arrived', 'creates', 'tourist', 'senator', 'lodging', 'stopped', 'closely', 'visited', 'targets', 'counsel', 'invited', 'farmers', 'queries', 'ukraine', 'absence', 'nearest', 'cluster', 'vendors', 'whereas', 'partial', 'couples', 'ranking', 'simpson', 'sublime', 'trusted', 'receipt', 'knowing', 'uniform', 'dancing', 'publish', 'pioneer', 'acrobat', 'thermal', 'telling', 'coastal', 'charity', 'hungary', 'segment', 'realize', 'insight', 'secrets', 'philips', 'penalty', 'glasses', 'enables', 'builder', 'jessica', 'stewart', 'outcome', 'centres', 'charger', 'cooling', 'divorce', 'shopper', 'exposed', 'telecom', 'founded', 'chronic', 'trained', 'tobacco', 'roberts', 'pension', 'worship', 'damages', 'diverse', 'passion', 'defence', 'patches', 'divided', 'insider', 'pleased', 'vitamin', 'genuine', 'raising', 'billing', 'combine', 'examine', 'bristol', 'sectors', 'grounds', 'regards', 'baskets', 'studied', 
'profits', 'florist', 'deutsch', 'funeral', 'enjoyed', 'charlie', 'francis', 'noticed', 'signals', 'symbols', 'packard', 'holders', 'swedish', 'witness', 'collins', 'gadgets', 'glasgow', 'impacts', 'induced', 'linking', 'appeals', 'illness', 'islamic', 'pending', 'lebanon', 'kennedy', 'teenage', 'vincent', 'secured', 'unusual', 'routine', 'toolbar', 'wearing', 'mounted', 'habitat', 'scanner', 'integer', 'engaged', 'falling', 'dropped', 'besides', 'moments', 'strings', 'torture', 'deleted', 'antenna', 'assumed', 'killing', 'memphis', 'harvard', 'brokers', 'podcast', 'seasons', 'latinas', 'suppose', 'involve', 'younger', 'rapidly', 'outline', 'holland', 'demands', 'careful', 'tracked', 'minimal', 'lottery', 'licence', 'romania', 'consult', 'greatly', 'cycling', 'turkish', 'pentium', 'quantum', 'largely', 'pointer', 'stretch', 'permits', 'cleaner', 'cricket', 'feeding', 'olympic', 'customs', 'rainbow', 'decline', 'israeli', 'hewlett', 'proceed', 'jamaica', 'britney', 'katrina', 'founder', 'dispute', 'adverse', 'excerpt', 'perfume', 'restore', 'creator', 'museums', 'tracker', 'passage', 'jelsoft', 'headset', 'oakland', 'suicide', 'logical', 'extract', 'payable', 'retired', 'remarks', 'decades', 'arising', 'railway', 'pointed', 'causing', 'mistake', 'mineral', 'fortune', 'claimed', 'screens', 'planner', 'croatia', 'stadium', 'edwards', 'costume', 'norfolk', 'painted', 'artwork', 'ethical', 'schemes', 'neutral', 'bedding', 'joining', 'heading', 'equally', 'bearing', 'seniors', 'violent', 'cottage', 'mozilla', 'anymore', 'locator', 'resolve', 'melissa', 'nigeria', 'ceiling', 'anybody', 'singing', 'optimal', 'sucking', 'reuters', 'manuals', 'watched', 'thereof', 'ranging', 'repairs', 'hanging', 'colored', 'routing', 'stanley', 'elegant', 'renewal', 'opposed', 'scoring', 'sisters', 'critics', 'madonna', 'soldier', 'mirrors', 'assault', 'bowling', 'solving', 'deviant', 'imports', 'suspect', 'crucial', 'tuition', 'threats', 'puzzles', 'damaged', 'destroy', 'olympus', 
'starter', 'luggage', 'stylish', 'grocery', 'kenneth', 'jackets', 'excited', 'recover', 'delayed', 'exclude', 'anxiety', 'spatial', 'ceramic', 'fingers', 'raleigh', 'qualify', 'diagram', 'beijing', 'peoples', 'advised', 'calgary', 'interim', 'approve', 'calcium', 'newport', 'indians', 'harvest', 'locally', 'mothers', 'iceland', 'candles', 'sailing', 'morocco', 'refused', 'ecology', 'verizon', 'silicon', 'compete', 'beatles', 'thomson', 'seating', 'exports', 'heather', 'warrant', 'solaris', 'royalty', 'somehow', 'laundry', 'solomon', 'placing', 'filling', 'imposed', 'silence', 'focuses', 'trainer', 'volumes', 'bizrate', 'implied', 'packing', 'statute', 'satisfy', 'shelter', 'bahamas', 'mixture', 'logging', 'hampton', 'borders', 'nursery', 'staying', 'estonia', 'veteran', 'streams', 'landing', 'signing', 'namibia', 'prairie', 'reunion', 'ecuador', 'spirits', 'acquire', 'tribune', 'viruses', 'cheaper', 'leasing', 'beliefs', 'analyze', 'forests', 'invalid', 'martial', 'execute', 'patents', 'accused', 'defines', 'packets', 'trigger', 'beaches', 'folders', 'routers', 'pendant', 'dresses', 'baptist', 'females', 'wallace', 'cuisine', 'myspace', 'theorem', 'radical', 'levitra', 'elderly', 'bolivia', 'hottest', 'stevens', 'alleged', 'compile', 'webster', 'curious', 'freight', 'eclipse', 'shuttle', 'affects', 'ampland', 'fastest', 'injured', 'payroll', 'courier', 'tribute', 'genesis', 'barrier', 'bicycle', 'letting', 'thereby', 'bottles', 'bangkok', 'jeffrey', 'survive', 'clinics', 'offense', 'protest', 'washing', 'enquiry', 'closure', 'intense', 'showers', 'plugins', 'sensors', 'freebsd', 'heavily', 'headers', 'devoted', 'hormone', 'timothy', 'bridges', 'casting', 'shortly', 'cameron', 'andreas', 'warrior', 'diploma', 'copying', 'grammar', 'gregory', 'boolean', 'chances', 'bizarre', 'startup', 'kissing', 'reduces', 'capitol', 'nervous', 'extends', 'replica', 'trinity', 'handled', 'legends', 'exhaust', 'farming', 'handles', 'skilled', 'harmony', 'windsor', 'escorts', 
'coaches', 'vessels', 'arrange', 'unified', 'ignored', 'precise', 'invoice', 'forming', 'embassy', 'rebecca', 'amongst', 'horizon', 'toolbox', 'surfing', 'attract', 'disable', 'succeed', 'leonard', 'lending', 'raymond', 'midwest', 'karaoke', 'observe', 'bermuda', 'mobiles', 'terrace', 'replied', 'seafood', 'durable', 'spencer', 'wichita', 'uruguay', 'tablets', 'futures', 'wanting', 'custody', 'scratch', 'beverly', 'bernard', 'fujitsu', 'rewards', 'beneath', 'seventh', 'soonest', 'lenders', 'fitting', 'surplus', 'belarus', 'gravity', 'guitars', 'essence', 'chester', 'stomach', 'bradley', 'anatomy', 'defects', 'clarity', 'settled', 'folding', 'airfare', 'vaccine', 'minolta', 'implies', 'tension', 'lexmark', 'eternal', 'bennett', 'hotmail', 'armenia', 'closest', 'actress', 'dietary', 'marilyn', 'findlaw', 'matched', 'carroll', 'warming', 'finnish', 'muslims', 'footage', 'reveals', 'carries', 'tunisia', 'bahrain', 'consist', 'naughty', 'hazards', 'insured', 'algebra', 'shadows', 'sunrise', 'weblogs', 'belongs', 'readily', 'ensures', 'lindsay', 'legally', 'briefly', 'fighter', 'retreat', 'adapted', 'estates', 'borough', 'failing', 'andrews', 'modular', 'brandon', 'balloon', 'winston', 'gratuit', 'porsche', 'hopkins', 'infants', 'revenge', 'expires', 'enemies', 'accepts', 'roughly', 'sticker', 'cornell', 'deserve', 'worried', 'garbage', 'phrases', 'chelsea', 'reaches', 'quizzes', 'planets', 'deficit', 'boulder', 'coupled', 'myanmar', 'texture', 'antigua', 'thunder', 'caution', 'pirates', 'origins', 'textile', 'indexes', 'licking', 'markers', 'weights', 'albania', 'lasting', 'webcams', 'surname', 'evident', 'allergy', 'twisted', 'pokemon', 'algeria', 'blessed', 'cardiff', 'reforms', 'lawsuit', 'banners', 'circles', 'passive', 'courage', 'gazette', 'hitachi', 'coleman', 'anaheim', 'knights', 'altered', 'pontiac', 'propose', 'lighter', 'tactics', 'shorter', 'relying', 'refined', 'fifteen', 'predict', 'acrylic', 'shannon', 'toddler', 'blocked', 'strikes', 'dressed', 
'belfast', 'niagara', 'prepaid', 'electro', 'varying', 'trustee', 'tsunami', 'scholar', 'expedia', 'geology', 'coating', 'cleared', 'smilies', 'boating', 'shakira', 'corners', 'broader', 'hitting', 'aquatic', 'gilbert', 'ferrari', 'outputs', 'insulin', 'assured', 'mysimon', 'ambient', 'utilize', 'mileage', 'adaptor', 'hyundai', 'vampire', 'relates', 'framing', 'picking', 'pockets', 'inquire', 'realtor', 'chassis', 'pushing', 'prefers', 'bedford', 'mustang', 'emerald', 'abraham', 'webpage', 'hostels', 'senegal', 'muscles', 'wrapped', 'runtime', 'distant', 'decimal', 'penguin', 'magical', 'miracle', 'reprint', 'hamburg', 'fathers', 'terrain', 'rangers', 'labeled', 'marking', 'sheriff', 'griffin', 'preston', 'longest', 'nirvana', 'destiny', 'madness', 'blowing', 'cardiac', 'vatican', 'seekers', 'numeric', 'skating', 'emperor', 'rebates', 'proudly', 'pulling', 'obesity', 'touring', 'andorra', 'expired', 'travels', 'delight', 'counted', 'declare', 'teaches', 'tragedy', 'painful', 'prayers', 'nowhere', 'rebound', 'baghdad', 'concord', 'liberia', 'sherman', 'workout', 'stating', 'triumph', 'welding', 'locking', 'blanket', 'elliott', 'justify', 'possess', 'vanilla', 'pursuit', 'backing', 'endless', 'figured', 'moldova', 'pressed', 'scanned', 'surgeon', 'missile', 'deborah', 'assists', 'gabriel', 'auditor', 'prophet', 'bracket', 'harmful', 'glucose', 'phantom', 'persian', 'grenada', 'earning', 'mailman', 'somalia', 'novelty', 'budgets', 'toolkit', 'erotica', 'viewers', 'resumes', 'existed', 'optimum', 'refresh', 'webcast', 'stripes', 'expects', 'jumping', 'fabrics', 'polymer', 'hygiene', 'poultry', 'bouquet', 'mandate', 'indices', 'halifax', 'donated', 'stuffed', 'insects', 'cleanup', 'compute', 'redhead', 'arrives', 'tractor', 'touched', 'cologne', 'wishing', 'outlets', 'search', 'online', 'people', 'health', 'system', 'policy', 'number', 'please', 'rights', 'public', 'school', 'review', 'united', 'center', 'travel', 'report', 'member', 'hotels', 'office', 'design', 
'posted', 'within', 'states', 'family', 'prices', 'sports', 'county', 'access', 'change', 'rating', 'return', 'events', 'little', 'movies', 'source', 'author', 'around', 'course', 'canada', 'credit', 'estate', 'select', 'photos', 'thread', 'market', 'really', 'action', 'series', 'second', 'forums', 'better', 'friend', 'server', 'issues', 'street', 'things', 'person', 'mobile', 'offers', 'recent', 'stores', 'memory', 'social', 'august', 'create', 'single', 'latest', 'status', 'browse', 'seller', 'always', 'result', 'groups', 'making', 'future', 'london', 'become', 'garden', 'listed', 'energy', 'images', 'notice', 'others', 'format', 'months', 'safety', 'common', 'living', 'called', 'period', 'window', 'france', 'region', 'island', 'record', 'direct', 'update', 'either', 'centre', 'europe', 'topics', 'videos', 'global', 'player', 'lyrics', 'submit', 'amount', 'though', 'thanks', 'weight', 'choose', 'points', 'camera', 'domain', 'beauty', 'models', 'simple', 'friday', 'annual', 'google', 'church', 'method', 'active', 'figure', 'enough', 'higher', 'yellow', 'french', 'nature', 'orders', 'africa', 'growth', 'agency', 'monday', 'income', 'engine', 'double', 'screen', 'across', 'needed', 'season', 'effect', 'sunday', 'casino', 'volume', 'anyone', 'silver', 'inside', 'mature', 'rather', 'supply', 'robert', 'skills', 'advice', 'career', 'rental', 'middle', 'taking', 'values', 'coming', 'object', 'length', 'client', 'follow', 'sample', 'george', 'choice', 'artist', 'levels', 'letter', 'phones', 'summer', 'degree', 'button', 'matter', 'custom', 'almost', 'editor', 'female', 'thomas', 'cancer', 'reason', 'spring', 'answer', 'police', 'wanted', 'unique', 'survey', 'animal', 'mexico', 'secure', 'simply', 'paypal', 'option', 'master', 'valley', 'larger', 'impact', 'strong', 'ground', 'owners', 'cities', 'ensure', 'budget', 'guides', 'amazon', 'retail', 'useful', 'trying', 'mother', 'joined', 'modern', 'senior', 'charge', 'normal', 'entire', 'output', 'likely', 'indian', 'dating', 
'filter', 'longer', 'behind', 'german', 'buying', 'allows', 'boards', 'string', 'unless', 'target', 'except', 'moving', 'brands', 'places', 'pretty', 'winter', 'boston', 'medium', 'papers', 'awards', 'studio', 'reader', 'device', 'remote', 'theory', 'remove', 'visual', 'martin', 'manual', 'agents', 'repair', 'sector', 'jersey', 'father', 'quotes', 'driver', 'campus', 'beyond', 'museum', 'former', 'parent', 'bottom', 'detail', 'switch', 'titles', 'basket', 'weekly', 'demand', 'square', 'nation', 'module', 'resort', 'random', 'motion', 'forest', 'couple', 'giving', 'chance', 'vision', 'ending', 'listen', 'accept', 'lowest', 'highly', 'appear', 'actual', 'coffee', 'easily', 'poster', 'closed', 'league', 'minute', 'effort', 'fields', 'breast', 'kansas', 'doctor', 'reduce', 'enable', 'leader', 'israel', 'flight', 'pocket', 'factor', 'stream', 'signed', 'errors', 'worked', 'sorted', 'expert', 'became', 'orange', 'marine', 'guitar', 'saying', 'spirit', 'claims', 'branch', 'manage', 'corner', 'oregon', 'tables', 'define', 'racing', 'column', 'plants', 'avenue', 'mental', 'viewed', 'moment', 'attack', 'damage', 'placed', 'dollar', 'bridge', 'native', 'played', 'shirts', 'profit', 'expect', 'russia', 'golden', 'senate', 'forces', 'turned', 'delete', 'signal', 'issued', 'sexual', 'flower', 'passed', 'stated', 'hawaii', 'covers', 'adults', 'script', 'served', 'dining', 'dakota', 'handle', 'pubmed', 'looked', 'logged', 'laptop', 'nearly', 'forgot', 'origin', 'gaming', 'faster', 'dallas', 'bought', 'broken', 'alaska', 'battle', 'equity', 'speech', 'shared', 'sounds', 'forced', 'height', 'obtain', 'remain', 'failed', 'secret', 'austin', 'andrew', 'assets', 'injury', 'joseph', 'lawyer', 'portal', 'gratis', 'toward', 'assist', 'comics', 'houses', 'postal', 'finish', 'daniel', 'brazil', 'static', 'hunter', 'famous', 'writer', 'gender', 'vendor', 'junior', 'ladies', 'ticket', 'agreed', 'soccer', 'import', 'christ', 'scheme', 'manner', 'matrix', 'turkey', 'proper', 'inches', 'shares', 
'colors', 'appeal', 'cruise', 'disney', 'drives', 'dealer', 'nearby', 'happen', 'miller', 'caused', 'luxury', 'frames', 'indeed', 'easier', 'adding', 'mostly', 'taylor', 'prints', 'suites', 'hidden', 'serial', 'relief', 'planet', 'copies', 'recipe', 'permit', 'seeing', 'tennis', 'bureau', 'pieces', 'dinner', 'sydney', 'stress', 'trends', 'fourth', 'wilson', 'charts', 'census', 'poetry', 'lights', 'forget', 'sister', 'newest', 'extent', 'export', 'sweden', 'backup', 'spread', 'expand', 'jordan', 'affect', 'virgin', 'raised', 'blonde', 'albums', 'cheats', 'guests', 'hosted', 'nevada', 'agenda', 'anyway', 'tracks', 'prince', 'circle', 'grants', 'edward', 'launch', 'symbol', 'crafts', 'fiscal', 'styles', 'denver', 'filled', 'notify', 'cables', 'cotton', 'dental', 'killed', 'border', 'debate', 'starts', 'causes', 'opened', 'scores', 'comedy', 'weblog', 'linear', 'edited', 'jewish', 'linked', 'wonder', 'begins', 'reform', 'alerts', 'assume', 'howard', 'leaves', 'checks', 'safari', 'tested', 'formal', 'hockey', 'showed', 'cancel', 'limits', 'outlet', 'winner', 'potter', 'modify', 'oxford', 'patent', 'eating', 'mirror', 'kernel', 'stocks', 'buyers', 'taiwan', 'chosen', 'greece', 'nights', 'behalf', 'liquid', 'salary', 'saving', 'empire', 'resume', 'twenty', 'avatar', 'helped', 'decide', 'guinea', 'muscle', 'attend', 'shower', 'holdem', 'seemed', 'finder', 'unable', 'insert', 'alumni', 'themes', 'powers', 'heaven', 'norway', 'asking', 'blocks', 'bodies', 'paying', 'carbon', 'crisis', 'bright', 'header', 'formed', 'sheets', 'puerto', 'plasma', 'banner', 'dreams', 'stands', 'latina', 'wheels', 'router', 'poland', 'folder', 'womens', 'upload', 'voting', 'courts', 'regard', 'exists', 'smooth', 'strike', 'narrow', 'threat', 'castle', 'missed', 'labels', 'acting', 'stored', 'stable', 'lesson', 'cinema', 'severe', 'deluxe', 'fabric', 'visits', 'flying', 'berlin', 'pounds', 'desire', 'caught', 'marked', 'driven', 'bottle', 'rubber', 'legend', 'python', 'entity', 'holder', 'duties', 
'ethics', 'dragon', 'brings', 'stereo', 'commit', 'jacket', 'oracle', 'excess', 'stamps', 'mining', 'garage', 'thongs', 'morgan', 'prayer', 'cheese', 'fetish', 'apache', 'fellow', 'lounge', 'hilton', 'horror', 'debian', 'mainly', 'ethnic', 'occurs', 'layout', 'horses', 'donate', 'taught', 'worker', 'temple', 'breaks', 'waters', 'prefer', 'harris', 'toyota', 'vector', 'shaved', 'buffer', 'purple', 'mutual', 'syntax', 'prison', 'chairs', 'sierra', 'desert', 'oldest', 'steven', 'summit', 'spaces', 'escape', 'cialis', 'glance', 'arcade', 'filing', 'foster', 'trials', 'tissue', 'aspect', 'counts', 'priced', 'closer', 'shadow', 'riding', 'clinic', 'korean', 'packet', 'funded', 'extend', 'dublin', 'nelson', 'murder', 'grades', 'digest', 'rescue', 'losses', 'combat', 'abroad', 'arthur', 'walker', 'gordon', 'serves', 'palace', 'verify', 'copper', 'nobody', 'cloudy', 'plenty', 'throat', 'ignore', 'wealth', 'vacuum', 'writes', 'plates', 'essays', 'fairly', 'config', 'stupid', 'harbor', 'puzzle', 'rising', 'latter', 'repeat', 'pupils', 'casual', 'polish', 'lovely', 'extras', 'clause', 'troops', 'indoor', 'broker', 'trucks', 'partly', 'donald', 'sensor', 'angels', 'deputy', 'sealed', 'loaded', 'scenes', 'finger', 'locate', 'wooden', 'motors', 'shorts', 'johnny', 'facing', 'refund', 'trembl', 'emails', 'cyprus', 'makers', 'hearts', 'carter', 'legacy', 'danger', 'widely', 'phrase', 'hybrid', 'bigger', 'diesel', 'versus', 'exceed', 'babies', 'albert', 'graham', 'compaq', 'slowly', 'infant', 'samuel', 'unlike', 'wright', 'proven', 'cached', 'warren', 'comply', 'cherry', 'webcam', 'nutten', 'quebec', 'dennis', 'socket', 'silent', 'humans', 'analog', 'facial', 'talent', 'seeker', 'wisdom', 'offset', 'payday', 'philip', 'stages', 'powder', 'assess', 'stones', 'losing', 'gospel', 'knight', 'earned', 'parker', 'triple', 'cooper', 'titans', 'sought', 'median', 'herein', 'basics', 'carpet', 'struct', 'lenses', 'binary', 'walter', 'warner', 'inkjet', 'wizard', 'actors', 'liable', 'morris', 
'eminem', 'recall', 'picked', 'belief', 'bikini', 'lookup', 'ottawa', 'refine', 'bidder', 'singer', 'herald', 'plugin', 'diving', 'invite', 'terror', 'thirty', 'refers', 'victim', 'arrive', 'sunset', 'framed', 'inform', 'murray', 'intent', 'oxygen', 'cookie', 'canyon', 'meters', 'merely', 'passes', 'durham', 'muslim', 'sleeve', 'stroke', 'sharon', 'gloves', 'skiing', 'flickr', 'timing', 'denied', 'deaths', 'rivers', 'thumbs', 'twelve', 'decade', 'titten', 'drinks', 'voices', 'honest', 'albany', 'coding', 'hiking', 'pierre', 'arabia', 'panama', 'athens', 'judges', 'walked', 'nissan', 'afraid', 'norton', 'locked', 'fusion', 'canvas', 'parish', 'coupon', 'nurses', 'tagged', 'killer', 'bishop', 'pulled', 'shaped', 'farmer', 'heroes', 'floral', 'fisher', 'spears', 'worlds', 'guilty', 'tablet', 'crimes', 'moscow', 'thesis', 'pixels', 'totals', 'afford', 'turner', 'spoken', 'stayed', 'redeem', 'rogers', 'regime', 'wishes', 'depend', 'differ', 'monica', 'breath', 'candle', 'herbal', 'loving', 'deemed', 'hacker', 'madrid', 'margin', 'solely', 'norman', 'headed', 'voters', 'murphy', 'thinks', 'justin', 'tricks', 'panels', 'tongue', 'danish', 'monkey', 'invest', 'lovers', 'atomic', 'approx', 'arabic', 'rachel', 'chains', 'engage', 'quoted', 'bronze', 'sender', 'spouse', 'exotic', 'viewer', 'signup', 'proved', 'salmon', 'butter', 'pepper', 'weapon', 'burden', 'finest', 'realty', 'autumn', 'toilet', 'ranked', 'routes', 'packed', 'timely', 'talked', 'villas', 'peeing', 'brooks', 'newton', 'whilst', 'prompt', 'ebooks', 'victor', 'attach', 'spider', 'ranges', 'trails', 'hudson', 'divine', 'dialog', 'venues', 'shield', 'prague', 'pickup', 'nascar', 'sacred', 'chrome', 'oliver', 'delays', 'scored', 'lambda', 'belong', 'barnes', 'rabbit', 'unions', 'frozen', 'scales', 'strain', 'yamaha', 'hebrew', 'gained', 'adjust', 'soviet', 'treaty', 'vienna', 'chapel', 'layers', 'guided', 'powell', 'radius', 'harder', 'stuart', 'monroe', 'tender', 'clouds', 'easter', 'praise', 'jeremy', 'venice', 
'hardly', 'absent', 'hoping', 'bubble', 'vessel', 'lauren', 'ashley', 'scroll', 'relate', 'suffer', 'retain', 'tunnel', 'genres', 'beaver', 'kijiji', 'eagles', 'anchor', 'parade', 'hiring', 'clocks', 'surely', 'stylus', 'arnold', 'chicks', 'cattle', 'reload', 'kuwait', 'struck', 'bridal', 'tribal', 'rebate', 'meetup', 'cycles', 'detect', 'butler', 'techno', 'immune', 'latvia', 'rarely', 'martha', 'trains', 'metals', 'celtic', 'advise', 'boxing', 'hughes', 'reveal', 'watson', 'strict', 'saddam', 'inline', 'timber', 'ruling', 'steady', 'hourly', 'geneva', 'handed', 'intake', 'tucson', 'assure', 'sodium', 'thehun', 'decent', 'dayton', 'carlos', 'valium', 'uganda', 'trivia', 'adidas', 'harvey', 'hazard', 'fruits', 'ribbon', 'suzuki', 'exempt', 'dishes', 'refuse', 'trades', 'superb', 'floors', 'speaks', 'burton', 'copied', 'scotia', 'gibson', 'roller', 'nicole', 'latino', 'mixing', 'fitted', 'asthma', 'reward', 'zambia', 'sprint', 'inputs', 'genome', 'knives', 'honors', 'fallen', 'sussex', 'gather', 'backed', 'alfred', 'motels', 'slight', 'msgstr', 'arrest', 'adipex', 'deeply', 'marina', 'prizes', 'bosnia', 'optics', 'pursue', 'plains', 'lonely', 'hereby', 'collar', 'racial', 'amanda', 'kelkoo', 'novels', 'safely', 'finite', 'kidney', 'allied', 'throws', 'roster', 'nasdaq', 'tuning', 'gotten', 'rocket', 'bullet', 'towers', 'duncan', 'priest', 'ronald', 'trance', 'locale', 'bundle', 'hammer', 'runner', 'notion', 'mailed', 'arctic', 'defend', 'stolen', 'agrees', 'cheers', 'zoning', 'mighty', 'holmes', 'galaxy', 'caring', 'itunes', 'buried', 'newbie', 'darwin', 'milton', 'marker', 'sandra', 'monaco', 'belize', 'robust', 'porter', 'jungle', 'alpine', 'andale', 'cooler', 'shapes', 'andrea', 'breeds', 'rapids', 'bailey', 'eugene', 'metric', 'joshua', 'varied', 'grande', 'assign', 'tigers', 'aurora', 'slides', 'lender', 'chorus', 'rhythm', 'argued', 'clarke', 'sudden', 'claire', 'speeds', 'vocals', 'chubby', 'burner', 'gentle', 'deeper', 'worthy', 'saints', 'helena', 'marion', 
'cowboy', 'queens', 'tribes', 'defeat', 'clicks', 'harper', 'tenant', 'tattoo', 'freely', 'marcus', 'nudist', 'remedy', 'genius', 'barely', 'pamela', 'marble', 'surrey', 'belkin', 'giants', 'solved', 'magnet', 'cayman', 'jaguar', 'posing', 'urgent', 'gothic', 'graphs', 'patrol', 'divide', 'mailto', 'boring', 'schema', 'prefix', 'barrel', 'typing', 'harold', 'floppy', 'namely', 'aerial', 'makeup', 'nathan', 'tobago', 'wicked', 'pushed', 'reggae', 'saturn', 'enzyme', 'zshops', 'planes', 'tackle', 'ambien', 'vernon', 'builds', 'leslie', 'favors', 'potato', 'sticks', 'excuse', 'strand', 'cheque', 'reject', 'italic', 'valued', 'batman', 'luther', 'settle', 'palmer', 'scenic', 'sewing', 'munich', 'celebs', 'trusts', 'pillow', 'harley', 'finals', 'parcel', 'rolled', 'flavor', 'hungry', 'malawi', 'curtis', 'lesser', 'charms', 'trader', 'denial', 'thrown', 'raises', 'ballot', 'angola', 'helmet', 'nickel', 'wallet', 'coated', 'intend', 'louise', 'beings', 'habits', 'accent', 'eleven', 'auburn', 'unlock', 'pledge', 'angela', 'merger', 'nextel', 'rwanda', 'riders', 'remark', 'dozens', 'varies', 'guards', 'kruger', 'granny', 'fleece', 'pierce', 'breach', 'wiring', 'pastor', 'calvin', 'phases', 'ballet', 'bumper', 'garlic', 'banned', 'briefs', 'mumbai', 'radios', 'tariff', 'nvidia', 'hostel', 'employ', 'yearly', 'marvel', 'petite', 'strips', 'gossip', 'rotary', 'kinase', 'skirts', 'serbia', 'guyana', 'deadly', 'rounds', 'dosage', 'baking', 'needle', 'brakes', 'sticky', 'heated', 'jackie', 'adrian', 'brutal', 'yields', 'suited', 'blacks', 'curves', 'vertex', 'tomato', 'waiver', 'garcia', 'valves', 'donors', 'bufing', 'julian', 'velvet', 'wesley', 'lately', 'brunei', 'banana', 'slovak', 'remind', 'affair', 'washer', 'beside', 'mentor', 'fought', 'metres', 'pencil', 'freeze', 'titled', 'sphere', 'ratios', 'walnut', 'ladder', 'italia', 'hansen', 'condos', 'gently', 'fridge', 'fraser', 'blades', 'trauma', 'advert', 'subaru', 'picnic', 'hollow', 'groove', 'sleeps', 'travis', 'heater', 
'colony', 'cannon', 'circus', 'forbes', 'cooked', 'gerald', 'hunger', 'mariah', 'cement', 'closes', 'violin', 'naples', 'modems', 'prozac', 'newark', 'turtle', 'warned', 'neural', 'fossil', 'apollo', 'greene', 'robots', 'nested', 'movers', 'verbal', 'bryant', 'voyuer', 'garmin', 'render', 'carmen', 'impose', 'enters', 'savage', 'willow', 'barbie', 'roland', 'mounts', 'michel', 'subtle', 'cradle', 'virtue', 'corpus', 'shades', 'adware', 'zoloft', 'ultram', 'cursor', 'maiden', 'viking', 'myrtle', 'bother', 'bhutan', 'mating', 'unwrap', 'resist', 'wagner', 'ranger', 'newman', 'scared', 'asylum', 'robbie', 'poison', 'first', 'would', 'click', 'price', 'state', 'email', 'world', 'music', 'video', 'books', 'links', 'years', 'order', 'items', 'group', 'games', 'could', 'great', 'hotel', 'store', 'terms', 'right', 'local', 'using', 'phone', 'forum', 'based', 'black', 'check', 'index', 'women', 'today', 'south', 'pages', 'found', 'house', 'photo', 'power', 'three', 'total', 'place', 'think', 'north', 'posts', 'media', 'water', 'since', 'guide', 'board', 'white', 'small', 'times', 'sites', 'level', 'hours', 'image', 'title', 'shall', 'class', 'still', 'money', 'every', 'visit', 'tools', 'reply', 'value', 'press', 'learn', 'print', 'stock', 'point', 'sales', 'large', 'table', 'start', 'model', 'human', 'movie', 'march', 'yahoo', 'going', 'study', 'staff', 'april', 'never', 'users', 'topic', 'party', 'login', 'legal', 'quote', 'story', 'rates', 'young', 'field', 'paper', 'girls', 'night', 'texas', 'poker', 'issue', 'range', 'court', 'audio', 'light', 'write', 'offer', 'given', 'files', 'event', 'china', 'needs', 'might', 'month', 'major', 'areas', 'space', 'cards', 'child', 'enter', 'share', 'added', 'radio', 'color', 'track', 'least', 'trade', 'david', 'green', 'close', 'drive', 'short', 'means', 'daily', 'beach', 'costs', 'style', 'front', 'parts', 'early', 'miles', 'sound', 'works', 'rules', 'final', 'adult', 'thing', 'cheap', 'third', 'gifts', 'cover', 'often', 'watch', 
'deals', 'words', 'linux', 'james', 'heart', 'error', 'clear', 'makes', 'india', 'taken', 'known', 'cases', 'quick', 'whole', 'later', 'basic', 'shows', 'along', 'among', 'death', 'speed', 'brand', 'stuff', 'japan', 'loans', 'shoes', 'entry', 'notes', 'force', 'river', 'album', 'views', 'plans', 'build', 'types', 'lines', 'apply', 'asked', 'cross', 'weeks', 'lower', 'union', 'names', 'leave', 'teens', 'woman', 'cable', 'score', 'shown', 'flash', 'ideas', 'allow', 'homes', 'super', 'asian', 'cause', 'focus', 'rooms', 'voice', 'comes', 'brown', 'forms', 'glass', 'happy', 'smith', 'thank', 'prior', 'sport', 'ready', 'round', 'built', 'blood', 'earth', 'nokia', 'italy', 'basis', 'award', 'peter', 'extra', 'rated', 'quite', 'horse', 'stars', 'lists', 'owner', 'takes', 'bring', 'input', 'agent', 'valid', 'grand', 'trial', 'units', 'wrote', 'ships', 'metal', 'funds', 'guest', 'seems', 'trust', 'multi', 'grade', 'panel', 'floor', 'match', 'plant', 'sense', 'stage', 'goods', 'maybe', 'spain', 'youth', 'break', 'dance', 'apple', 'enjoy', 'block', 'civil', 'steel', 'songs', 'fixed', 'wrong', 'hands', 'paris', 'fully', 'worth', 'peace', 'coast', 'grant', 'agree', 'blogs', 'scale', 'stand', 'frame', 'chief', 'gives', 'heard', 'begin', 'royal', 'clean', 'bible', 'suite', 'vegas', 'chris', 'piece', 'sheet', 'seven', 'older', 'cells', 'looks', 'calls', 'whose', 'naked', 'lives', 'stone', 'tests', 'buyer', 'steve', 'label', 'scott', 'canon', 'waste', 'chair', 'phase', 'motor', 'shirt', 'crime', 'count', 'claim', 'patch', 'santa', 'alone', 'jones', 'saint', 'drugs', 'joint', 'fresh', 'dates', 'upper', 'prime', 'limit', 'began', 'louis', 'steps', 'shops', 'creek', 'urban', 'tours', 'labor', 'admin', 'heavy', 'solid', 'theme', 'touch', 'goals', 'serve', 'magic', 'mount', 'smart', 'latin', 'avoid', 'birth', 'virus', 'abuse', 'facts', 'faith', 'chain', 'moved', 'reach', 'sorry', 'gamma', 'truth', 'films', 'owned', 'draft', 'chart', 'jesus', 'clubs', 'equal', 'codes', 'kinds', 'teams', 
'funny', 'tried', 'named', 'laser', 'harry', 'taxes', 'mouse', 'brain', 'dream', 'false', 'falls', 'stats', 'carry', 'hello', 'clips', 'brief', 'ended', 'eight', 'wants', 'alert', 'queen', 'sweet', 'diego', 'truck', 'votes', 'ocean', 'signs', 'depth', 'train', 'feeds', 'route', 'frank', 'anime', 'speak', 'query', 'rural', 'judge', 'bytes', 'fight', 'filed', 'korea', 'banks', 'kelly', 'leads', 'brian', 'miami', 'wales', 'minor', 'noted', 'spent', 'davis', 'helps', 'cycle', 'sleep', 'scene', 'drink', 'intel', 'rings', 'henry', 'guess', 'ahead', 'devel', 'delta', 'cisco', 'alpha', 'bonus', 'adobe', 'trees', 'dress', 'refer', 'babes', 'layer', 'spend', 'clock', 'ratio', 'proof', 'empty', 'maine', 'ideal', 'specs', 'parks', 'cream', 'boxes', 'hills', 'aware', 'shape', 'irish', 'firms', 'usage', 'mixed', 'exist', 'wheel', 'angel', 'width', 'noise', 'array', 'greek', 'sharp', 'occur', 'knows', 'coach', 'kevin', 'plate', 'logic', 'sizes', 'plain', 'costa', 'trail', 'buddy', 'setup', 'blues', 'scope', 'crazy', 'bears', 'mouth', 'meter', 'fruit', 'mysql', 'lewis', 'sugar', 'stick', 'allen', 'genre', 'slide', 'exact', 'bound', 'storm', 'micro', 'dolls', 'paint', 'delay', 'pilot', 'czech', 'novel', 'ultra', 'idaho', 'plays', 'truly', 'lodge', 'broad', 'swiss', 'sarah', 'clark', 'foods', 'guard', 'newly', 'raise', 'drama', 'bands', 'lunch', 'audit', 'polls', 'tower', 'jason', 'shell', 'solar', 'catch', 'doubt', 'tasks', 'const', 'doors', 'forth', 'bruce', 'split', 'twice', 'egypt', 'shift', 'simon', 'marks', 'loved', 'birds', 'saved', 'shots', 'moore', 'treat', 'piano', 'risks', 'ports', 'teach', 'rapid', 'hairy', 'dutch', 'boots', 'holds', 'pulse', 'metro', 'strip', 'pearl', 'heads', 'logos', 'honda', 'bills', 'opera', 'asset', 'blank', 'humor', 'lived', 'tight', 'meant', 'plane', 'meets', 'tampa', 'grace', 'susan', 'adams', 'villa', 'inner', 'roman', 'taste', 'trips', 'sides', 'turns', 'cache', 'lease', 'proud', 'giant', 'seats', 'alarm', 'usual', 'angle', 'vinyl', 'worst', 
'honor', 'eagle', 'pants', 'nurse', 'quiet', 'comic', 'crown', 'maker', 'crack', 'picks', 'smoke', 'craft', 'apart', 'blind', 'coins', 'gross', 'epson', 'actor', 'finds', 'fifth', 'prize', 'dirty', 'wayne', 'alive', 'prove', 'wings', 'ridge', 'modem', 'larry', 'skill', 'moves', 'throw', 'trend', 'rhode', 'worse', 'boats', 'tells', 'fiber', 'graph', 'talks', 'bonds', 'fraud', 'roger', 'crash', 'inter', 'grove', 'spray', 'roads', 'faces', 'mayor', 'yield', 'hence', 'radar', 'lakes', 'diary', 'kings', 'flags', 'baker', 'shock', 'walls', 'ebony', 'drawn', 'beast', 'dodge', 'pizza', 'yards', 'woods', 'jokes', 'twiki', 'globe', 'dicke', 'kerry', 'ghost', 'pride', 'keith', 'linda', 'chile', 'maria', 'brass', 'plaza', 'quest', 'trans', 'booty', 'acres', 'venue', 'vital', 'excel', 'modes', 'enemy', 'wells', 'opens', 'lucky', 'thick', 'iraqi', 'vista', 'chips', 'terry', 'flood', 'arena', 'grown', 'jerry', 'smile', 'lands', 'armed', 'laura', 'tokyo', 'nikon', 'candy', 'pills', 'tiger', 'folks', 'boost', 'icons', 'moral', 'keeps', 'pound', 'roses', 'bread', 'tough', 'gonna', 'chest', 'billy', 'craig', 'solve', 'nancy', 'tones', 'sight', 'towns', 'worry', 'reads', 'roles', 'glory', 'saudi', 'fault', 'karen', 'jimmy', 'rugby', 'fluid', 'barry', 'devil', 'grass', 'marie', 'kenya', 'sized', 'manga', 'theft', 'swing', 'dated', 'shoot', 'elite', 'poems', 'robot', 'winds', 'gnome', 'roots', 'noble', 'shore', 'loves', 'loose', 'slots', 'rocks', 'genes', 'hosts', 'atlas', 'feels', 'ralph', 'corps', 'liver', 'decor', 'texts', 'evans', 'fails', 'aging', 'alice', 'intro', 'clerk', 'mills', 'jeans', 'fonts', 'favor', 'sigma', 'xhtml', 'aside', 'essay', 'camps', 'aaron', 'trace', 'packs', 'spoke', 'arrow', 'rough', 'weird', 'holes', 'blade', 'meals', 'robin', 'strap', 'crowd', 'cloud', 'valve', 'knife', 'shelf', 'liked', 'adopt', 'fotos', 'outer', 'tales', 'islam', 'nodes', 'seeds', 'cited', 'skype', 'tired', 'steam', 'acute', 'stood', 'carol', 'stack', 'curve', 'amber', 'trunk', 'waves', 
'camel', 'lamps', 'juice', 'chase', 'sauce', 'beads', 'flows', 'fewer', 'proxy', 'lanka', 'voted', 'bikes', 'gates', 'slave', 'lycos', 'zdnet', 'combo', 'charm', 'basin', 'ranch', 'drunk', 'toner', 'latex', 'delhi', 'alien', 'broke', 'nepal', 'nylon', 'discs', 'rocky', 'fleet', 'bunch', 'cents', 'omega', 'civic', 'saver', 'grill', 'grain', 'wanna', 'seeks', 'gains', 'spots', 'salon', 'turbo', 'thats', 'aimed', 'reset', 'brush', 'spare', 'kodak', 'skirt', 'honey', 'gauge', 'faced', 'sixth', 'farms', 'cheat', 'sandy', 'macro', 'laugh', 'pitch', 'autos', 'perry', 'dozen', 'teeth', 'cloth', 'stamp', 'lotus', 'cargo', 'salem', 'likes', 'tapes', 'zones', 'races', 'maple', 'depot', 'blend', 'julie', 'janet', 'phpbb', 'probe', 'helen', 'lopez', 'debug', 'chuck', 'ebook', 'bingo', 'minds', 'xanax', 'sunny', 'leeds', 'cedar', 'blair', 'hopes', 'mason', 'burns', 'pumps', 'mario', 'utils', 'pairs', 'chose', 'blast', 'tommy', 'brake', 'congo', 'olive', 'cyber', 'clone', 'dicks', 'relay', 'tears', 'oasis', 'angry', 'lover', 'rolls', 'malta', 'daddy', 'ferry', 'omaha', 'loads', 'motel', 'rally', 'dying', 'stuck', 'stops', 'vocal', 'organ', 'lemon', 'toxic', 'bench', 'rider', 'butts', 'bobby', 'sheep', 'wines', 'salad', 'paste', 'katie', 'relax', 'sword', 'sells', 'coral', 'pixel', 'float', 'colin', 'paths', 'acids', 'dairy', 'admit', 'fancy', 'samoa', 'squad', 'wages', 'males', 'chaos', 'wheat', 'bases', 'unity', 'bride', 'begun', 'socks', 'essex', 'fever', 'drums', 'rover', 'flame', 'tanks', 'spell', 'emily', 'annex', 'sudan', 'hints', 'wired', 'elvis', 'argue', 'arise', 'jamie', 'chess', 'oscar', 'menus', 'canal', 'amino', 'herbs', 'lying', 'drill', 'bryan', 'hobby', 'tries', 'trick', 'myers', 'drops', 'wider', 'screw', 'blame', 'fifty', 'uncle', 'jacob', 'randy', 'brick', 'naval', 'donna', 'cabin', 'eddie', 'fired', 'perth', 'syria', 'klein', 'tires', 'retro', 'anger', 'suits', 'glenn', 'handy', 'crops', 'guild', 'tribe', 'batch', 'alter', 'ghana', 'edges', 'twins', 'amend', 
'chick', 'thong', 'medal', 'walks', 'booth', 'indie', 'bones', 'breed', 'polar', 'msgid', 'carey', 'danny', 'patio', 'lloyd', 'beans', 'ellis', 'snake', 'julia', 'berry', 'ought', 'fixes', 'sends', 'mazda', 'timer', 'tyler', 'verse', 'highs', 'ellen', 'racks', 'nasty', 'tumor', 'watts', 'forty', 'tubes', 'floyd', 'queue', 'skins', 'exams', 'welsh', 'belly', 'haiti', 'elder', 'sonic', 'thumb', 'twist', 'ranks', 'debut', 'volvo', 'penny', 'ivory', 'remix', 'alias', 'newer', 'spice', 'ascii', 'donor', 'trash', 'manor', 'diane', 'disco', 'endif', 'minus', 'milan', 'shade', 'digit', 'lions', 'pools', 'lyric', 'grave', 'howto', 'devon', 'saves', 'lobby', 'punch', 'gotta', 'karma', 'betty', 'lucas', 'mardi', 'shake', 'holly', 'silly', 'mercy', 'fence', 'diana', 'shame', 'fatal', 'flesh', 'jesse', 'qatar', 'sheer', 'witch', 'cohen', 'puppy', 'kathy', 'smell', 'satin', 'promo', 'tunes', 'lucia', 'nerve', 'renew', 'locks', 'euros', 'rebel', 'hired', 'hindu', 'kills', 'slope', 'nails', 'whats', 'rides', 'rehab', 'merit', 'disks', 'condo', 'fairy', 'shaft', 'casio', 'kitty', 'drain', 'monte', 'fires', 'panic', 'leone', 'onion', 'beats', 'merry', 'scuba', 'verde', 'dried', 'derby', 'annie', 'derek', 'steal', 'fears', 'tuner', 'alike', 'sagem', 'scout', 'dealt', 'bucks', 'badge', 'wrist', 'heath', 'lexus', 'realm', 'jenny', 'yemen', 'buses', 'rouge', 'yeast', 'kenny', 'yukon', 'singh', 'brook', 'wives', 'xerox', 'sorts', 'vsnet', 'papua', 'armor', 'viral', 'pipes', 'laden', 'aruba', 'merge', 'edgar', 'dubai', 'allan', 'sperm', 'filme', 'craps', 'frost', 'sally', 'yacht', 'tracy', 'whale', 'shark', 'grows', 'cliff', 'tract', 'shine', 'wendy', 'diffs', 'ozone', 'pasta', 'serum', 'swift', 'inbox', 'focal', 'samba', 'wound', 'belle', 'cindy', 'lined', 'boxed', 'cubic', 'spies', 'elect', 'bunny', 'chevy', 'tions', 'flyer', 'baths', 'emacs', 'climb', 'sparc', 'dover', 'token', 'kinda', 'dylan', 'belts', 'burke', 'clara', 'flush', 'hayes', 'moses', 'johns', 'jewel', 'teddy', 'dryer', 
'ruled', 'funky', 'joins', 'scary', 'mpegs', 'cakes', 'mixer', 'sbjct', 'tooth', 'stays', 'drove', 'upset', 'mines', 'logan', 'lance', 'colon', 'lanes', 'purse', 'align', 'bless', 'crest', 'alloy', 'plots', 'tulsa', 'casey', 'draws', 'bloom', 'loops', 'surge', 'tahoe', 'souls', 'spank', 'vault', 'wires', 'mails', 'blake', 'orbit', 'niger', 'bacon', 'paxil', 'spine', 'trout', 'apnic', 'fatty', 'joyce', 'marco', 'isaac', 'oxide', 'badly', 'scoop', 'sanyo', 'blink', 'carlo', 'tiles', 'tamil', 'fuzzy', 'grams', 'forge', 'dense', 'brave', 'awful', 'meyer', 'wagon', 'knock', 'peers', 'quilt', 'notre', 'mambo', 'flour', 'choir', 'blond', 'burst', 'wiley', 'fibre', 'daisy', 'crude', 'bored', 'allah', 'fares', 'hoped', 'safer', 'marsh', 'ricky', 'theta', 'stake', 'arbor', 'home', 'page', 'free', 'time', 'site', 'news', 'also', 'help', 'view', 'like', 'find', 'date', 'back', 'list', 'name', 'year', 'next', 'used', 'work', 'last', 'data', 'make', 'post', 'city', 'best', 'good', 'well', 'info', 'high', 'book', 'read', 'need', 'many', 'user', 'said', 'mail', 'full', 'life', 'know', 'days', 'part', 'real', 'item', 'ebay', 'must', 'made', 'line', 'send', 'type', 'take', 'area', 'want', 'long', 'code', 'show', 'even', 'much', 'sign', 'file', 'link', 'open', 'case', 'game', 'care', 'size', 'shop', 'text', 'rate', 'form', 'love', 'john', 'main', 'call', 'save', 'york', 'card', 'jobs', 'food', 'sale', 'teen', 'room', 'join', 'west', 'look', 'left', 'team', 'week', 'note', 'live', 'june', 'plan', 'cost', 'july', 'test', 'come', 'cart', 'play', 'less', 'blog', 'park', 'side', 'give', 'sell', 'body', 'east', 'club', 'road', 'gift', 'hard', 'four', 'blue', 'easy', 'star', 'hand', 'keep', 'baby', 'term', 'film', 'head', 'cell', 'self', 'away', 'sure', 'cars', 'tell', 'able', 'gold', 'arts', 'past', 'five', 'upon', 'says', 'land', 'done', 'ever', 'word', 'bill', 'talk', 'kids', 'true', 'else', 'mark', 'rock', 'tips', 'plus', 'auto', 'edit', 'fast', 'fact', 'unit', 'tech', 'meet', 'feel', 
'bank', 'risk', 'town', 'girl', 'toys', 'golf', 'loan', 'wide', 'sort', 'half', 'step', 'none', 'paul', 'lake', 'sony', 'fire', 'chat', 'html', 'loss', 'face', 'base', 'near', 'stay', 'turn', 'mean', 'king', 'copy', 'drug', 'pics', 'cash', 'seen', 'port', 'stop', 'soon', 'held', 'mind', 'lost', 'tour', 'menu', 'hope', 'wish', 'role', 'came', 'fine', 'hour', 'bush', 'huge', 'kind', 'move', 'logo', 'nice', 'sent', 'band', 'lead', 'went', 'mode', 'fund', 'male', 'took', 'song', 'cnet', 'late', 'fall', 'idea', 'tool', 'hill', 'maps', 'deal', 'hold', 'safe', 'feed', 'hall', 'anti', 'ship', 'paid', 'hair', 'tree', 'thus', 'wall', 'wine', 'vote', 'ways', 'rule', 'told', 'feet', 'door', 'cool', 'asia', 'uses', 'java', 'pass', 'fees', 'skin', 'prev', 'mary', 'ring', 'iraq', 'boys', 'deep', 'rest', 'pool', 'mini', 'fish', 'pack', 'born', 'race', 'debt', 'core', 'sets', 'wood', 'rent', 'dark', 'host', 'isbn', 'fair', 'ohio', 'gets', 'dead', 'mike', 'trip', 'poor', 'eyes', 'farm', 'lord', 'hear', 'goes', 'wife', 'hits', 'zone', 'jack', 'flat', 'flow', 'path', 'laws', 'skip', 'diet', 'army', 'gear', 'lots', 'firm', 'jump', 'dvds', 'ball', 'goal', 'sold', 'wind', 'palm', 'pain', 'xbox', 'oral', 'ford', 'edge', 'root', 'pink', 'shot', 'cold', 'foot', 'mass', 'heat', 'wild', 'miss', 'task', 'soft', 'fuel', 'walk', 'wait', 'rose', 'pick', 'load', 'tags', 'guys', 'drop', 'rich', 'ipod', 'seem', 'hire', 'gave', 'ones', 'rank', 'kong', 'died', 'inch', 'snow', 'camp', 'fill', 'gone', 'fort', 'gene', 'disc', 'boat', 'icon', 'ends', 'cast', 'felt', 'soul', 'aids', 'flag', 'atom', 'iron', 'void', 'disk', 'desk', 'dave', 'hong', 'vice', 'duty', 'bear', 'gain', 'lack', 'iowa', 'knew', 'zoom', 'blow', 'clip', 'wire', 'tape', 'spam', 'acid', 'cent', 'null', 'zero', 'roll', 'bath', 'font', 'beta', 'fail', 'jazz', 'bags', 'wear', 'rare', 'bars', 'dual', 'rise', 'bird', 'lady', 'fans', 'dell', 'seat', 'bids', 'toll', 'cape', 'mine', 'math', 'dogs', 'moon', 'fear', 'wars', 'kept', 'beat', 'arms', 
'utah', 'hide', 'slow', 'faqs', 'nine', 'eric', 'spot', 'grow', 'rain', 'onto', 'diff', 'bass', 'hole', 'pets', 'ride', 'pair', 'runs', 'yeah', 'evil', 'euro', 'peak', 'salt', 'bell', 'jeff', 'lane', 'kill', 'ages', 'plug', 'cook', 'perl', 'bike', 'lose', 'seek', 'tony', 'kits', 'soil', 'matt', 'exit', 'iran', 'keys', 'wave', 'holy', 'acts', 'mesh', 'dean', 'poll', 'unix', 'bond', 'jean', 'visa', 'pure', 'lens', 'draw', 'warm', 'babe', 'crew', 'legs', 'rear', 'node', 'lock', 'mile', 'mens', 'bowl', 'tank', 'navy', 'dish', 'adam', 'slot', 'gray', 'demo', 'hate', 'rice', 'loop', 'gary', 'vary', 'rome', 'arab', 'milk', 'boot', 'push', 'misc', 'alan', 'dear', 'beer', 'jose', 'jane', 'earn', 'twin', 'dont', 'bits', 'suit', 'chip', 'char', 'echo', 'grid', 'voip', 'pull', 'nasa', 'nick', 'plot', 'pump', 'anne', 'exam', 'ryan', 'beds', 'grey', 'bold', 'scan', 'aged', 'bulk', 'pmid', 'cute', 'para', 'seed', 'peer', 'meat', 'alex', 'bang', 'bone', 'bugs', 'gate', 'tone', 'busy', 'neck', 'wing', 'tiny', 'rail', 'tube', 'belt', 'luck', 'dial', 'gang', 'cake', 'semi', 'andy', 'cafe', 'till', 'shoe', 'sand', 'seal', 'lies', 'pipe', 'deck', 'thin', 'sick', 'dose', 'lets', 'cats', 'greg', 'folk', 'okay', 'hist', 'lift', 'lisa', 'mall', 'fell', 'yard', 'sean', 'pour', 'tion', 'dust', 'wiki', 'kent', 'adds', 'ward', 'roof', 'kiss', 'rush', 'mpeg', 'yoga', 'lamp', 'rico', 'phil', 'http', 'glad', 'wins', 'rack', 'boss', 'ross', 'anna', 'solo', 'tall', 'pdas', 'nova', 'wake', 'drum', 'foto', 'ease', 'tabs', 'pine', 'tend', 'gulf', 'rick', 'hunt', 'thai', 'fred', 'mill', 'burn', 'labs', 'sole', 'laid', 'clay', 'weak', 'blvd', 'wise', 'odds', 'marc', 'sons', 'leaf', 'cuba', 'silk', 'kate', 'wolf', 'fits', 'kick', 'meal', 'hurt', 'slip', 'cuts', 'mars', 'caps', 'pill', 'meta', 'mint', 'spin', 'wash', 'aims', 'ieee', 'corp', 'soap', 'axis', 'guns', 'hero', 'punk', 'duke', 'pace', 'wage', 'dawn', 'carl', 'coat', 'rica', 'doll', 'peru', 'nike', 'reed', 'mice', 'temp', 'vast', 'wrap', 'mood', 
'quiz', 'beam', 'tops', 'shut', 'ncaa', 'thou', 'mask', 'coal', 'lion', 'goto', 'neil', 'beef', 'hats', 'surf', 'hook', 'cord', 'crop', 'lite', 'sing', 'tons', 'hang', 'hood', 'fame', 'eggs', 'ruby', 'mins', 'stem', 'drew', 'tune', 'corn', 'puts', 'grew', 'trek', 'ties', 'brad', 'jury', 'tail', 'lawn', 'soup', 'byte', 'nose', 'oclc', 'juan', 'thru', 'jews', 'trim', 'espn', 'quit', 'lung', 'todd', 'doug', 'sees', 'bull', 'cole', 'mart', 'tale', 'lynn', 'docs', 'coin', 'fake', 'cure', 'arch', 'hdtv', 'asin', 'bomb', 'harm', 'deer', 'oven', 'noon', 'cams', 'joel', 'proc', 'mate', 'chef', 'isle', 'slim', 'luke', 'comp', 'pete', 'spec', 'penn', 'midi', 'tied', 'dale', 'oils', 'sept', 'unto', 'pays', 'lang', 'stud', 'fold', 'phys', 'pole', 'mega', 'bend', 'moms', 'glen', 'lips', 'pond', 'tire', 'chad', 'josh', 'drag', 'ripe', 'rely', 'scsi', 'nuts', 'nail', 'span', 'joke', 'univ', 'pads', 'inns', 'cups', 'foam', 'poem', 'asks', 'bean', 'bias', 'swim', 'nano', 'loud', 'rats', 'stat', 'cruz', 'bios', 'thee', 'ruth', 'pray', 'pope', 'jeep', 'bare', 'hung', 'mono', 'tile', 'apps', 'ciao', 'knee', 'prep', 'chem', 'pros', 'cant', 'sara', 'joan', 'duck', 'dive', 'fiji', 'audi', 'raid', 'volt', 'dirt', 'acer', 'dist', 'geek', 'sink', 'grip', 'watt', 'pins', 'reno', 'polo', 'horn', 'prot', 'frog', 'logs', 'snap', 'jpeg', 'swap', 'flip', 'buzz', 'nuke', 'boom', 'calm', 'fork', 'troy', 'zope', 'gmbh', 'sims', 'tray', 'sage', 'suse', 'cave', 'wool', 'eyed', 'grab', 'oops', 'trap', 'fool', 'karl', 'dies', 'jail', 'ipaq', 'comm', 'lace', 'ugly', 'hart', 'ment', 'biol', 'rows', 'treo', 'gods', 'poly', 'ears', 'fist', 'mere', 'cons', 'taxi', 'worn', 'shaw', 'expo', 'deny', 'bali', 'judy', 'trio', 'cube', 'rugs', 'fate', 'gras', 'oval', 'soma', 'href', 'benz', 'wifi', 'tier', 'earl', 'guam', 'cite', 'mess', 'rope', 'dump', 'hose', 'pubs', 'mild', 'clan', 'sync', 'mesa', 'hull', 'shed', 'memo', 'tide', 'funk', 'reel', 'bind', 'rand', 'buck', 'usgs', 'acre', 'lows', 'aqua', 'chen', 'emma', 
'pest', 'reef', 'chan', 'beth', 'jill', 'sofa', 'dans', 'viii', 'tent', 'dept', 'hack', 'dare', 'hawk', 'lamb', 'junk', 'lucy', 'hans', 'poet', 'epic', 'sake', 'sans', 'lean', 'dude', 'luis', 'alto', 'gore', 'cult', 'dash', 'cage', 'divx', 'hugh', 'jake', 'eval', 'ping', 'flux', 'muze', 'oman', 'rage', 'adsl', 'prix', 'avon', 'rays', 'walt', 'acne', 'libs', 'undo', 'dana', 'halo', 'gays', 'exec', 'maui', 'vids', 'yale', 'doom', 'owen', 'bite', 'issn', 'myth', 'weed', 'oecd', 'dice', 'quad', 'dock', 'mods', 'hint', 'msie', 'buys', 'pork', 'barn', 'fare', 'asus', 'bald', 'fuji', 'leon', 'mold', 'dame', 'herb', 'alot', 'idle', 'cove', 'casa', 'eden', 'incl', 'reid', 'flex', 'rosa', 'hash', 'lazy', 'carb', 'pens', 'worm', 'deaf', 'mats', 'blah', 'mime', 'feof', 'usda', 'keen', 'peas', 'urls', 'owns', 'zinc', 'guru', 'levy', 'grad', 'bras', 'kyle', 'pale', 'gaps', 'tear', 'nest', 'nato', 'gale', 'stan', 'idol', 'moss', 'cork', 'mali', 'dome', 'heel', 'yang', 'dumb', 'feat', 'ntsc', 'usps', 'conf', 'glow', 'oaks', 'erik', 'paso', 'norm', 'ware', 'jade', 'foul', 'keno', 'seas', 'pose', 'mrna', 'goat', 'sail', 'sega', 'cdna', 'bolt', 'gage', 'urge', 'smtp', 'kurt', 'neon', 'lone', 'cope', 'lime', 'kirk', 'bool', 'spas', 'jets', 'intl', 'yarn', 'knit', 'pike', 'hugo', 'gzip', 'ctrl', 'bent', 'laos']
| 48,512 | 97,023 | 0.639842 | 8,737 | 97,024 | 7.105299 | 0.999886 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09005 | 97,024 | 1 | 97,024 | 97,024 | 0.70315 | 0 | 0 | 0 | 0 | 0 | 0.639749 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 0 | 8 |
b6882e8f422c5a132f9592d1592559e28e6969c2 | 24,679 | py | Python | yandex/cloud/serverless/apigateway/v1/apigateway_service_pb2_grpc.py | korsar182/python-sdk | 873bf2a9b136a8f2faae72e86fae1f5b5c3d896a | [
"MIT"
] | 36 | 2018-12-23T13:51:50.000Z | 2022-03-25T07:48:24.000Z | yandex/cloud/serverless/apigateway/v1/apigateway_service_pb2_grpc.py | korsar182/python-sdk | 873bf2a9b136a8f2faae72e86fae1f5b5c3d896a | [
"MIT"
] | 15 | 2019-02-28T04:55:09.000Z | 2022-03-06T23:17:24.000Z | yandex/cloud/serverless/apigateway/v1/apigateway_service_pb2_grpc.py | korsar182/python-sdk | 873bf2a9b136a8f2faae72e86fae1f5b5c3d896a | [
"MIT"
] | 18 | 2019-02-23T07:10:57.000Z | 2022-03-28T14:41:08.000Z | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from yandex.cloud.access import access_pb2 as yandex_dot_cloud_dot_access_dot_access__pb2
from yandex.cloud.operation import operation_pb2 as yandex_dot_cloud_dot_operation_dot_operation__pb2
from yandex.cloud.serverless.apigateway.v1 import apigateway_pb2 as yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__pb2
from yandex.cloud.serverless.apigateway.v1 import apigateway_service_pb2 as yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2
class ApiGatewayServiceStub(object):
    """A set of methods for managing API gateways.

    Client-side stub: each attribute is a unary-unary callable bound to one
    RPC of ``yandex.cloud.serverless.apigateway.v1.ApiGatewayService``.
    NOTE: this file is generated by the gRPC protocol compiler — do not edit.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Gateway read RPCs return apigateway/service response messages;
        # every mutating RPC below returns a long-running Operation.
        self.Get = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/Get',
                request_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.GetApiGatewayRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__pb2.ApiGateway.FromString,
                )
        self.List = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/List',
                request_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListApiGatewayRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListApiGatewayResponse.FromString,
                )
        self.Create = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/Create',
                request_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.CreateApiGatewayRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.Update = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/Update',
                request_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.UpdateApiGatewayRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.Delete = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/Delete',
                request_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.DeleteApiGatewayRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.AddDomain = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/AddDomain',
                request_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.AddDomainRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.RemoveDomain = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/RemoveDomain',
                request_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.RemoveDomainRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.GetOpenapiSpec = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/GetOpenapiSpec',
                request_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.GetOpenapiSpecRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.GetOpenapiSpecResponse.FromString,
                )
        self.ListOperations = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/ListOperations',
                request_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListOperationsResponse.FromString,
                )
        # Access bindings reuse the shared yandex.cloud.access messages.
        self.ListAccessBindings = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/ListAccessBindings',
                request_serializer=yandex_dot_cloud_dot_access_dot_access__pb2.ListAccessBindingsRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_access_dot_access__pb2.ListAccessBindingsResponse.FromString,
                )
        self.SetAccessBindings = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/SetAccessBindings',
                request_serializer=yandex_dot_cloud_dot_access_dot_access__pb2.SetAccessBindingsRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.UpdateAccessBindings = channel.unary_unary(
                '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/UpdateAccessBindings',
                request_serializer=yandex_dot_cloud_dot_access_dot_access__pb2.UpdateAccessBindingsRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
class ApiGatewayServiceServicer(object):
    """A set of methods for managing API gateways.

    Server-side base class: every handler answers UNIMPLEMENTED until a
    subclass overrides it. Generated by the gRPC protocol compiler — do not
    edit; register implementations via add_ApiGatewayServiceServicer_to_server.
    """

    def Get(self, request, context):
        """Returns the specified API gateway. Note that only API gateway basic attributes are returned.
        To get associated openapi specification, make a [GetOpenapiSpec] request.
        To get the list of all available API gateways, make a [List] request.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def List(self, request, context):
        """Retrieves the list of API gateways in the specified folder.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Create(self, request, context):
        """Creates an API gateway in the specified folder.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Update(self, request, context):
        """Updates the specified API gateway.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Delete(self, request, context):
        """Deletes the specified API gateway.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def AddDomain(self, request, context):
        """Attaches domain to the specified API gateway.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RemoveDomain(self, request, context):
        """Detaches domain from the specified API gateway.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetOpenapiSpec(self, request, context):
        """Returns the OpenAPI specification of specified API gateway.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListOperations(self, request, context):
        """Lists operations for the specified API gateway.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListAccessBindings(self, request, context):
        """Lists existing access bindings for the specified API gateway.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def SetAccessBindings(self, request, context):
        """Sets access bindings for the specified API gateway.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UpdateAccessBindings(self, request, context):
        """Updates access bindings for the specified API gateway.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_ApiGatewayServiceServicer_to_server(servicer, server):
    """Register all ApiGatewayService RPC handlers of *servicer* on *server*.

    Generated by the gRPC protocol compiler — do not edit. Each entry pairs a
    method name with the matching request deserializer / response serializer.
    """
    rpc_method_handlers = {
            'Get': grpc.unary_unary_rpc_method_handler(
                    servicer.Get,
                    request_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.GetApiGatewayRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__pb2.ApiGateway.SerializeToString,
            ),
            'List': grpc.unary_unary_rpc_method_handler(
                    servicer.List,
                    request_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListApiGatewayRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListApiGatewayResponse.SerializeToString,
            ),
            'Create': grpc.unary_unary_rpc_method_handler(
                    servicer.Create,
                    request_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.CreateApiGatewayRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.SerializeToString,
            ),
            'Update': grpc.unary_unary_rpc_method_handler(
                    servicer.Update,
                    request_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.UpdateApiGatewayRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.SerializeToString,
            ),
            'Delete': grpc.unary_unary_rpc_method_handler(
                    servicer.Delete,
                    request_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.DeleteApiGatewayRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.SerializeToString,
            ),
            'AddDomain': grpc.unary_unary_rpc_method_handler(
                    servicer.AddDomain,
                    request_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.AddDomainRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.SerializeToString,
            ),
            'RemoveDomain': grpc.unary_unary_rpc_method_handler(
                    servicer.RemoveDomain,
                    request_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.RemoveDomainRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.SerializeToString,
            ),
            'GetOpenapiSpec': grpc.unary_unary_rpc_method_handler(
                    servicer.GetOpenapiSpec,
                    request_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.GetOpenapiSpecRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.GetOpenapiSpecResponse.SerializeToString,
            ),
            'ListOperations': grpc.unary_unary_rpc_method_handler(
                    servicer.ListOperations,
                    request_deserializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListOperationsRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListOperationsResponse.SerializeToString,
            ),
            'ListAccessBindings': grpc.unary_unary_rpc_method_handler(
                    servicer.ListAccessBindings,
                    request_deserializer=yandex_dot_cloud_dot_access_dot_access__pb2.ListAccessBindingsRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_access_dot_access__pb2.ListAccessBindingsResponse.SerializeToString,
            ),
            'SetAccessBindings': grpc.unary_unary_rpc_method_handler(
                    servicer.SetAccessBindings,
                    request_deserializer=yandex_dot_cloud_dot_access_dot_access__pb2.SetAccessBindingsRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.SerializeToString,
            ),
            'UpdateAccessBindings': grpc.unary_unary_rpc_method_handler(
                    servicer.UpdateAccessBindings,
                    request_deserializer=yandex_dot_cloud_dot_access_dot_access__pb2.UpdateAccessBindingsRequest.FromString,
                    response_serializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'yandex.cloud.serverless.apigateway.v1.ApiGatewayService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
 # This class is part of an EXPERIMENTAL API.
class ApiGatewayService(object):
    """A set of methods for managing API gateways.

    Channel-less convenience API: each static method dials *target* through
    ``grpc.experimental.unary_unary`` for a single call. Generated code — do
    not edit.
    """

    @staticmethod
    def Get(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/Get',
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.GetApiGatewayRequest.SerializeToString,
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__pb2.ApiGateway.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def List(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/List',
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListApiGatewayRequest.SerializeToString,
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListApiGatewayResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Create(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/Create',
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.CreateApiGatewayRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Update(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/Update',
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.UpdateApiGatewayRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Delete(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/Delete',
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.DeleteApiGatewayRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def AddDomain(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/AddDomain',
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.AddDomainRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def RemoveDomain(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/RemoveDomain',
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.RemoveDomainRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetOpenapiSpec(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/GetOpenapiSpec',
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.GetOpenapiSpecRequest.SerializeToString,
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.GetOpenapiSpecResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListOperations(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/ListOperations',
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListOperationsRequest.SerializeToString,
            yandex_dot_cloud_dot_serverless_dot_apigateway_dot_v1_dot_apigateway__service__pb2.ListOperationsResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListAccessBindings(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/ListAccessBindings',
            yandex_dot_cloud_dot_access_dot_access__pb2.ListAccessBindingsRequest.SerializeToString,
            yandex_dot_cloud_dot_access_dot_access__pb2.ListAccessBindingsResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def SetAccessBindings(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/SetAccessBindings',
            yandex_dot_cloud_dot_access_dot_access__pb2.SetAccessBindingsRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def UpdateAccessBindings(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/yandex.cloud.serverless.apigateway.v1.ApiGatewayService/UpdateAccessBindings',
            yandex_dot_cloud_dot_access_dot_access__pb2.UpdateAccessBindingsRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 54.720621 | 164 | 0.716155 | 2,426 | 24,679 | 6.8277 | 0.062242 | 0.064356 | 0.064236 | 0.078 | 0.889942 | 0.886561 | 0.882939 | 0.838264 | 0.833857 | 0.766663 | 0 | 0.007701 | 0.221281 | 24,679 | 450 | 165 | 54.842222 | 0.854199 | 0.051177 | 0 | 0.568 | 1 | 0 | 0.101465 | 0.072211 | 0 | 0 | 0 | 0 | 0 | 1 | 0.069333 | false | 0 | 0.013333 | 0.032 | 0.122667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1e8970e57258c9482d48f0f521608b93fc6eac13 | 2,105 | py | Python | CPAC/seg_preproc/utils.py | danlurie/C-PAC | 5ddc2d4fa71eb13728d6156f73cb6e7621dda69d | [
"BSD-3-Clause"
] | null | null | null | CPAC/seg_preproc/utils.py | danlurie/C-PAC | 5ddc2d4fa71eb13728d6156f73cb6e7621dda69d | [
"BSD-3-Clause"
] | null | null | null | CPAC/seg_preproc/utils.py | danlurie/C-PAC | 5ddc2d4fa71eb13728d6156f73cb6e7621dda69d | [
"BSD-3-Clause"
] | null | null | null | # Import packages
import os
import sys
import re
import commands
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
def pick_wm_0(probability_maps):
    """Return the CSF probability map from the segmentation outputs.

    Parameters
    ----------
    probability_maps : list of str or str
        Paths to the segmentation probability maps (a bare path is also
        accepted).

    Returns
    -------
    str or None
        Path ending in ``prob_0.nii.gz``, or None if no map matches.
    """
    # Bug fix: the original unwrapped a one-element list to a string and then
    # iterated over its *characters*, so a single matching map returned None.
    if isinstance(probability_maps, list) and len(probability_maps) == 1:
        probability_maps = probability_maps[0]
    if isinstance(probability_maps, str):
        probability_maps = [probability_maps]
    if not isinstance(probability_maps, list):
        return None
    for map_path in probability_maps:
        print(map_path)  # kept for parity with the original debug output
        if map_path.endswith("prob_0.nii.gz"):
            return map_path
    return None
def pick_wm_1(probability_maps):
    """Return the gray matter probability map from the segmentation outputs.

    Parameters
    ----------
    probability_maps : list of str or str
        Paths to the segmentation probability maps (a bare path is also
        accepted).

    Returns
    -------
    str or None
        Path ending in ``prob_1.nii.gz``, or None if no map matches.
    """
    # Bug fix: the original unwrapped a one-element list to a string and then
    # iterated over its *characters*, so a single matching map returned None.
    if isinstance(probability_maps, list) and len(probability_maps) == 1:
        probability_maps = probability_maps[0]
    if isinstance(probability_maps, str):
        probability_maps = [probability_maps]
    if not isinstance(probability_maps, list):
        return None
    for map_path in probability_maps:
        print(map_path)  # kept for parity with the original debug output
        if map_path.endswith("prob_1.nii.gz"):
            return map_path
    return None
def pick_wm_2(probability_maps):
    """Return the white matter probability map from the segmentation outputs.

    Parameters
    ----------
    probability_maps : list of str or str
        Paths to the segmentation probability maps (a bare path is also
        accepted).

    Returns
    -------
    str or None
        Path ending in ``prob_2.nii.gz``, or None if no map matches.
    """
    # Bug fix: the original unwrapped a one-element list to a string and then
    # iterated over its *characters*, so a single matching map returned None.
    if isinstance(probability_maps, list) and len(probability_maps) == 1:
        probability_maps = probability_maps[0]
    if isinstance(probability_maps, str):
        probability_maps = [probability_maps]
    if not isinstance(probability_maps, list):
        return None
    for map_path in probability_maps:
        print(map_path)  # kept for parity with the original debug output
        if map_path.endswith("prob_2.nii.gz"):
            return map_path
    return None
| 19.311927 | 88 | 0.610451 | 252 | 2,105 | 4.956349 | 0.210317 | 0.324259 | 0.105685 | 0.060048 | 0.821457 | 0.821457 | 0.801441 | 0.801441 | 0.801441 | 0.746998 | 0 | 0.010267 | 0.305938 | 2,105 | 108 | 89 | 19.490741 | 0.844627 | 0.007126 | 0 | 0.74359 | 0 | 0 | 0.032365 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.307692 | null | null | 0.076923 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 10 |
1e9b6e4b49b20269d842ef5a6b5484993e675c8c | 46,815 | py | Python | montreal_forced_aligner/multiprocessing/transcription.py | erayee/Montreal-Forced-Aligner | becd3b316d1999212910b4d0976e8c0405241493 | [
"MIT"
] | 702 | 2016-06-14T23:18:05.000Z | 2022-03-28T07:39:54.000Z | montreal_forced_aligner/multiprocessing/transcription.py | erayee/Montreal-Forced-Aligner | becd3b316d1999212910b4d0976e8c0405241493 | [
"MIT"
] | 399 | 2016-06-15T17:02:58.000Z | 2022-03-31T06:21:41.000Z | montreal_forced_aligner/multiprocessing/transcription.py | erayee/Montreal-Forced-Aligner | becd3b316d1999212910b4d0976e8c0405241493 | [
"MIT"
] | 175 | 2016-07-27T15:26:04.000Z | 2022-03-22T04:05:57.000Z | import subprocess
import os
import shutil
import sys
from .helper import run_mp, run_non_mp, thirdparty_binary
from ..dictionary import MultispeakerDictionary
def _run_latgen(mdl_path, hclg_path, word_symbol_path, feat_string, lat_path,
                config, log_path, num_threads):
    """Run Kaldi gmm-latgen-faster(-parallel), writing lattices to *lat_path*.

    Serial vs. parallel binary is chosen by *num_threads*; stderr goes to
    *log_path*.
    """
    # During an fMLLR first pass, alternative beam / max-active values may be
    # configured for the initial decode.
    if config.fmllr and config.first_beam is not None:
        beam = config.first_beam
    else:
        beam = config.beam
    if config.fmllr and config.first_max_active is not None and not config.no_speakers:
        max_active = config.first_max_active
    else:
        max_active = config.max_active
    args = ['--max-active={}'.format(max_active),
            '--beam={}'.format(beam),
            '--lattice-beam={}'.format(config.lattice_beam),
            '--allow-partial=true',
            '--word-symbol-table={}'.format(word_symbol_path),
            '--acoustic-scale={}'.format(config.acoustic_scale)]
    if num_threads is None:
        binary = thirdparty_binary('gmm-latgen-faster')
    else:
        binary = thirdparty_binary('gmm-latgen-faster-parallel')
        args.append('--num-threads={}'.format(num_threads))
    with open(log_path, 'w', encoding='utf8') as log_file:
        decode_proc = subprocess.Popen(
            [binary] + args + [mdl_path, hclg_path, feat_string, "ark:" + lat_path],
            stderr=log_file)
        decode_proc.communicate()


def decode_func(model_directory, job_name, config, feat_string, output_directory, num_threads=None,
                dictionary_names=None):
    """Decode one job's features into lattices with the final acoustic model.

    Parameters
    ----------
    model_directory : str
        Directory containing ``final.mdl``, ``HCLG.fst`` and ``words.txt``
        (per-dictionary prefixed variants when *dictionary_names* is given).
    job_name : int or str
        Job identifier used in lattice/log/feature file names.
    config : object
        Decoding configuration (beam, max_active, fmllr flags, scales).
    feat_string : str
        Kaldi feature specifier for this job.
    output_directory : str
        Where lattices (``lat.<job>[.<dict>]``) and logs are written.
    num_threads : int, optional
        If given, use the multi-threaded decoder with this many threads.
    dictionary_names : list of str, optional
        Decode once per dictionary with per-dictionary graphs and features.

    Existing lattice files are left untouched (decoding is skipped for them).
    """
    mdl_path = os.path.join(model_directory, 'final.mdl')
    if dictionary_names is None:
        lat_path = os.path.join(output_directory, 'lat.{}'.format(job_name))
        if os.path.exists(lat_path):
            return
        word_symbol_path = os.path.join(model_directory, 'words.txt')
        hclg_path = os.path.join(model_directory, 'HCLG.fst')
        log_path = os.path.join(output_directory, 'log', 'decode.{}.log'.format(job_name))
        _run_latgen(mdl_path, hclg_path, word_symbol_path, feat_string, lat_path,
                    config, log_path, num_threads)
    else:
        for name in dictionary_names:
            lat_path = os.path.join(output_directory, 'lat.{}.{}'.format(job_name, name))
            if os.path.exists(lat_path):
                continue
            word_symbol_path = os.path.join(model_directory, name + '_words.txt')
            hclg_path = os.path.join(model_directory, name + '_HCLG.fst')
            log_path = os.path.join(output_directory, 'log', 'decode.{}.{}.log'.format(job_name, name))
            # Point the feature string at this dictionary's per-job archives.
            dictionary_feat_string = feat_string.replace(
                'feats.{}.scp'.format(job_name), 'feats.{}.{}.scp'.format(job_name, name))
            dictionary_feat_string = dictionary_feat_string.replace(
                'cmvn.{}.scp'.format(job_name), 'cmvn.{}.{}.scp'.format(job_name, name))
            dictionary_feat_string = dictionary_feat_string.replace(
                'utt2spk.{}'.format(job_name), 'utt2spk.{}.{}'.format(job_name, name))
            _run_latgen(mdl_path, hclg_path, word_symbol_path, dictionary_feat_string, lat_path,
                        config, log_path, num_threads)
def _best_available_lattice(lat_path, rescored_lat_path, carpa_rescored_lat_path):
    """Prefer CARPA-rescored, then LM-rescored, then first-pass lattices."""
    if os.path.exists(carpa_rescored_lat_path):
        return carpa_rescored_lat_path
    if os.path.exists(rescored_lat_path):
        return rescored_lat_path
    return lat_path


def _score_lattice(lat_path, words_path, tra_path, log_path,
                   language_model_weight, word_insertion_penalty):
    """Pipe lattice-scale | lattice-add-penalty | lattice-best-path to *tra_path*."""
    with open(log_path, 'w', encoding='utf8') as log_file:
        scale_proc = subprocess.Popen([thirdparty_binary('lattice-scale'),
                                       '--inv-acoustic-scale={}'.format(language_model_weight),
                                       'ark:' + lat_path, 'ark:-'],
                                      stdout=subprocess.PIPE, stderr=log_file)
        penalty_proc = subprocess.Popen([thirdparty_binary('lattice-add-penalty'),
                                         '--word-ins-penalty={}'.format(word_insertion_penalty),
                                         'ark:-', 'ark:-'],
                                        stdin=scale_proc.stdout, stdout=subprocess.PIPE,
                                        stderr=log_file)
        best_path_proc = subprocess.Popen([thirdparty_binary('lattice-best-path'),
                                           '--word-symbol-table={}'.format(words_path),
                                           'ark:-', 'ark,t:' + tra_path],
                                          stdin=penalty_proc.stdout, stderr=log_file)
        best_path_proc.communicate()


def score_func(model_directory, transcribe_directory, job_name, config, output_directory, language_model_weight=None,
               word_insertion_penalty=None, dictionary_names=None):
    """Extract best-path transcripts (``tra.<job>[.<dict>]``) from lattices.

    Uses the most-rescored lattice available (CARPA > LM-rescored > raw).
    *language_model_weight* / *word_insertion_penalty* default to the values
    in *config* when not given; *dictionary_names* selects per-dictionary
    lattices, words tables and output files.
    """
    if language_model_weight is None:
        language_model_weight = config.language_model_weight
    if word_insertion_penalty is None:
        word_insertion_penalty = config.word_insertion_penalty
    if dictionary_names is None:
        jobs = [('{}'.format(job_name), 'words.txt')]
    else:
        jobs = [('{}.{}'.format(job_name, name), name + '_words.txt')
                for name in dictionary_names]
    for suffix, words_file_name in jobs:
        lat_path = _best_available_lattice(
            os.path.join(transcribe_directory, 'lat.{}'.format(suffix)),
            os.path.join(transcribe_directory, 'lat.{}.rescored'.format(suffix)),
            os.path.join(transcribe_directory, 'lat.{}.carparescored'.format(suffix)))
        words_path = os.path.join(model_directory, words_file_name)
        tra_path = os.path.join(output_directory, 'tra.{}'.format(suffix))
        log_path = os.path.join(output_directory, 'log', 'score.{}.log'.format(suffix))
        _score_lattice(lat_path, words_path, tra_path, log_path,
                       language_model_weight, word_insertion_penalty)
def lm_rescore_func(model_directory, job_name, config, feat_string, output_directory, num_threads=None,
                    dictionary_names=None):
    """Rescore this job's lattices from the small to the medium G.fst.

    Runs Kaldi ``lattice-lmrescore-pruned`` over ``lat.<job>[.<dict>]`` and
    writes ``lat.<job>[.<dict>].lmrescored``. *feat_string* and *num_threads*
    are accepted for signature parity with the other job functions but unused.
    """
    # Windows OpenFst builds take a different fstproject flag spelling.
    if sys.platform == 'win32':
        project_type_arg = '--project_output=true'
    else:
        project_type_arg = '--project_type=output'
    if dictionary_names is None:
        jobs = [('{}'.format(job_name), 'small_G.fst', 'med_G.fst')]
    else:
        jobs = [('{}.{}'.format(job_name, name), name + '_small_G.fst', name + '_med_G.fst')
                for name in dictionary_names]
    for suffix, old_g_name, new_g_name in jobs:
        rescored_lat_path = os.path.join(output_directory, 'lat.{}.lmrescored'.format(suffix))
        lat_path = os.path.join(output_directory, 'lat.{}'.format(suffix))
        old_g_path = os.path.join(model_directory, old_g_name)
        new_g_path = os.path.join(model_directory, new_g_name)
        log_path = os.path.join(output_directory, 'log', 'lm_rescore.{}.log'.format(suffix))
        with open(log_path, 'w', encoding='utf8') as log_file:
            lattice_scale_proc = subprocess.Popen(
                [thirdparty_binary('lattice-lmrescore-pruned'),
                 '--acoustic-scale={}'.format(config.acoustic_scale),
                 f"fstproject {project_type_arg} {old_g_path} |",
                 f"fstproject {project_type_arg} {new_g_path} |",
                 'ark:' + lat_path, 'ark:' + rescored_lat_path],
                stderr=log_file)
            lattice_scale_proc.communicate()
def carpa_lm_rescore_func(model_directory, job_name, config, feat_string, output_directory, num_threads=None,
dictionary_names=None):
if sys.platform == 'win32':
project_type_arg = '--project_output=true'
else:
project_type_arg = '--project_type=output'
if dictionary_names is None:
lat_path = os.path.join(output_directory, 'lat.{}.lmrescored'.format(job_name))
rescored_lat_path = os.path.join(output_directory, 'lat.{}.carparescored'.format(job_name))
if os.path.exists(rescored_lat_path):
return
old_g_path = os.path.join(model_directory, 'med_G.fst')
new_g_path = os.path.join(model_directory, 'G.carpa')
log_path = os.path.join(output_directory, 'log', 'carpa_lm_rescore.{}.log'.format(job_name))
with open(log_path, 'w', encoding='utf8') as log_file:
lmrescore_proc = subprocess.Popen([thirdparty_binary('lattice-lmrescore'),
'--lm-scale=-1.0', 'ark:' + lat_path,
f"fstproject {project_type_arg} {old_g_path} |",
'ark:-'], stdout=subprocess.PIPE, stderr=log_file)
lmrescore_const_proc = subprocess.Popen([thirdparty_binary('lattice-lmrescore-const-arpa'),
'--lm-scale=1.0', 'ark:-',
new_g_path,
'ark:' + rescored_lat_path], stdin=lmrescore_proc.stdout,
stderr=log_file)
lmrescore_const_proc.communicate()
else:
for name in dictionary_names:
lat_path = os.path.join(output_directory, 'lat.{}.{}.lmrescored'.format(job_name, name))
rescored_lat_path = os.path.join(output_directory, 'lat.{}.{}.carparescored'.format(job_name, name))
if os.path.exists(rescored_lat_path):
continue
old_g_path = os.path.join(model_directory, name + '_med_G.fst')
new_g_path = os.path.join(model_directory, name + '_G.carpa')
log_path = os.path.join(output_directory, 'log', 'carpa_lm_rescore.{}.{}.log'.format(job_name, name))
with open(log_path, 'w', encoding='utf8') as log_file:
lmrescore_proc = subprocess.Popen([thirdparty_binary('lattice-lmrescore'),
'--lm-scale=-1.0', 'ark:' + lat_path,
f"fstproject {project_type_arg} {old_g_path} |",
'ark:-'], stdout=subprocess.PIPE, stderr=log_file)
lmrescore_const_proc = subprocess.Popen([thirdparty_binary('lattice-lmrescore-const-arpa'),
'--lm-scale=1.0', 'ark:-',
new_g_path,
'ark:' + rescored_lat_path], stdin=lmrescore_proc.stdout,
stderr=log_file)
lmrescore_const_proc.communicate()
def transcribe(transcriber):
"""
"""
model_directory = transcriber.model_directory
decode_directory = transcriber.transcribe_directory
log_directory = os.path.join(decode_directory, 'log')
config = transcriber.transcribe_config
corpus = transcriber.corpus
num_jobs = corpus.num_jobs
jobs = [(model_directory, x, config,
config.feature_config.construct_feature_proc_string(corpus.split_directory(), model_directory, x),
decode_directory, corpus.original_num_jobs, transcriber.dictionaries_for_job(x))
for x in range(num_jobs)]
run_non_mp(decode_func, jobs, log_directory)
if config.use_mp:
run_mp(lm_rescore_func, jobs, log_directory)
else:
run_non_mp(lm_rescore_func, jobs, log_directory)
if config.use_mp:
run_mp(carpa_lm_rescore_func, jobs, log_directory)
else:
run_non_mp(carpa_lm_rescore_func, jobs, log_directory)
if transcriber.evaluation_mode:
best_wer = 10000
best = None
for lmwt in range(transcriber.min_language_model_weight, transcriber.max_language_model_weight):
for wip in transcriber.word_insertion_penalties:
out_dir = os.path.join(decode_directory, 'eval_{}_{}'.format(lmwt, wip))
log_dir = os.path.join(out_dir, 'log')
os.makedirs(log_dir, exist_ok=True)
jobs = [(model_directory, decode_directory, x, config, out_dir, lmwt, wip,
transcriber.dictionaries_for_job(x))
for x in range(num_jobs)]
if config.use_mp:
run_mp(score_func, jobs, log_dir)
else:
run_non_mp(score_func, jobs, log_dir)
ser, wer = transcriber.evaluate(out_dir, out_dir)
if wer < best_wer:
best = (lmwt, wip)
transcriber.transcribe_config.language_model_weight = best[0]
transcriber.transcribe_config.word_insertion_penalty = best[1]
else:
jobs = [(model_directory, decode_directory, x, config, decode_directory, None, None,
transcriber.dictionaries_for_job(x))
for x in range(num_jobs)]
if config.use_mp:
run_mp(score_func, jobs, log_directory)
else:
run_non_mp(score_func, jobs, log_directory)
def initial_fmllr_func(initial_directory, split_directory, sil_phones, job_name, mdl, config, feat_string,
output_directory,
num_threads=None, dictionary_names=None):
if dictionary_names is None:
log_path = os.path.join(output_directory, 'log', 'initial_fmllr.{}.log'.format(job_name))
pre_trans_path = os.path.join(output_directory, 'pre_trans.{}'.format(job_name))
lat_path = os.path.join(initial_directory, 'lat.{}'.format(job_name))
spk2utt_path = os.path.join(split_directory, 'spk2utt.{}'.format(job_name))
with open(log_path, 'w', encoding='utf8') as log_file:
latt_post_proc = subprocess.Popen([thirdparty_binary('lattice-to-post'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'ark:' + lat_path, 'ark:-'], stdout=subprocess.PIPE,
stderr=log_file)
weight_silence_proc = subprocess.Popen([thirdparty_binary('weight-silence-post'),
str(config.silence_weight),
sil_phones, mdl, 'ark:-', 'ark:-'],
stdin=latt_post_proc.stdout, stdout=subprocess.PIPE,
stderr=log_file)
gmm_gpost_proc = subprocess.Popen([thirdparty_binary('gmm-post-to-gpost'),
mdl, feat_string, 'ark:-', 'ark:-'],
stdin=weight_silence_proc.stdout, stdout=subprocess.PIPE,
stderr=log_file)
fmllr_proc = subprocess.Popen([thirdparty_binary('gmm-est-fmllr-gpost'),
'--fmllr-update-type={}'.format(config.fmllr_update_type),
'--spk2utt=ark:' + spk2utt_path, mdl, feat_string,
'ark,s,cs:-', 'ark:' + pre_trans_path],
stdin=gmm_gpost_proc.stdout, stdout=subprocess.PIPE, stderr=log_file)
fmllr_proc.communicate()
else:
for name in dictionary_names:
log_path = os.path.join(output_directory, 'log', 'initial_fmllr.{}.{}.log'.format(job_name, name))
pre_trans_path = os.path.join(output_directory, 'pre_trans.{}.{}'.format(job_name, name))
lat_path = os.path.join(initial_directory, 'lat.{}.{}'.format(job_name, name))
spk2utt_path = os.path.join(split_directory, 'spk2utt.{}.{}'.format(job_name, name))
dictionary_feat_string = feat_string.replace('feats.{}.scp'.format(job_name),
'feats.{}.{}.scp'.format(job_name, name))
dictionary_feat_string = dictionary_feat_string.replace('cmvn.{}.scp'.format(job_name),
'cmvn.{}.{}.scp'.format(job_name, name))
dictionary_feat_string = dictionary_feat_string.replace('utt2spk.{}'.format(job_name),
'utt2spk.{}.{}'.format(job_name, name))
with open(log_path, 'w', encoding='utf8') as log_file:
latt_post_proc = subprocess.Popen([thirdparty_binary('lattice-to-post'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'ark:' + lat_path, 'ark:-'], stdout=subprocess.PIPE,
stderr=log_file)
weight_silence_proc = subprocess.Popen([thirdparty_binary('weight-silence-post'),
str(config.silence_weight),
sil_phones, mdl, 'ark:-', 'ark:-'],
stdin=latt_post_proc.stdout, stdout=subprocess.PIPE,
stderr=log_file)
gmm_gpost_proc = subprocess.Popen([thirdparty_binary('gmm-post-to-gpost'),
mdl, dictionary_feat_string, 'ark:-', 'ark:-'],
stdin=weight_silence_proc.stdout, stdout=subprocess.PIPE,
stderr=log_file)
fmllr_proc = subprocess.Popen([thirdparty_binary('gmm-est-fmllr-gpost'),
'--fmllr-update-type={}'.format(config.fmllr_update_type),
'--spk2utt=ark:' + spk2utt_path, mdl, dictionary_feat_string,
'ark,s,cs:-', 'ark:' + pre_trans_path],
stdin=gmm_gpost_proc.stdout, stdout=subprocess.PIPE, stderr=log_file)
fmllr_proc.communicate()
def lat_gen_fmllr_func(model_directory, split_directory, sil_phones, job_name, mdl, config, feat_string,
output_directory,
num_threads=None, dictionary_names=None):
if dictionary_names is None:
log_path = os.path.join(output_directory, 'log', 'lat_gen.{}.log'.format(job_name))
word_symbol_path = os.path.join(model_directory, 'words.txt')
hclg_path = os.path.join(model_directory, 'HCLG.fst')
tmp_lat_path = os.path.join(output_directory, 'lat.tmp.{}'.format(job_name))
with open(log_path, 'w', encoding='utf8') as log_file:
if num_threads is None:
lat_gen_proc = subprocess.Popen([thirdparty_binary('gmm-latgen-faster'),
'--max-active={}'.format(config.max_active),
'--beam={}'.format(config.beam),
'--lattice-beam={}'.format(config.lattice_beam),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--determinize-lattice=false',
'--allow-partial=true',
'--word-symbol-table={}'.format(word_symbol_path),
mdl, hclg_path, feat_string, 'ark:' + tmp_lat_path
], stderr=log_file)
else:
lat_gen_proc = subprocess.Popen([thirdparty_binary('gmm-latgen-faster-parallel'),
'--max-active={}'.format(config.max_active),
'--beam={}'.format(config.beam),
'--lattice-beam={}'.format(config.lattice_beam),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--determinize-lattice=false',
'--allow-partial=true',
'--num-threads={}'.format(num_threads),
'--word-symbol-table={}'.format(word_symbol_path),
mdl, hclg_path, feat_string, 'ark:' + tmp_lat_path
], stderr=log_file)
lat_gen_proc.communicate()
else:
for name in dictionary_names:
log_path = os.path.join(output_directory, 'log', 'lat_gen.{}.{}.log'.format(job_name, name))
word_symbol_path = os.path.join(model_directory, name + '_words.txt')
hclg_path = os.path.join(model_directory, name + '_HCLG.fst')
tmp_lat_path = os.path.join(output_directory, 'lat.tmp.{}.{}'.format(job_name, name))
dictionary_feat_string = feat_string.replace('feats.{}.scp'.format(job_name),
'feats.{}.{}.scp'.format(job_name, name))
dictionary_feat_string = dictionary_feat_string.replace('cmvn.{}.scp'.format(job_name),
'cmvn.{}.{}.scp'.format(job_name, name))
dictionary_feat_string = dictionary_feat_string.replace('utt2spk.{}'.format(job_name),
'utt2spk.{}.{}'.format(job_name, name))
with open(log_path, 'w', encoding='utf8') as log_file:
if num_threads is None:
lat_gen_proc = subprocess.Popen([thirdparty_binary('gmm-latgen-faster'),
'--max-active={}'.format(config.max_active),
'--beam={}'.format(config.beam),
'--lattice-beam={}'.format(config.lattice_beam),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--determinize-lattice=false',
'--allow-partial=true',
'--word-symbol-table={}'.format(word_symbol_path),
mdl, hclg_path, dictionary_feat_string, 'ark:' + tmp_lat_path
], stderr=log_file)
else:
lat_gen_proc = subprocess.Popen([thirdparty_binary('gmm-latgen-faster-parallel'),
'--max-active={}'.format(config.max_active),
'--beam={}'.format(config.beam),
'--lattice-beam={}'.format(config.lattice_beam),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--determinize-lattice=false',
'--allow-partial=true',
'--num-threads={}'.format(num_threads),
'--word-symbol-table={}'.format(word_symbol_path),
mdl, hclg_path, dictionary_feat_string, 'ark:' + tmp_lat_path
], stderr=log_file)
lat_gen_proc.communicate()
def final_fmllr_est_func(model_directory, split_directory, sil_phones, job_name, mdl, config, feat_string, si_directory,
fmllr_directory, num_threads=None, dictionary_names=None):
if dictionary_names is None:
log_path = os.path.join(fmllr_directory, 'log', 'final_fmllr.{}.log'.format(job_name))
pre_trans_path = os.path.join(fmllr_directory, 'pre_trans.{}'.format(job_name))
trans_tmp_path = os.path.join(fmllr_directory, 'trans_tmp.{}'.format(job_name))
trans_path = os.path.join(fmllr_directory, 'trans.{}'.format(job_name))
lat_path = os.path.join(si_directory, 'lat.{}'.format(job_name))
spk2utt_path = os.path.join(split_directory, 'spk2utt.{}'.format(job_name))
tmp_lat_path = os.path.join(fmllr_directory, 'lat.tmp.{}'.format(job_name))
with open(log_path, 'w', encoding='utf8') as log_file:
if num_threads is None:
determinize_proc = subprocess.Popen([thirdparty_binary('lattice-determinize-pruned'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--beam=4.0', 'ark:' + tmp_lat_path, 'ark:-'],
stderr=log_file, stdout=subprocess.PIPE)
else:
determinize_proc = subprocess.Popen([thirdparty_binary('lattice-determinize-pruned-parallel'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--num-threads={}'.format(num_threads),
'--beam=4.0', 'ark:' + tmp_lat_path, 'ark:-'],
stderr=log_file, stdout=subprocess.PIPE)
latt_post_proc = subprocess.Popen([thirdparty_binary('lattice-to-post'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'ark:-', 'ark:-'],
stdin=determinize_proc.stdout, stdout=subprocess.PIPE, stderr=log_file)
weight_silence_proc = subprocess.Popen([thirdparty_binary('weight-silence-post'),
str(config.silence_weight),
sil_phones, mdl, 'ark:-', 'ark:-'],
stdin=latt_post_proc.stdout, stdout=subprocess.PIPE,
stderr=log_file)
fmllr_proc = subprocess.Popen([thirdparty_binary('gmm-est-fmllr'),
'--fmllr-update-type={}'.format(config.fmllr_update_type),
'--spk2utt=ark:' + spk2utt_path, mdl, feat_string,
'ark,s,cs:-', 'ark:-'],
stdin=weight_silence_proc.stdout, stdout=subprocess.PIPE, stderr=log_file)
compose_proc = subprocess.Popen([thirdparty_binary('compose-transforms'),
'--b-is-affine=true', 'ark:-',
'ark:' + pre_trans_path, 'ark:' + trans_path],
stderr=log_file, stdin=fmllr_proc.stdout)
compose_proc.communicate()
else:
for name in dictionary_names:
log_path = os.path.join(fmllr_directory, 'log', 'final_fmllr.{}.{}.log'.format(job_name, name))
pre_trans_path = os.path.join(fmllr_directory, 'pre_trans.{}.{}'.format(job_name, name))
trans_tmp_path = os.path.join(fmllr_directory, 'trans_tmp.{}.{}'.format(job_name, name))
trans_path = os.path.join(fmllr_directory, 'trans.{}.{}'.format(job_name, name))
lat_path = os.path.join(si_directory, 'lat.{}.{}'.format(job_name, name))
spk2utt_path = os.path.join(split_directory, 'spk2utt.{}.{}'.format(job_name, name))
tmp_lat_path = os.path.join(fmllr_directory, 'lat.tmp.{}.{}'.format(job_name, name))
dictionary_feat_string = feat_string.replace('feats.{}.scp'.format(job_name),
'feats.{}.{}.scp'.format(job_name, name))
dictionary_feat_string = dictionary_feat_string.replace('cmvn.{}.scp'.format(job_name),
'cmvn.{}.{}.scp'.format(job_name, name))
dictionary_feat_string = dictionary_feat_string.replace('utt2spk.{}'.format(job_name),
'utt2spk.{}.{}'.format(job_name, name))
with open(log_path, 'w', encoding='utf8') as log_file:
if num_threads is None:
determinize_proc = subprocess.Popen([thirdparty_binary('lattice-determinize-pruned'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--beam=4.0', 'ark:' + tmp_lat_path, 'ark:-'],
stderr=log_file, stdout=subprocess.PIPE)
else:
determinize_proc = subprocess.Popen([thirdparty_binary('lattice-determinize-pruned-parallel'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--num-threads={}'.format(num_threads),
'--beam=4.0', 'ark:' + tmp_lat_path, 'ark:-'],
stderr=log_file, stdout=subprocess.PIPE)
latt_post_proc = subprocess.Popen([thirdparty_binary('lattice-to-post'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'ark:-', 'ark:-'],
stdin=determinize_proc.stdout, stdout=subprocess.PIPE,
stderr=log_file)
weight_silence_proc = subprocess.Popen([thirdparty_binary('weight-silence-post'),
str(config.silence_weight),
sil_phones, mdl, 'ark:-', 'ark:-'],
stdin=latt_post_proc.stdout, stdout=subprocess.PIPE,
stderr=log_file)
fmllr_proc = subprocess.Popen([thirdparty_binary('gmm-est-fmllr'),
'--fmllr-update-type={}'.format(config.fmllr_update_type),
'--spk2utt=ark:' + spk2utt_path, mdl, dictionary_feat_string,
'ark,s,cs:-', 'ark:-'],
stdin=weight_silence_proc.stdout, stdout=subprocess.PIPE, stderr=log_file)
compose_proc = subprocess.Popen([thirdparty_binary('compose-transforms'),
'--b-is-affine=true', 'ark:-',
'ark:' + pre_trans_path, 'ark:' + trans_path],
stderr=log_file, stdin=fmllr_proc.stdout)
compose_proc.communicate()
def fmllr_rescore_func(directory, split_directory, sil_phones, job_name, mdl, config, feat_string, output_directory,
num_threads=None, dictionary_names=None):
if dictionary_names is None:
log_path = os.path.join(output_directory, 'log', 'fmllr_rescore.{}.log'.format(job_name))
tmp_lat_path = os.path.join(output_directory, 'lat.tmp.{}'.format(job_name))
final_lat_path = os.path.join(output_directory, 'lat.{}'.format(job_name))
with open(log_path, 'w', encoding='utf8') as log_file:
rescore_proc = subprocess.Popen([thirdparty_binary('gmm-rescore-lattice'),
mdl, 'ark:' + tmp_lat_path,
feat_string, 'ark:-'],
stdout=subprocess.PIPE, stderr=log_file)
if num_threads is None:
determinize_proc = subprocess.Popen([thirdparty_binary('lattice-determinize-pruned'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--beam={}'.format(config.lattice_beam),
'ark:-', 'ark:' + final_lat_path
], stdin=rescore_proc.stdout, stderr=log_file)
else:
determinize_proc = subprocess.Popen([thirdparty_binary('lattice-determinize-pruned-parallel'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--beam={}'.format(config.lattice_beam),
'--num-threads={}'.format(num_threads),
'ark:-', 'ark:' + final_lat_path
], stdin=rescore_proc.stdout, stderr=log_file)
determinize_proc.communicate()
else:
for name in dictionary_names:
log_path = os.path.join(output_directory, 'log', 'fmllr_rescore.{}.{}.log'.format(job_name, name))
tmp_lat_path = os.path.join(output_directory, 'lat.tmp.{}.{}'.format(job_name, name))
final_lat_path = os.path.join(output_directory, 'lat.{}.{}'.format(job_name, name))
dictionary_feat_string = feat_string.replace('feats.{}.scp'.format(job_name),
'feats.{}.{}.scp'.format(job_name, name))
dictionary_feat_string = dictionary_feat_string.replace('cmvn.{}.scp'.format(job_name),
'cmvn.{}.{}.scp'.format(job_name, name))
dictionary_feat_string = dictionary_feat_string.replace('utt2spk.{}'.format(job_name),
'utt2spk.{}.{}'.format(job_name, name))
with open(log_path, 'w', encoding='utf8') as log_file:
rescore_proc = subprocess.Popen([thirdparty_binary('gmm-rescore-lattice'),
mdl, 'ark:' + tmp_lat_path,
dictionary_feat_string, 'ark:-'],
stdout=subprocess.PIPE, stderr=log_file)
if num_threads is None:
determinize_proc = subprocess.Popen([thirdparty_binary('lattice-determinize-pruned'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--beam={}'.format(config.lattice_beam),
'ark:-', 'ark:' + final_lat_path
], stdin=rescore_proc.stdout, stderr=log_file)
else:
determinize_proc = subprocess.Popen([thirdparty_binary('lattice-determinize-pruned-parallel'),
'--acoustic-scale={}'.format(config.acoustic_scale),
'--beam={}'.format(config.lattice_beam),
'--num-threads={}'.format(num_threads),
'ark:-', 'ark:' + final_lat_path
], stdin=rescore_proc.stdout, stderr=log_file)
determinize_proc.communicate()
def transcribe_fmllr(transcriber):
model_directory = transcriber.model_directory
output_directory = transcriber.transcribe_directory
config = transcriber.transcribe_config
corpus = transcriber.corpus
num_jobs = corpus.num_jobs
split_directory = corpus.split_directory()
sil_phones = transcriber.dictionary.optional_silence_csl
fmllr_directory = os.path.join(output_directory, 'fmllr')
log_dir = os.path.join(fmllr_directory, 'log')
os.makedirs(log_dir, exist_ok=True)
mdl_path = os.path.join(model_directory, 'final.mdl')
if num_jobs > 1:
num_threads = None
else:
num_threads = corpus.original_num_jobs
jobs = [(output_directory, split_directory, sil_phones, x, mdl_path, config,
config.feature_config.construct_feature_proc_string(split_directory, model_directory, x),
fmllr_directory, num_threads, transcriber.dictionaries_for_job(x))
for x in range(num_jobs)]
run_non_mp(initial_fmllr_func, jobs, log_dir)
jobs = [(model_directory, split_directory, sil_phones, x, mdl_path, config,
config.feature_config.construct_feature_proc_string(split_directory, model_directory, x),
fmllr_directory, corpus.original_num_jobs, transcriber.dictionaries_for_job(x))
for x in range(num_jobs)]
run_non_mp(lat_gen_fmllr_func, jobs, log_dir)
jobs = [(model_directory, split_directory, sil_phones, x, mdl_path, config,
config.feature_config.construct_feature_proc_string(split_directory, model_directory, x),
output_directory, fmllr_directory, num_threads, transcriber.dictionaries_for_job(x))
for x in range(num_jobs)]
run_non_mp(final_fmllr_est_func, jobs, log_dir)
jobs = [(model_directory, split_directory, sil_phones, x, mdl_path, config,
config.feature_config.construct_feature_proc_string(split_directory, model_directory, x),
fmllr_directory, num_threads, transcriber.dictionaries_for_job(x))
for x in range(num_jobs)]
if config.use_mp:
run_mp(fmllr_rescore_func, jobs, log_dir)
else:
run_non_mp(fmllr_rescore_func, jobs, log_dir)
jobs = [(model_directory, x, config,
config.feature_config.construct_feature_proc_string(corpus.split_directory(), model_directory, x),
fmllr_directory, num_threads, transcriber.dictionaries_for_job(x))
for x in range(num_jobs)]
if config.use_mp:
run_mp(lm_rescore_func, jobs, log_dir)
else:
run_non_mp(lm_rescore_func, jobs, log_dir)
if config.use_mp:
run_mp(carpa_lm_rescore_func, jobs, log_dir)
else:
run_non_mp(carpa_lm_rescore_func, jobs, log_dir)
if transcriber.evaluation_mode:
best_wer = 10000
best = None
for lmwt in range(transcriber.min_language_model_weight, transcriber.max_language_model_weight):
for wip in transcriber.word_insertion_penalties:
out_dir = os.path.join(fmllr_directory, 'eval_{}_{}'.format(lmwt, wip))
log_dir = os.path.join(out_dir, 'log')
os.makedirs(log_dir, exist_ok=True)
jobs = [(model_directory, fmllr_directory, x, config, out_dir, lmwt, wip,
transcriber.dictionaries_for_job(x))
for x in range(num_jobs)]
if config.use_mp:
run_mp(score_func, jobs, log_dir)
else:
run_non_mp(score_func, jobs, log_dir)
ser, wer = transcriber.evaluate(out_dir, out_dir)
if wer < best_wer:
best = (lmwt, wip)
transcriber.transcribe_config.language_model_weight = best[0]
transcriber.transcribe_config.word_insertion_penalty = best[1]
out_dir = os.path.join(fmllr_directory, 'eval_{}_{}'.format(best[0], best[1]))
for filename in os.listdir(out_dir):
if not filename.startswith('tra'):
continue
tra_path = os.path.join(out_dir, filename)
saved_tra_path = os.path.join(fmllr_directory, filename)
shutil.copyfile(tra_path, saved_tra_path)
else:
jobs = [(model_directory, fmllr_directory, x, config, fmllr_directory, None, None,
transcriber.dictionaries_for_job(x))
for x in range(num_jobs)]
if config.use_mp:
run_mp(score_func, jobs, log_dir)
else:
run_non_mp(score_func, jobs, log_dir)
| 67.847826 | 120 | 0.504582 | 4,529 | 46,815 | 4.921175 | 0.041289 | 0.030151 | 0.039483 | 0.050251 | 0.954729 | 0.947864 | 0.940506 | 0.933686 | 0.926777 | 0.918431 | 0 | 0.002744 | 0.385026 | 46,815 | 689 | 121 | 67.946299 | 0.771414 | 0 | 0 | 0.801252 | 0 | 0 | 0.107833 | 0.025682 | 0 | 0 | 0 | 0 | 0 | 1 | 0.015649 | false | 0 | 0.00939 | 0 | 0.028169 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1ec88d90699a896c9141151ec45ea2a54476b8e8 | 146 | py | Python | datasette/__init__.py | kevboh/datasette | b7257a21bf3dfa7353980f343c83a616da44daa7 | [
"Apache-2.0"
] | 1 | 2018-06-01T22:15:11.000Z | 2018-06-01T22:15:11.000Z | datasette/__init__.py | kevboh/datasette | b7257a21bf3dfa7353980f343c83a616da44daa7 | [
"Apache-2.0"
] | null | null | null | datasette/__init__.py | kevboh/datasette | b7257a21bf3dfa7353980f343c83a616da44daa7 | [
"Apache-2.0"
] | null | null | null | from datasette.version import __version_info__, __version__ # noqa
from .hookspecs import hookimpl # noqa
from .hookspecs import hookspec # noqa
| 36.5 | 67 | 0.815068 | 18 | 146 | 6.111111 | 0.5 | 0.145455 | 0.309091 | 0.418182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.136986 | 146 | 3 | 68 | 48.666667 | 0.873016 | 0.09589 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1edb95ed138aea2452f3f06a857d98968146d03f | 382 | py | Python | generated-libraries/python/netapp/security_certificate/numbits.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | 2 | 2017-03-28T15:31:26.000Z | 2018-08-16T22:15:18.000Z | generated-libraries/python/netapp/security_certificate/numbits.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | generated-libraries/python/netapp/security_certificate/numbits.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | class Numbits(basestring):
"""
size of requested certificate in bits
Possible values:
<ul>
<li> "512" - 512 bits private key,
<li> "1024" - 1024 bits private key,
<li> "1536" - 1536 bits private key,
<li> "2048" - 2048 bits private key
</ul>
"""
@staticmethod
def get_api_name():
return "numbits"
| 22.470588 | 44 | 0.539267 | 44 | 382 | 4.636364 | 0.568182 | 0.215686 | 0.27451 | 0.235294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12 | 0.34555 | 382 | 16 | 45 | 23.875 | 0.696 | 0.594241 | 0 | 0 | 0 | 0 | 0.063063 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0 | 0.25 | 0.75 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
9492fe556e20d7e49876cbf31b0b9f818a2317d4 | 209 | py | Python | src/gpt2/policy_generation/__init__.py | CyberAgentAILab/GPT2 | 12ccd5ac8a7f38bc6b2e0aacf36af847009eb912 | [
"Apache-2.0"
] | 1 | 2022-03-23T04:54:46.000Z | 2022-03-23T04:54:46.000Z | src/gpt2/policy_generation/__init__.py | CyberAgentAILab/GPT2-ailab | 12ccd5ac8a7f38bc6b2e0aacf36af847009eb912 | [
"Apache-2.0"
] | null | null | null | src/gpt2/policy_generation/__init__.py | CyberAgentAILab/GPT2-ailab | 12ccd5ac8a7f38bc6b2e0aacf36af847009eb912 | [
"Apache-2.0"
] | null | null | null | from gpt2.policy_generation.specification import PolicyGenerationSpec
from gpt2.policy_generation.configuration import PolicyGenerateConfig
from gpt2.policy_generation.policy_generation import PolicyGenerator
| 52.25 | 69 | 0.913876 | 22 | 209 | 8.5 | 0.454545 | 0.342246 | 0.224599 | 0.385027 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015228 | 0.057416 | 209 | 3 | 70 | 69.666667 | 0.93401 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
a224cfb804902b782c30f8ff72f87bd6a7593e3e | 314,644 | py | Python | esociallib/v2_04/evtAltCadastral.py | akretion/esociallib | 2472d68b45610638cf10d87aeed48b917ebae6d8 | [
"MIT"
] | 6 | 2018-02-16T09:59:35.000Z | 2021-09-01T20:40:02.000Z | esociallib/v2_04/evtAltCadastral.py | akretion/esociallib | 2472d68b45610638cf10d87aeed48b917ebae6d8 | [
"MIT"
] | 2 | 2018-02-02T19:32:21.000Z | 2019-01-25T14:43:05.000Z | esociallib/v2_04/evtAltCadastral.py | akretion/esociallib | 2472d68b45610638cf10d87aeed48b917ebae6d8 | [
"MIT"
] | 2 | 2018-05-03T17:16:38.000Z | 2021-04-02T19:17:31.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Tue Oct 10 00:42:20 2017 by generateDS.py version 2.28b.
# Python 2.7.12 (default, Nov 19 2016, 06:48:10) [GCC 5.4.0 20160609]
#
# Command line options:
# ('--no-process-includes', '')
# ('-o', 'esociallib/v2_04/evtAltCadastral.py')
#
# Command line arguments:
# schemas/v2_04/evtAltCadastral.xsd
#
# Command line:
# /usr/local/bin/generateDS --no-process-includes -o "esociallib/v2_04/evtAltCadastral.py" schemas/v2_04/evtAltCadastral.xsd
#
# Current working directory (os.getcwd()):
# esociallib
#
import sys
import re as re_
import base64
import datetime as datetime_
import warnings as warnings_
try:
from lxml import etree as etree_
except ImportError:
from xml.etree import ElementTree as etree_
# True enables lexical validation of simpleType values during parse/export.
Validate_simpletypes_ = True
# Base string type for isinstance() checks: ``basestring`` on Python 2,
# ``str`` on Python 3 (the false branch is never evaluated on Python 3).
BaseStrType_ = basestring if sys.version_info.major == 2 else str
def parsexml_(infile, parser=None, **kwargs):
    """Parse *infile* and return an ElementTree document object.

    When no *parser* is supplied, prefer lxml's ElementTree-compatible
    parser (which ignores comments); fall back to the plain XMLParser
    when running on xml.etree, where ETCompatXMLParser does not exist.
    """
    if parser is None:
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            # xml.etree has no ETCompatXMLParser attribute.
            parser = etree_.XMLParser()
    return etree_.parse(infile, parser=parser, **kwargs)
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for a example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
try:
from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
GenerateDSNamespaceDefs_ = {}
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ImportError as exp:
class GeneratedsSuper(object):
tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
        class _FixedOffsetTZ(datetime_.tzinfo):
            """Minimal tzinfo with a fixed UTC offset (given in minutes) and a name.

            Used when parsing ISO 8601 timestamps that carry an explicit
            timezone designator such as ``Z`` or ``+02:00``.
            """
            def __init__(self, offset, name):
                # *offset* is expressed in minutes east of UTC.
                self.__offset = datetime_.timedelta(minutes=offset)
                self.__name = name
            def utcoffset(self, dt):
                return self.__offset
            def tzname(self, dt):
                return self.__name
            def dst(self, dt):
                # A fixed offset carries no daylight-saving information.
                return None
        def gds_format_string(self, input_data, input_name=''):
            """Return *input_data* unchanged; string values need no conversion."""
            return input_data
def gds_validate_string(self, input_data, node=None, input_name=''):
if not input_data:
return ''
else:
return input_data
def gds_format_base64(self, input_data, input_name=''):
return base64.b64encode(input_data)
        def gds_validate_base64(self, input_data, node=None, input_name=''):
            """Accept the parsed base64 value as-is; no validation is performed."""
            return input_data
        def gds_format_integer(self, input_data, input_name=''):
            """Format an integer value for XML output in decimal notation."""
            return '%d' % input_data
        def gds_validate_integer(self, input_data, node=None, input_name=''):
            """Return the parsed integer value unchanged; no range checks."""
            return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_integer_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
int(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of integers')
return values
        def gds_format_float(self, input_data, input_name=''):
            """Format a float with up to 15 decimal places, trailing zeros
            stripped.

            NOTE(review): whole numbers keep a trailing dot (e.g. 2.0 ->
            '2.'); that is still a valid XSD decimal lexical form.
            """
            return ('%.15f' % input_data).rstrip('0')
        def gds_validate_float(self, input_data, node=None, input_name=''):
            """Return the parsed float value unchanged; no range checks."""
            return input_data
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_float_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of floats')
return values
        def gds_format_double(self, input_data, input_name=''):
            """Format a double value for XML output in scientific notation."""
            return '%e' % input_data
        def gds_validate_double(self, input_data, node=None, input_name=''):
            """Return the parsed double value unchanged; no range checks."""
            return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_double_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of doubles')
return values
def gds_format_boolean(self, input_data, input_name=''):
return ('%s' % input_data).lower()
        def gds_validate_boolean(self, input_data, node=None, input_name=''):
            """Return the parsed boolean value unchanged; no validation."""
            return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_boolean_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0', ):
raise_parse_error(
node,
'Requires sequence of booleans '
'("true", "1", "false", "0")')
return values
def gds_validate_datetime(self, input_data, node=None, input_name=''):
    """Accept a datetime value unchanged; no validation is performed here."""
    return input_data
def gds_format_datetime(self, input_data, input_name=''):
    """Format a datetime as an XSD xs:dateTime string, with fractional
    seconds when microsecond is non-zero, and a 'Z' or '+hh:mm'/'-hh:mm'
    suffix when *input_data* is timezone-aware."""
    if input_data.microsecond == 0:
        _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
            input_data.year,
            input_data.month,
            input_data.day,
            input_data.hour,
            input_data.minute,
            input_data.second,
        )
    else:
        _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
            input_data.year,
            input_data.month,
            input_data.day,
            input_data.hour,
            input_data.minute,
            input_data.second,
            # Render microseconds as a fraction and drop the leading '0.'.
            ('%f' % (float(input_data.microsecond) / 1000000))[2:],
        )
    if input_data.tzinfo is not None:
        tzoff = input_data.tzinfo.utcoffset(input_data)
        if tzoff is not None:
            # timedelta keeps the sign in .days; fold into total seconds.
            total_seconds = tzoff.seconds + (86400 * tzoff.days)
            if total_seconds == 0:
                _svalue += 'Z'
            else:
                if total_seconds < 0:
                    _svalue += '-'
                    total_seconds *= -1
                else:
                    _svalue += '+'
                hours = total_seconds // 3600
                minutes = (total_seconds - (hours * 3600)) // 60
                _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
    return _svalue
@classmethod
def gds_parse_datetime(cls, input_data):
    """Parse an xs:dateTime string into a datetime, honoring a trailing
    'Z' or a '+hh:mm'/'-hh:mm' timezone designator."""
    tz = None
    if input_data[-1] == 'Z':
        tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
        input_data = input_data[:-1]
    else:
        results = GeneratedsSuper.tzoff_pattern.search(input_data)
        if results is not None:
            tzoff_parts = results.group(2).split(':')
            tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
            if results.group(1) == '-':
                tzoff *= -1
            tz = GeneratedsSuper._FixedOffsetTZ(
                tzoff, results.group(0))
            # Strip the fixed-width six-character offset suffix.
            input_data = input_data[:-6]
    time_parts = input_data.split('.')
    if len(time_parts) > 1:
        # Normalize the fractional part to integer microseconds so the
        # '%f' strptime directive accepts it.
        micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
        input_data = '%s.%s' % (time_parts[0], micro_seconds, )
        dt = datetime_.datetime.strptime(
            input_data, '%Y-%m-%dT%H:%M:%S.%f')
    else:
        dt = datetime_.datetime.strptime(
            input_data, '%Y-%m-%dT%H:%M:%S')
    dt = dt.replace(tzinfo=tz)
    return dt
def gds_validate_date(self, input_data, node=None, input_name=''):
    """Accept a date value unchanged; no validation is performed here."""
    return input_data
def gds_format_date(self, input_data, input_name=''):
    """Format a date as an XSD xs:date string, appending a timezone
    suffix when *input_data* happens to carry tzinfo."""
    _svalue = '%04d-%02d-%02d' % (
        input_data.year,
        input_data.month,
        input_data.day,
    )
    try:
        # Plain datetime.date objects have no tzinfo attribute; the
        # AttributeError guard below makes the suffix optional.
        if input_data.tzinfo is not None:
            tzoff = input_data.tzinfo.utcoffset(input_data)
            if tzoff is not None:
                total_seconds = tzoff.seconds + (86400 * tzoff.days)
                if total_seconds == 0:
                    _svalue += 'Z'
                else:
                    if total_seconds < 0:
                        _svalue += '-'
                        total_seconds *= -1
                    else:
                        _svalue += '+'
                    hours = total_seconds // 3600
                    minutes = (total_seconds - (hours * 3600)) // 60
                    _svalue += '{0:02d}:{1:02d}'.format(
                        hours, minutes)
    except AttributeError:
        pass
    return _svalue
@classmethod
def gds_parse_date(cls, input_data):
    """Parse an xs:date string into a datetime.date, honoring a trailing
    'Z' or a '+hh:mm'/'-hh:mm' timezone designator (the timezone is
    attached before .date() discards the time part)."""
    tz = None
    if input_data[-1] == 'Z':
        tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
        input_data = input_data[:-1]
    else:
        results = GeneratedsSuper.tzoff_pattern.search(input_data)
        if results is not None:
            tzoff_parts = results.group(2).split(':')
            tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
            if results.group(1) == '-':
                tzoff *= -1
            tz = GeneratedsSuper._FixedOffsetTZ(
                tzoff, results.group(0))
            # Strip the fixed-width six-character offset suffix.
            input_data = input_data[:-6]
    dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
    dt = dt.replace(tzinfo=tz)
    return dt.date()
def gds_validate_time(self, input_data, node=None, input_name=''):
    """Accept a time value unchanged; no validation is performed here."""
    return input_data
def gds_format_time(self, input_data, input_name=''):
    """Format a time as an XSD xs:time string, with fractional seconds
    when microsecond is non-zero, and a 'Z' or '+hh:mm'/'-hh:mm' suffix
    when *input_data* is timezone-aware."""
    if input_data.microsecond == 0:
        _svalue = '%02d:%02d:%02d' % (
            input_data.hour,
            input_data.minute,
            input_data.second,
        )
    else:
        _svalue = '%02d:%02d:%02d.%s' % (
            input_data.hour,
            input_data.minute,
            input_data.second,
            # Render microseconds as a fraction and drop the leading '0.'.
            ('%f' % (float(input_data.microsecond) / 1000000))[2:],
        )
    if input_data.tzinfo is not None:
        tzoff = input_data.tzinfo.utcoffset(input_data)
        if tzoff is not None:
            # timedelta keeps the sign in .days; fold into total seconds.
            total_seconds = tzoff.seconds + (86400 * tzoff.days)
            if total_seconds == 0:
                _svalue += 'Z'
            else:
                if total_seconds < 0:
                    _svalue += '-'
                    total_seconds *= -1
                else:
                    _svalue += '+'
                hours = total_seconds // 3600
                minutes = (total_seconds - (hours * 3600)) // 60
                _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
    return _svalue
def gds_validate_simple_patterns(self, patterns, target):
    """Check *target* against a list of lists of regex patterns:
    the outer lists are ANDed together, the inner lists are ORed."""
    for alternatives in patterns:
        if not any(
                re_.search(candidate, target) is not None
                for candidate in alternatives):
            return False
    return True
@classmethod
def gds_parse_time(cls, input_data):
    """Parse an xs:time string into a datetime.time, honoring a trailing
    'Z' or a '+hh:mm'/'-hh:mm' timezone designator."""
    tz = None
    if input_data[-1] == 'Z':
        tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
        input_data = input_data[:-1]
    else:
        results = GeneratedsSuper.tzoff_pattern.search(input_data)
        if results is not None:
            tzoff_parts = results.group(2).split(':')
            tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
            if results.group(1) == '-':
                tzoff *= -1
            tz = GeneratedsSuper._FixedOffsetTZ(
                tzoff, results.group(0))
            # Strip the fixed-width six-character offset suffix.
            input_data = input_data[:-6]
    if len(input_data.split('.')) > 1:
        dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
    else:
        dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
    dt = dt.replace(tzinfo=tz)
    return dt.time()
def gds_str_lower(self, instring):
    """Lower-case *instring*; single hook point for case normalization."""
    return instring.lower()
def get_path_(self, node):
    """Return the slash-separated tag path from the document root down to
    *node* (collected leaf-first, then reversed)."""
    segments = []
    self.get_path_list_(node, segments)
    return '/'.join(reversed(segments))
# Matches a leading '{namespace-uri}' prefix in an lxml tag name.
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
def get_path_list_(self, node, path_list):
    """Recursively append the namespace-stripped tag of *node* and each of
    its ancestors to *path_list* (leaf first)."""
    if node is None:
        return
    tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
    if tag:
        path_list.append(tag)
    # node.getparent() is lxml-specific; returns None at the root.
    self.get_path_list_(node.getparent(), path_list)
def get_class_obj_(self, node, default_class=None):
    """Resolve the class to instantiate for *node*, honoring an xsi:type
    attribute when present; otherwise fall back to *default_class*."""
    class_obj1 = default_class
    if 'xsi' in node.nsmap:
        classname = node.get('{%s}type' % node.nsmap['xsi'])
        if classname is not None:
            names = classname.split(':')
            if len(names) == 2:
                # Drop the namespace prefix from 'prefix:ClassName'.
                classname = names[1]
            # Look the named class up among this module's globals.
            class_obj2 = globals().get(classname)
            if class_obj2 is not None:
                class_obj1 = class_obj2
    return class_obj1
def gds_build_any(self, node, type_name=None):
    """Hook for xs:any content; this generated binding ignores it."""
    return None
@classmethod
def gds_reverse_node_mapping(cls, mapping):
    """Return a dict with the keys and values of *mapping* swapped.

    Fixed: the original called ``mapping.iteritems()``, which exists only
    on Python 2 and raises AttributeError on Python 3 even though the
    rest of this module branches on ``sys.version_info``. ``items()``
    works on both versions.
    """
    return dict((value, key) for key, value in mapping.items())
@staticmethod
def gds_encode(instring):
    """Byte-encode *instring* with ExternalEncoding on Python 2;
    pass it through unchanged on Python 3."""
    if sys.version_info.major != 2:
        return instring
    return instring.encode(ExternalEncoding)
@staticmethod
def convert_unicode(instring):
    """XML-quote *instring*, handling native str, Python 2 ``unicode``,
    and arbitrary values (coerced through str())."""
    if isinstance(instring, str):
        result = quote_xml(instring)
    elif sys.version_info.major == 2 and isinstance(instring, unicode):
        # 'unicode' exists only on Python 2; the version check above
        # short-circuits so this name is never evaluated on Python 3.
        result = quote_xml(instring).encode('utf8')
    else:
        result = GeneratedsSuper.gds_encode(str(instring))
    return result
def __eq__(self, other):
if type(self) != type(other):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
def getSubclassFromModule_(module, class_):
    '''Get the subclass of a class from a specific module.

    Looks for an attribute named "<ClassName>Sub" on *module*; returns it
    when present, otherwise None.
    '''
    return getattr(module, class_.__name__ + 'Sub', None)
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
#
# Globals
#
# Encoding used by gds_encode() when byte-encoding strings on Python 2;
# unused on Python 3.
ExternalEncoding = 'ascii'
# Splits an lxml tag into its optional '{namespace}' part and local name.
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
# Collapses runs of whitespace characters during string cleanup.
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
# Captures the namespace URI and local name from a '{uri}name' tag.
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
# Matches CDATA sections so quote_xml() can pass them through unescaped.
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write *level* units of indentation to *outfile*; a no-op unless
    pretty-printing is enabled."""
    if not pretty_print:
        return
    for _ in range(level):
        outfile.write(' ')
def quote_xml(inStr):
    "Escape markup chars, but do not modify CDATA sections."
    # Falsy input (None, '') serializes as the empty string.
    if not inStr:
        return ''
    # Coerce non-string values to text before escaping.
    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
    s2 = ''
    pos = 0
    matchobjects = CDATA_pattern_.finditer(s1)
    for mo in matchobjects:
        # Escape the text before this CDATA section ...
        s3 = s1[pos:mo.start()]
        s2 += quote_xml_aux(s3)
        # ... and copy the CDATA section through untouched.
        s2 += s1[mo.start():mo.end()]
        pos = mo.end()
    # Escape whatever follows the last CDATA section.
    s3 = s1[pos:]
    s2 += quote_xml_aux(s3)
    return s2
def quote_xml_aux(inStr):
    """Escape the XML special characters '&', '<' and '>' in *inStr*.

    Fixed: the replacement calls had degenerated into identity no-ops
    (the entity references were lost); restore standard XML entity
    escaping. '&' must be replaced first so the entities produced for
    '<' and '>' are not themselves double-escaped.
    """
    s1 = inStr.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
    """Escape *inStr* for use as an XML attribute value and wrap it in
    quotes.

    Fixed: the replacement calls had degenerated into identity no-ops and
    the double-quote branch had lost its '&quot;' entity (leaving invalid
    syntax); restore standard attribute escaping. The value is wrapped in
    double quotes, or in single quotes when it contains a double quote
    but no single quote.
    """
    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            # Both quote kinds present: keep double quotes and escape the
            # embedded double quotes as entities.
            s1 = '"%s"' % s1.replace('"', "&quot;")
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def quote_python(inStr):
    """Wrap *inStr* as a Python string literal, picking a quote style that
    avoids conflicts with embedded quotes and newlines."""
    text = inStr
    if "'" not in text:
        # No single quotes: single-quoted (triple for multi-line).
        if '\n' not in text:
            return "'%s'" % text
        return "'''%s'''" % text
    # Contains single quotes: double-quote the literal, escaping any
    # embedded double quotes first.
    if '"' in text:
        text = text.replace('"', '\\"')
    if '\n' not in text:
        return '"%s"' % text
    return '"""%s"""' % text
def get_all_text_(node):
    """Collect the text content of *node*: its leading text plus the tail
    text of each direct child (i.e. mixed content, element markup skipped)."""
    text = node.text if node.text is not None else ''
    for child in node:
        if child.tail is not None:
            text += child.tail
    return text
def find_attr_value_(attr_name, node):
    """Look up an attribute on *node*, resolving an optional 'prefix:name'
    form through the node's namespace map (lxml ``nsmap``). Returns None
    when the attribute is absent or the prefix is unknown."""
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, local = parts
        uri = node.nsmap.get(prefix)
        if uri is not None:
            return attrs.get('{%s}%s' % (uri, local, ))
    return None
class GDSParseError(Exception):
    """Raised when XML input fails validation during parsing."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError, annotating *msg* with the offending element's
    tag and source line (lxml ``sourceline``)."""
    detail = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
    raise GDSParseError(detail)
class MixedContainer:
    """Holds one piece of mixed XML content — raw text, a simple typed
    value, or a nested complex object — tagged with category and
    content-type codes that select how it is serialized."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the content_type parameter is unused here; the
        # stored value is always returned.
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace,
               pretty_print=True):
        """Serialize this container to *outfile* according to its category."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            self.value.export(
                outfile, level, namespace, name,
                pretty_print=pretty_print)
    def exportSimple(self, outfile, level, name):
        """Write the value as a simple '<name>value</name>' element,
        formatted according to content_type."""
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            # NOTE(review): on Python 3, base64.b64encode returns bytes,
            # so '%s' would render as "b'...'" — confirm intended usage.
            outfile.write('<%s>%s</%s>' % (
                self.name,
                base64.b64encode(self.value),
                self.name))
    def to_etree(self, element):
        """Attach this container's content to the lxml/etree *element*,
        appending text to the proper text/tail slot for mixed content."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                if len(element) > 0:
                    # Text after a child element belongs in its tail.
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(
                element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:    # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self):
        """Return the value formatted as element text per content_type."""
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        # NOTE(review): TypeNone/TypeText fall through every branch and
        # leave 'text' unbound (NameError) — appears unreachable for
        # well-formed containers, but verify.
        return text
    def exportLiteral(self, outfile, level, name):
        """Write this container as Python constructor source code."""
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Metadata record describing one member (attribute or child element)
    of a generated binding class."""

    def __init__(self, name='', data_type='', container=0,
                 optional=0, child_attrs=None, choice=None):
        self.name = name
        self.data_type = data_type
        self.container = container
        self.child_attrs = child_attrs
        self.choice = choice
        self.optional = optional

    def set_name(self, name):
        self.name = name

    def get_name(self):
        return self.name

    def set_data_type(self, data_type):
        self.data_type = data_type

    def get_data_type_chain(self):
        return self.data_type

    def get_data_type(self):
        """Return the effective type: the last element of a type chain,
        'xs:string' for an empty chain, or the scalar type as-is."""
        if not isinstance(self.data_type, list):
            return self.data_type
        if self.data_type:
            return self.data_type[-1]
        return 'xs:string'

    def set_container(self, container):
        self.container = container

    def get_container(self):
        return self.container

    def set_child_attrs(self, child_attrs):
        self.child_attrs = child_attrs

    def get_child_attrs(self):
        return self.child_attrs

    def set_choice(self, choice):
        self.choice = choice

    def get_choice(self):
        return self.choice

    def set_optional(self, optional):
        self.optional = optional

    def get_optional(self):
        return self.optional
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class eSocial(GeneratedsSuper):
    """Root element binding for the eSocial document: carries the
    evtAltCadastral event and an optional XML-DSig Signature."""
    subclass = None
    superclass = None
    def __init__(self, evtAltCadastral=None, Signature=None):
        self.original_tagname_ = None
        self.evtAltCadastral = evtAltCadastral
        self.Signature = Signature
    def factory(*args_, **kwargs_):
        # Delegate construction to a registered subclass when one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, eSocial)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if eSocial.subclass:
            return eSocial.subclass(*args_, **kwargs_)
        else:
            return eSocial(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_evtAltCadastral(self): return self.evtAltCadastral
    def set_evtAltCadastral(self, evtAltCadastral): self.evtAltCadastral = evtAltCadastral
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def hasContent_(self):
        # True when at least one child element would be emitted.
        if (
            self.evtAltCadastral is not None or
            self.Signature is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='eSocial', namespacedef_=' xmlns:ds="http://www.w3.org/2000/09/xmldsig#" ', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('eSocial')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='eSocial')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='eSocial', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='eSocial'):
        # eSocial itself carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='eSocial', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.evtAltCadastral is not None:
            self.evtAltCadastral.export(outfile, level, namespace_, name_='evtAltCadastral', pretty_print=pretty_print)
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % ('ds:', self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), 'ds:', eol_))
    def build(self, node):
        """Populate this object from an lxml element tree node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'evtAltCadastral':
            obj_ = evtAltCadastral.factory()
            obj_.build(child_)
            self.evtAltCadastral = obj_
            obj_.original_tagname_ = 'evtAltCadastral'
        elif nodeName_ == 'Signature':
            Signature_ = child_.text
            Signature_ = self.gds_validate_string(Signature_, node, 'Signature')
            self.Signature = Signature_
# end class eSocial
class evtAltCadastral(GeneratedsSuper):
    """Worker registration-data change event ("Evento Alteração Cadastral
    do Trabalhador")."""
    subclass = None
    superclass = None
    def __init__(self, Id=None, ideEvento=None, ideEmpregador=None, ideTrabalhador=None, alteracao=None):
        self.original_tagname_ = None
        # Id is an XML attribute; _cast(None, Id) stores it unconverted.
        self.Id = _cast(None, Id)
        self.ideEvento = ideEvento
        self.ideEmpregador = ideEmpregador
        self.ideTrabalhador = ideTrabalhador
        self.alteracao = alteracao
    def factory(*args_, **kwargs_):
        # Delegate construction to a registered subclass when one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, evtAltCadastral)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if evtAltCadastral.subclass:
            return evtAltCadastral.subclass(*args_, **kwargs_)
        else:
            return evtAltCadastral(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ideEvento(self): return self.ideEvento
    def set_ideEvento(self, ideEvento): self.ideEvento = ideEvento
    def get_ideEmpregador(self): return self.ideEmpregador
    def set_ideEmpregador(self, ideEmpregador): self.ideEmpregador = ideEmpregador
    def get_ideTrabalhador(self): return self.ideTrabalhador
    def set_ideTrabalhador(self, ideTrabalhador): self.ideTrabalhador = ideTrabalhador
    def get_alteracao(self): return self.alteracao
    def set_alteracao(self, alteracao): self.alteracao = alteracao
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        # True when at least one child element would be emitted.
        if (
            self.ideEvento is not None or
            self.ideEmpregador is not None or
            self.ideTrabalhador is not None or
            self.alteracao is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='evtAltCadastral', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('evtAltCadastral')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='evtAltCadastral')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='evtAltCadastral', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='evtAltCadastral'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), ))
    def exportChildren(self, outfile, level, namespace_='', name_='evtAltCadastral', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ideEvento is not None:
            self.ideEvento.export(outfile, level, namespace_, name_='ideEvento', pretty_print=pretty_print)
        if self.ideEmpregador is not None:
            self.ideEmpregador.export(outfile, level, namespace_, name_='ideEmpregador', pretty_print=pretty_print)
        if self.ideTrabalhador is not None:
            self.ideTrabalhador.export(outfile, level, namespace_, name_='ideTrabalhador', pretty_print=pretty_print)
        if self.alteracao is not None:
            self.alteracao.export(outfile, level, namespace_, name_='alteracao', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an lxml element tree node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch each child element to its binding class (TIdeEveTrab and
        # TEmpregador are defined elsewhere in this module).
        if nodeName_ == 'ideEvento':
            obj_ = TIdeEveTrab.factory()
            obj_.build(child_)
            self.ideEvento = obj_
            obj_.original_tagname_ = 'ideEvento'
        elif nodeName_ == 'ideEmpregador':
            obj_ = TEmpregador.factory()
            obj_.build(child_)
            self.ideEmpregador = obj_
            obj_.original_tagname_ = 'ideEmpregador'
        elif nodeName_ == 'ideTrabalhador':
            obj_ = ideTrabalhador.factory()
            obj_.build(child_)
            self.ideTrabalhador = obj_
            obj_.original_tagname_ = 'ideTrabalhador'
        elif nodeName_ == 'alteracao':
            obj_ = alteracao.factory()
            obj_.build(child_)
            self.alteracao = obj_
            obj_.original_tagname_ = 'alteracao'
# end class evtAltCadastral
class ideTrabalhador(GeneratedsSuper):
    """Worker identification ("Identificação do Trabalhador"): holds the
    worker's CPF number."""
    subclass = None
    superclass = None
    def __init__(self, cpfTrab=None):
        self.original_tagname_ = None
        self.cpfTrab = cpfTrab
    def factory(*args_, **kwargs_):
        # Delegate construction to a registered subclass when one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ideTrabalhador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ideTrabalhador.subclass:
            return ideTrabalhador.subclass(*args_, **kwargs_)
        else:
            return ideTrabalhador(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_cpfTrab(self): return self.cpfTrab
    def set_cpfTrab(self, cpfTrab): self.cpfTrab = cpfTrab
    def hasContent_(self):
        # True when the single child element would be emitted.
        if (
            self.cpfTrab is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='ideTrabalhador', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ideTrabalhador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ideTrabalhador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ideTrabalhador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ideTrabalhador'):
        # ideTrabalhador carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='ideTrabalhador', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.cpfTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scpfTrab>%s</%scpfTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cpfTrab), input_name='cpfTrab')), namespace_, eol_))
    def build(self, node):
        """Populate this object from an lxml element tree node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'cpfTrab':
            cpfTrab_ = child_.text
            cpfTrab_ = self.gds_validate_string(cpfTrab_, node, 'cpfTrab')
            self.cpfTrab = cpfTrab_
# end class ideTrabalhador
class cpfTrab(GeneratedsSuper):
    """Placeholder binding for the cpfTrab element; it has no attributes
    and no child elements of its own."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Delegate construction to a registered subclass when one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cpfTrab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cpfTrab.subclass:
            return cpfTrab.subclass(*args_, **kwargs_)
        else:
            return cpfTrab(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Fixed: the generated code contained an empty parenthesized
        # condition (`if ( ):`), which is a SyntaxError. This element
        # type has no members, so it never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='cpfTrab', namespacedef_='', pretty_print=True):
        """Write this (always empty) element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cpfTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cpfTrab')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cpfTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cpfTrab'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='cpfTrab', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an lxml element tree node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cpfTrab
class alteracao(GeneratedsSuper):
    """Change to the worker's registration data ("Alteração de Dados
    Cadastrais do Trabalhador"): the change date plus the new data."""
    subclass = None
    superclass = None
    def __init__(self, dtAlteracao=None, dadosTrabalhador=None):
        self.original_tagname_ = None
        # dtAlteracao may arrive as an ISO 'YYYY-MM-DD' string or as a
        # date object; strings are parsed up front.
        if isinstance(dtAlteracao, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtAlteracao, '%Y-%m-%d').date()
        else:
            initvalue_ = dtAlteracao
        self.dtAlteracao = initvalue_
        self.dadosTrabalhador = dadosTrabalhador
    def factory(*args_, **kwargs_):
        # Delegate construction to a registered subclass when one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, alteracao)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if alteracao.subclass:
            return alteracao.subclass(*args_, **kwargs_)
        else:
            return alteracao(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_dtAlteracao(self): return self.dtAlteracao
    def set_dtAlteracao(self, dtAlteracao): self.dtAlteracao = dtAlteracao
    def get_dadosTrabalhador(self): return self.dadosTrabalhador
    def set_dadosTrabalhador(self, dadosTrabalhador): self.dadosTrabalhador = dadosTrabalhador
    def hasContent_(self):
        # True when at least one child element would be emitted.
        if (
            self.dtAlteracao is not None or
            self.dadosTrabalhador is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='alteracao', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('alteracao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='alteracao')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='alteracao', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='alteracao'):
        # alteracao carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='alteracao', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.dtAlteracao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtAlteracao>%s</%sdtAlteracao>%s' % (namespace_, self.gds_format_date(self.dtAlteracao, input_name='dtAlteracao'), namespace_, eol_))
        if self.dadosTrabalhador is not None:
            self.dadosTrabalhador.export(outfile, level, namespace_, name_='dadosTrabalhador', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an lxml element tree node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'dtAlteracao':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtAlteracao = dval_
        elif nodeName_ == 'dadosTrabalhador':
            obj_ = dadosTrabalhador.factory()
            obj_.build(child_)
            self.dadosTrabalhador = obj_
            obj_.original_tagname_ = 'dadosTrabalhador'
# end class alteracao
class dtAlteracao(GeneratedsSuper):
    """Placeholder binding for the dtAlteracao element; it has no
    attributes and no child elements of its own."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Delegate construction to a registered subclass when one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtAlteracao)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtAlteracao.subclass:
            return dtAlteracao.subclass(*args_, **kwargs_)
        else:
            return dtAlteracao(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Fixed: the generated code contained an empty parenthesized
        # condition (`if ( ):`), which is a SyntaxError. This element
        # type has no members, so it never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='dtAlteracao', namespacedef_='', pretty_print=True):
        """Write this (always empty) element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtAlteracao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtAlteracao')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtAlteracao', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtAlteracao'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dtAlteracao', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an lxml element tree node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtAlteracao
class dadosTrabalhador(GeneratedsSuper):
    """Personal information about the worker (eSocial ``dadosTrabalhador``
    group): identification, demographic data, documents, address,
    foreign-worker data, disability information, dependents, retirement
    and contact data. (Original docstring: "Informações Pessoais do
    Trabalhador".)
    """
    subclass = None
    superclass = None
    def __init__(self, nisTrab=None, nmTrab=None, sexo=None, racaCor=None, estCiv=None, grauInstr=None, nmSoc=None, documentos=None, endereco=None, trabEstrangeiro=None, infoDeficiencia=None, dependente=None, aposentadoria=None, contato=None):
        self.original_tagname_ = None
        self.nisTrab = nisTrab
        self.nmTrab = nmTrab
        self.sexo = sexo
        self.racaCor = racaCor
        self.estCiv = estCiv
        self.grauInstr = grauInstr
        self.nmSoc = nmSoc
        self.documentos = documentos
        self.endereco = endereco
        self.trabEstrangeiro = trabEstrangeiro
        self.infoDeficiencia = infoDeficiencia
        # ``dependente`` is a repeating child element; default to a fresh
        # list per instance (a mutable default argument would be shared).
        if dependente is None:
            self.dependente = []
        else:
            self.dependente = dependente
        self.aposentadoria = aposentadoria
        self.contato = contato
    def factory(*args_, **kwargs_):
        # Delegate construction to a registered subclass when one exists
        # (external subclass module first, then the class-level hook).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dadosTrabalhador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dadosTrabalhador.subclass:
            return dadosTrabalhador.subclass(*args_, **kwargs_)
        else:
            return dadosTrabalhador(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors, kept for compatibility with the generateDS
    # API; the attributes may also be read and written directly.
    def get_nisTrab(self): return self.nisTrab
    def set_nisTrab(self, nisTrab): self.nisTrab = nisTrab
    def get_nmTrab(self): return self.nmTrab
    def set_nmTrab(self, nmTrab): self.nmTrab = nmTrab
    def get_sexo(self): return self.sexo
    def set_sexo(self, sexo): self.sexo = sexo
    def get_racaCor(self): return self.racaCor
    def set_racaCor(self, racaCor): self.racaCor = racaCor
    def get_estCiv(self): return self.estCiv
    def set_estCiv(self, estCiv): self.estCiv = estCiv
    def get_grauInstr(self): return self.grauInstr
    def set_grauInstr(self, grauInstr): self.grauInstr = grauInstr
    def get_nmSoc(self): return self.nmSoc
    def set_nmSoc(self, nmSoc): self.nmSoc = nmSoc
    def get_documentos(self): return self.documentos
    def set_documentos(self, documentos): self.documentos = documentos
    def get_endereco(self): return self.endereco
    def set_endereco(self, endereco): self.endereco = endereco
    def get_trabEstrangeiro(self): return self.trabEstrangeiro
    def set_trabEstrangeiro(self, trabEstrangeiro): self.trabEstrangeiro = trabEstrangeiro
    def get_infoDeficiencia(self): return self.infoDeficiencia
    def set_infoDeficiencia(self, infoDeficiencia): self.infoDeficiencia = infoDeficiencia
    def get_dependente(self): return self.dependente
    def set_dependente(self, dependente): self.dependente = dependente
    def add_dependente(self, value): self.dependente.append(value)
    def insert_dependente_at(self, index, value): self.dependente.insert(index, value)
    def replace_dependente_at(self, index, value): self.dependente[index] = value
    def get_aposentadoria(self): return self.aposentadoria
    def set_aposentadoria(self, aposentadoria): self.aposentadoria = aposentadoria
    def get_contato(self): return self.contato
    def set_contato(self, contato): self.contato = contato
    def hasContent_(self):
        # True when at least one child element is populated; used by
        # export() to decide between <tag>...</tag> and <tag/>.
        if (
            self.nisTrab is not None or
            self.nmTrab is not None or
            self.sexo is not None or
            self.racaCor is not None or
            self.estCiv is not None or
            self.grauInstr is not None or
            self.nmSoc is not None or
            self.documentos is not None or
            self.endereco is not None or
            self.trabEstrangeiro is not None or
            self.infoDeficiencia is not None or
            self.dependente or
            self.aposentadoria is not None or
            self.contato is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='dadosTrabalhador', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dadosTrabalhador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # When the instance was built from XML, reuse the tag name it was
        # parsed under instead of the default element name.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dadosTrabalhador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dadosTrabalhador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dadosTrabalhador'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dadosTrabalhador', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Simple-typed children are written inline; complex children
        # delegate to their own export().
        if self.nisTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snisTrab>%s</%snisTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nisTrab), input_name='nisTrab')), namespace_, eol_))
        if self.nmTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snmTrab>%s</%snmTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmTrab), input_name='nmTrab')), namespace_, eol_))
        if self.sexo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%ssexo>%s</%ssexo>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.sexo), input_name='sexo')), namespace_, eol_))
        if self.racaCor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sracaCor>%s</%sracaCor>%s' % (namespace_, self.gds_format_integer(self.racaCor, input_name='racaCor'), namespace_, eol_))
        if self.estCiv is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sestCiv>%s</%sestCiv>%s' % (namespace_, self.gds_format_integer(self.estCiv, input_name='estCiv'), namespace_, eol_))
        if self.grauInstr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sgrauInstr>%s</%sgrauInstr>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.grauInstr), input_name='grauInstr')), namespace_, eol_))
        if self.nmSoc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snmSoc>%s</%snmSoc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmSoc), input_name='nmSoc')), namespace_, eol_))
        if self.documentos is not None:
            self.documentos.export(outfile, level, namespace_, name_='documentos', pretty_print=pretty_print)
        if self.endereco is not None:
            self.endereco.export(outfile, level, namespace_, name_='endereco', pretty_print=pretty_print)
        if self.trabEstrangeiro is not None:
            self.trabEstrangeiro.export(outfile, level, namespace_, name_='trabEstrangeiro', pretty_print=pretty_print)
        if self.infoDeficiencia is not None:
            self.infoDeficiencia.export(outfile, level, namespace_, name_='infoDeficiencia', pretty_print=pretty_print)
        for dependente_ in self.dependente:
            dependente_.export(outfile, level, namespace_, name_='dependente', pretty_print=pretty_print)
        if self.aposentadoria is not None:
            self.aposentadoria.export(outfile, level, namespace_, name_='aposentadoria', pretty_print=pretty_print)
        if self.contato is not None:
            self.contato.export(outfile, level, namespace_, name_='contato', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace from the child's tag before dispatch.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on child tag name: simple types are validated and
        # stored directly, complex types are built via their factory.
        if nodeName_ == 'nisTrab':
            nisTrab_ = child_.text
            nisTrab_ = self.gds_validate_string(nisTrab_, node, 'nisTrab')
            self.nisTrab = nisTrab_
        elif nodeName_ == 'nmTrab':
            nmTrab_ = child_.text
            nmTrab_ = self.gds_validate_string(nmTrab_, node, 'nmTrab')
            self.nmTrab = nmTrab_
        elif nodeName_ == 'sexo':
            sexo_ = child_.text
            sexo_ = self.gds_validate_string(sexo_, node, 'sexo')
            self.sexo = sexo_
        elif nodeName_ == 'racaCor':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'racaCor')
            self.racaCor = ival_
        elif nodeName_ == 'estCiv':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'estCiv')
            self.estCiv = ival_
        elif nodeName_ == 'grauInstr':
            grauInstr_ = child_.text
            grauInstr_ = self.gds_validate_string(grauInstr_, node, 'grauInstr')
            self.grauInstr = grauInstr_
        elif nodeName_ == 'nmSoc':
            nmSoc_ = child_.text
            nmSoc_ = self.gds_validate_string(nmSoc_, node, 'nmSoc')
            self.nmSoc = nmSoc_
        elif nodeName_ == 'documentos':
            obj_ = documentos.factory()
            obj_.build(child_)
            self.documentos = obj_
            obj_.original_tagname_ = 'documentos'
        elif nodeName_ == 'endereco':
            obj_ = endereco.factory()
            obj_.build(child_)
            self.endereco = obj_
            obj_.original_tagname_ = 'endereco'
        elif nodeName_ == 'trabEstrangeiro':
            obj_ = TTrabEstrang.factory()
            obj_.build(child_)
            self.trabEstrangeiro = obj_
            obj_.original_tagname_ = 'trabEstrangeiro'
        elif nodeName_ == 'infoDeficiencia':
            obj_ = infoDeficiencia.factory()
            obj_.build(child_)
            self.infoDeficiencia = obj_
            obj_.original_tagname_ = 'infoDeficiencia'
        elif nodeName_ == 'dependente':
            # Repeating element: append each occurrence.
            obj_ = TDependente.factory()
            obj_.build(child_)
            self.dependente.append(obj_)
            obj_.original_tagname_ = 'dependente'
        elif nodeName_ == 'aposentadoria':
            obj_ = aposentadoria.factory()
            obj_.build(child_)
            self.aposentadoria = obj_
            obj_.original_tagname_ = 'aposentadoria'
        elif nodeName_ == 'contato':
            obj_ = TContato.factory()
            obj_.build(child_)
            self.contato = obj_
            obj_.original_tagname_ = 'contato'
# end class dadosTrabalhador
class nisTrab(GeneratedsSuper):
    """Generated binding for the ``nisTrab`` element.

    The schema declares no attributes and no child elements for this
    type, so instances only carry the generic export/build machinery.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass module first, then a
        # locally registered subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nisTrab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = nisTrab.subclass or nisTrab
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No attributes or children are defined; the element always
        # serializes as an empty tag.
        return False
    def export(self, outfile, level, namespace_='', name_='nisTrab', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nisTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        self.exportAttributes(outfile, level, set(), namespace_, name_='nisTrab')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nisTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nisTrab'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nisTrab', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nisTrab
class nmTrab(GeneratedsSuper):
    """Generated binding for the ``nmTrab`` element.

    The schema declares no attributes and no child elements for this
    type, so instances only carry the generic export/build machinery.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass module first, then a
        # locally registered subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nmTrab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = nmTrab.subclass or nmTrab
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No attributes or children are defined; the element always
        # serializes as an empty tag.
        return False
    def export(self, outfile, level, namespace_='', name_='nmTrab', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        self.exportAttributes(outfile, level, set(), namespace_, name_='nmTrab')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmTrab'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nmTrab', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nmTrab
class sexo(GeneratedsSuper):
    """Generated binding for the ``sexo`` element.

    The schema declares no attributes and no child elements for this
    type, so instances only carry the generic export/build machinery.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass module first, then a
        # locally registered subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, sexo)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = sexo.subclass or sexo
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No attributes or children are defined; the element always
        # serializes as an empty tag.
        return False
    def export(self, outfile, level, namespace_='', name_='sexo', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('sexo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        self.exportAttributes(outfile, level, set(), namespace_, name_='sexo')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='sexo', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='sexo'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='sexo', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class sexo
class racaCor(GeneratedsSuper):
    """Generated binding for the ``racaCor`` element.

    The schema declares no attributes and no child elements for this
    type, so instances only carry the generic export/build machinery.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass module first, then a
        # locally registered subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, racaCor)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = racaCor.subclass or racaCor
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No attributes or children are defined; the element always
        # serializes as an empty tag.
        return False
    def export(self, outfile, level, namespace_='', name_='racaCor', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('racaCor')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        self.exportAttributes(outfile, level, set(), namespace_, name_='racaCor')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='racaCor', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='racaCor'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='racaCor', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class racaCor
class estCiv(GeneratedsSuper):
    """Generated binding for the ``estCiv`` element.

    The schema declares no attributes and no child elements for this
    type, so instances only carry the generic export/build machinery.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass module first, then a
        # locally registered subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, estCiv)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = estCiv.subclass or estCiv
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No attributes or children are defined; the element always
        # serializes as an empty tag.
        return False
    def export(self, outfile, level, namespace_='', name_='estCiv', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('estCiv')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        self.exportAttributes(outfile, level, set(), namespace_, name_='estCiv')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='estCiv', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='estCiv'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='estCiv', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class estCiv
class grauInstr(GeneratedsSuper):
    """Generated binding for the ``grauInstr`` element.

    The schema declares no attributes and no child elements for this
    type, so instances only carry the generic export/build machinery.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass module first, then a
        # locally registered subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, grauInstr)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = grauInstr.subclass or grauInstr
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No attributes or children are defined; the element always
        # serializes as an empty tag.
        return False
    def export(self, outfile, level, namespace_='', name_='grauInstr', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('grauInstr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        self.exportAttributes(outfile, level, set(), namespace_, name_='grauInstr')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='grauInstr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='grauInstr'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='grauInstr', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class grauInstr
class nmSoc(GeneratedsSuper):
    """Generated binding for the ``nmSoc`` element.

    The schema declares no attributes and no child elements for this
    type, so instances only carry the generic export/build machinery.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass module first, then a
        # locally registered subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nmSoc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = nmSoc.subclass or nmSoc
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No attributes or children are defined; the element always
        # serializes as an empty tag.
        return False
    def export(self, outfile, level, namespace_='', name_='nmSoc', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmSoc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        self.exportAttributes(outfile, level, set(), namespace_, name_='nmSoc')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmSoc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmSoc'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nmSoc', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nmSoc
class documentos(GeneratedsSuper):
    """Personal documents of the worker (eSocial ``documentos`` group):
    work card (CTPS), civil ID (RIC/RG), foreigner registry (RNE),
    professional-body card (OC) and driver's license (CNH).
    """
    subclass = None
    superclass = None
    # Child element tags in schema order; each maps to an attribute of
    # the same name holding a complex-type instance (or None).
    _child_tags_ = ('CTPS', 'RIC', 'RG', 'RNE', 'OC', 'CNH')
    def __init__(self, CTPS=None, RIC=None, RG=None, RNE=None, OC=None, CNH=None):
        self.original_tagname_ = None
        self.CTPS = CTPS
        self.RIC = RIC
        self.RG = RG
        self.RNE = RNE
        self.OC = OC
        self.CNH = CNH
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass module first, then a
        # locally registered subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, documentos)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = documentos.subclass or documentos
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_CTPS(self):
        return self.CTPS
    def set_CTPS(self, CTPS):
        self.CTPS = CTPS
    def get_RIC(self):
        return self.RIC
    def set_RIC(self, RIC):
        self.RIC = RIC
    def get_RG(self):
        return self.RG
    def set_RG(self, RG):
        self.RG = RG
    def get_RNE(self):
        return self.RNE
    def set_RNE(self, RNE):
        self.RNE = RNE
    def get_OC(self):
        return self.OC
    def set_OC(self, OC):
        self.OC = OC
    def get_CNH(self):
        return self.CNH
    def set_CNH(self, CNH):
        self.CNH = CNH
    def hasContent_(self):
        # Non-empty when at least one document child is populated.
        return any(
            getattr(self, tag_) is not None for tag_ in documentos._child_tags_
        )
    def export(self, outfile, level, namespace_='', name_='documentos', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('documentos')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        self.exportAttributes(outfile, level, set(), namespace_, name_='documentos')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='documentos', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='documentos'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='documentos', fromsubclass_=False, pretty_print=True):
        # Each populated document delegates to its own export(), in
        # schema order.
        for tag_ in documentos._child_tags_:
            doc_ = getattr(self, tag_)
            if doc_ is not None:
                doc_.export(outfile, level, namespace_, name_=tag_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Map each child tag to its generated complex-type class.
        type_by_tag_ = {
            'CTPS': TCtps,
            'RIC': TRic,
            'RG': TRg,
            'RNE': TRne,
            'OC': TOc,
            'CNH': TCnh,
        }
        klass_ = type_by_tag_.get(nodeName_)
        if klass_ is not None:
            obj_ = klass_.factory()
            obj_.build(child_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
# end class documentos
class endereco(GeneratedsSuper):
    """Worker address group: exactly one of a Brazilian address
    (``brasil``) or a foreign address (``exterior``) per the schema.
    (Original docstring: "Grupo de informações do endereço do
    Trabalhador".)
    """
    subclass = None
    superclass = None
    def __init__(self, brasil=None, exterior=None):
        self.original_tagname_ = None
        self.brasil = brasil
        self.exterior = exterior
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass module first, then a
        # locally registered subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, endereco)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = endereco.subclass or endereco
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_brasil(self):
        return self.brasil
    def set_brasil(self, brasil):
        self.brasil = brasil
    def get_exterior(self):
        return self.exterior
    def set_exterior(self, exterior):
        self.exterior = exterior
    def hasContent_(self):
        # Non-empty when either address variant is populated.
        return self.brasil is not None or self.exterior is not None
    def export(self, outfile, level, namespace_='', name_='endereco', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('endereco')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        self.exportAttributes(outfile, level, set(), namespace_, name_='endereco')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='endereco', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='endereco'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='endereco', fromsubclass_=False, pretty_print=True):
        # Each populated variant delegates to its own export().
        for tag_ in ('brasil', 'exterior'):
            child_obj_ = getattr(self, tag_)
            if child_obj_ is not None:
                child_obj_.export(outfile, level, namespace_, name_=tag_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Map each child tag to its generated complex-type class.
        type_by_tag_ = {
            'brasil': TEnderecoBrasil,
            'exterior': TEnderecoExterior,
        }
        klass_ = type_by_tag_.get(nodeName_)
        if klass_ is not None:
            obj_ = klass_.factory()
            obj_.build(child_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
# end class endereco
class infoDeficiencia(GeneratedsSuper):
subclass = None
superclass = None
    def __init__(self, defFisica=None, defVisual=None, defAuditiva=None, defMental=None, defIntelectual=None, reabReadap=None, infoCota=None, observacao=None):
        # Disability-related worker information; every argument maps
        # one-to-one to a child element of the infoDeficiencia group.
        self.original_tagname_ = None
        self.defFisica = defFisica
        self.defVisual = defVisual
        self.defAuditiva = defAuditiva
        self.defMental = defMental
        self.defIntelectual = defIntelectual
        self.reabReadap = reabReadap
        self.infoCota = infoCota
        self.observacao = observacao
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, infoDeficiencia)
if subclass is not None:
return subclass(*args_, **kwargs_)
if infoDeficiencia.subclass:
return infoDeficiencia.subclass(*args_, **kwargs_)
else:
return infoDeficiencia(*args_, **kwargs_)
factory = staticmethod(factory)
def get_defFisica(self): return self.defFisica
def set_defFisica(self, defFisica): self.defFisica = defFisica
def get_defVisual(self): return self.defVisual
def set_defVisual(self, defVisual): self.defVisual = defVisual
def get_defAuditiva(self): return self.defAuditiva
def set_defAuditiva(self, defAuditiva): self.defAuditiva = defAuditiva
def get_defMental(self): return self.defMental
def set_defMental(self, defMental): self.defMental = defMental
def get_defIntelectual(self): return self.defIntelectual
def set_defIntelectual(self, defIntelectual): self.defIntelectual = defIntelectual
def get_reabReadap(self): return self.reabReadap
def set_reabReadap(self, reabReadap): self.reabReadap = reabReadap
def get_infoCota(self): return self.infoCota
def set_infoCota(self, infoCota): self.infoCota = infoCota
def get_observacao(self): return self.observacao
def set_observacao(self, observacao): self.observacao = observacao
def hasContent_(self):
if (
self.defFisica is not None or
self.defVisual is not None or
self.defAuditiva is not None or
self.defMental is not None or
self.defIntelectual is not None or
self.reabReadap is not None or
self.infoCota is not None or
self.observacao is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='infoDeficiencia', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoDeficiencia')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoDeficiencia')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='infoDeficiencia', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoDeficiencia'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='infoDeficiencia', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.defFisica is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdefFisica>%s</%sdefFisica>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.defFisica), input_name='defFisica')), namespace_, eol_))
if self.defVisual is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdefVisual>%s</%sdefVisual>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.defVisual), input_name='defVisual')), namespace_, eol_))
if self.defAuditiva is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdefAuditiva>%s</%sdefAuditiva>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.defAuditiva), input_name='defAuditiva')), namespace_, eol_))
if self.defMental is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdefMental>%s</%sdefMental>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.defMental), input_name='defMental')), namespace_, eol_))
if self.defIntelectual is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdefIntelectual>%s</%sdefIntelectual>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.defIntelectual), input_name='defIntelectual')), namespace_, eol_))
if self.reabReadap is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sreabReadap>%s</%sreabReadap>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.reabReadap), input_name='reabReadap')), namespace_, eol_))
if self.infoCota is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sinfoCota>%s</%sinfoCota>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.infoCota), input_name='infoCota')), namespace_, eol_))
if self.observacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sobservacao>%s</%sobservacao>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.observacao), input_name='observacao')), namespace_, eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'defFisica':
defFisica_ = child_.text
defFisica_ = self.gds_validate_string(defFisica_, node, 'defFisica')
self.defFisica = defFisica_
elif nodeName_ == 'defVisual':
defVisual_ = child_.text
defVisual_ = self.gds_validate_string(defVisual_, node, 'defVisual')
self.defVisual = defVisual_
elif nodeName_ == 'defAuditiva':
defAuditiva_ = child_.text
defAuditiva_ = self.gds_validate_string(defAuditiva_, node, 'defAuditiva')
self.defAuditiva = defAuditiva_
elif nodeName_ == 'defMental':
defMental_ = child_.text
defMental_ = self.gds_validate_string(defMental_, node, 'defMental')
self.defMental = defMental_
elif nodeName_ == 'defIntelectual':
defIntelectual_ = child_.text
defIntelectual_ = self.gds_validate_string(defIntelectual_, node, 'defIntelectual')
self.defIntelectual = defIntelectual_
elif nodeName_ == 'reabReadap':
reabReadap_ = child_.text
reabReadap_ = self.gds_validate_string(reabReadap_, node, 'reabReadap')
self.reabReadap = reabReadap_
elif nodeName_ == 'infoCota':
infoCota_ = child_.text
infoCota_ = self.gds_validate_string(infoCota_, node, 'infoCota')
self.infoCota = infoCota_
elif nodeName_ == 'observacao':
observacao_ = child_.text
observacao_ = self.gds_validate_string(observacao_, node, 'observacao')
self.observacao = observacao_
# end class infoDeficiencia
class defFisica(GeneratedsSuper):
    """Generated binding for the <defFisica> element: no attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate defFisica, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, defFisica)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if defFisica.subclass:
            return defFisica.subclass(*args_, **kwargs_)
        else:
            return defFisica(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # BUG FIX: the generated empty `if (  ):` condition was a SyntaxError;
        # an element with no attributes or children never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='defFisica', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to outfile as a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('defFisica')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='defFisica')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='defFisica', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='defFisica'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='defFisica', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class defFisica
class defVisual(GeneratedsSuper):
    """Generated binding for the <defVisual> element: no attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate defVisual, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, defVisual)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if defVisual.subclass:
            return defVisual.subclass(*args_, **kwargs_)
        else:
            return defVisual(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # BUG FIX: the generated empty `if (  ):` condition was a SyntaxError;
        # an element with no attributes or children never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='defVisual', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to outfile as a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('defVisual')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='defVisual')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='defVisual', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='defVisual'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='defVisual', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class defVisual
class defAuditiva(GeneratedsSuper):
    """Generated binding for the <defAuditiva> element: no attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate defAuditiva, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, defAuditiva)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if defAuditiva.subclass:
            return defAuditiva.subclass(*args_, **kwargs_)
        else:
            return defAuditiva(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # BUG FIX: the generated empty `if (  ):` condition was a SyntaxError;
        # an element with no attributes or children never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='defAuditiva', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to outfile as a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('defAuditiva')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='defAuditiva')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='defAuditiva', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='defAuditiva'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='defAuditiva', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class defAuditiva
class defMental(GeneratedsSuper):
    """Generated binding for the <defMental> element: no attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate defMental, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, defMental)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if defMental.subclass:
            return defMental.subclass(*args_, **kwargs_)
        else:
            return defMental(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # BUG FIX: the generated empty `if (  ):` condition was a SyntaxError;
        # an element with no attributes or children never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='defMental', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to outfile as a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('defMental')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='defMental')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='defMental', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='defMental'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='defMental', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class defMental
class defIntelectual(GeneratedsSuper):
    """Generated binding for the <defIntelectual> element: no attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate defIntelectual, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, defIntelectual)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if defIntelectual.subclass:
            return defIntelectual.subclass(*args_, **kwargs_)
        else:
            return defIntelectual(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # BUG FIX: the generated empty `if (  ):` condition was a SyntaxError;
        # an element with no attributes or children never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='defIntelectual', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to outfile as a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('defIntelectual')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='defIntelectual')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='defIntelectual', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='defIntelectual'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='defIntelectual', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class defIntelectual
class reabReadap(GeneratedsSuper):
    """Generated binding for the <reabReadap> element: no attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate reabReadap, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, reabReadap)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if reabReadap.subclass:
            return reabReadap.subclass(*args_, **kwargs_)
        else:
            return reabReadap(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # BUG FIX: the generated empty `if (  ):` condition was a SyntaxError;
        # an element with no attributes or children never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='reabReadap', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to outfile as a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('reabReadap')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='reabReadap')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='reabReadap', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='reabReadap'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='reabReadap', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class reabReadap
class infoCota(GeneratedsSuper):
    """Generated binding for the <infoCota> element: no attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate infoCota, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, infoCota)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if infoCota.subclass:
            return infoCota.subclass(*args_, **kwargs_)
        else:
            return infoCota(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # BUG FIX: the generated empty `if (  ):` condition was a SyntaxError;
        # an element with no attributes or children never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='infoCota', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to outfile as a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoCota')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoCota')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='infoCota', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoCota'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='infoCota', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class infoCota
class observacao(GeneratedsSuper):
    """Generated binding for the <observacao> element: no attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate observacao, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, observacao)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if observacao.subclass:
            return observacao.subclass(*args_, **kwargs_)
        else:
            return observacao(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # BUG FIX: the generated empty `if (  ):` condition was a SyntaxError;
        # an element with no attributes or children never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='observacao', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to outfile as a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('observacao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='observacao')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='observacao', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='observacao'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='observacao', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class observacao
class aposentadoria(GeneratedsSuper):
    """Retirement information for the worker (Informação de aposentadoria do trabalhador).

    Holds a single optional string child, <trabAposent>.
    """
    subclass = None
    superclass = None
    def __init__(self, trabAposent=None):
        self.original_tagname_ = None
        self.trabAposent = trabAposent
    def factory(*args_, **kwargs_):
        """Instantiate aposentadoria, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, aposentadoria)
            if override is not None:
                return override(*args_, **kwargs_)
        if aposentadoria.subclass:
            return aposentadoria.subclass(*args_, **kwargs_)
        return aposentadoria(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_trabAposent(self): return self.trabAposent
    def set_trabAposent(self, trabAposent): self.trabAposent = trabAposent
    def hasContent_(self):
        return self.trabAposent is not None
    def export(self, outfile, level, namespace_='', name_='aposentadoria', namespacedef_='', pretty_print=True):
        """Serialize this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('aposentadoria')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='aposentadoria')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, namespace_='', name_='aposentadoria', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='aposentadoria'):
        # No XML attributes defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='aposentadoria', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.trabAposent is not None:
            showIndent(outfile, level, pretty_print)
            encoded = self.gds_encode(self.gds_format_string(quote_xml(self.trabAposent), input_name='trabAposent'))
            outfile.write('<%strabAposent>%s</%strabAposent>%s' % (namespace_, encoded, namespace_, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'trabAposent':
            self.trabAposent = self.gds_validate_string(child_.text, node, 'trabAposent')
# end class aposentadoria
class trabAposent(GeneratedsSuper):
    """Generated binding for the <trabAposent> element: no attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate trabAposent, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, trabAposent)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if trabAposent.subclass:
            return trabAposent.subclass(*args_, **kwargs_)
        else:
            return trabAposent(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # BUG FIX: the generated empty `if (  ):` condition was a SyntaxError;
        # an element with no attributes or children never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='trabAposent', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to outfile as a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('trabAposent')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='trabAposent')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='trabAposent', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='trabAposent'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='trabAposent', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class trabAposent
class TIdeEveTrab(GeneratedsSuper):
    """Event identification (Identificação do evento).

    indRetif, tpAmb and procEmi are integer children; nrRecibo and verProc
    are string children. All are optional.
    """
    subclass = None
    superclass = None
    def __init__(self, indRetif=None, nrRecibo=None, tpAmb=None, procEmi=None, verProc=None):
        self.original_tagname_ = None
        self.indRetif = indRetif
        self.nrRecibo = nrRecibo
        self.tpAmb = tpAmb
        self.procEmi = procEmi
        self.verProc = verProc
    def factory(*args_, **kwargs_):
        """Instantiate TIdeEveTrab, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, TIdeEveTrab)
            if override is not None:
                return override(*args_, **kwargs_)
        if TIdeEveTrab.subclass:
            return TIdeEveTrab.subclass(*args_, **kwargs_)
        return TIdeEveTrab(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_indRetif(self): return self.indRetif
    def set_indRetif(self, indRetif): self.indRetif = indRetif
    def get_nrRecibo(self): return self.nrRecibo
    def set_nrRecibo(self, nrRecibo): self.nrRecibo = nrRecibo
    def get_tpAmb(self): return self.tpAmb
    def set_tpAmb(self, tpAmb): self.tpAmb = tpAmb
    def get_procEmi(self): return self.procEmi
    def set_procEmi(self, procEmi): self.procEmi = procEmi
    def get_verProc(self): return self.verProc
    def set_verProc(self, verProc): self.verProc = verProc
    def hasContent_(self):
        members = (self.indRetif, self.nrRecibo, self.tpAmb,
                   self.procEmi, self.verProc)
        return any(member is not None for member in members)
    def export(self, outfile, level, namespace_='', name_='TIdeEveTrab', namespacedef_='', pretty_print=True):
        """Serialize this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TIdeEveTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TIdeEveTrab')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, namespace_='', name_='TIdeEveTrab', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TIdeEveTrab'):
        # No XML attributes defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TIdeEveTrab', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        # (tag, value, is_integer) in schema serialization order.
        fields = (
            ('indRetif', self.indRetif, True),
            ('nrRecibo', self.nrRecibo, False),
            ('tpAmb', self.tpAmb, True),
            ('procEmi', self.procEmi, True),
            ('verProc', self.verProc, False),
        )
        for tag, value, is_integer in fields:
            if value is None:
                continue
            showIndent(outfile, level, pretty_print)
            if is_integer:
                text = self.gds_format_integer(value, input_name=tag)
            else:
                text = self.gds_encode(self.gds_format_string(quote_xml(value), input_name=tag))
            outfile.write('<%s%s>%s</%s%s>%s' % (namespace_, tag, text, namespace_, tag, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this element.
        pass
    def _parse_integer_child_(self, child_, node, name_):
        # Parse an integer-valued child element, raising a parse error on bad text.
        try:
            ival_ = int(child_.text)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        return self.gds_validate_integer(ival_, node, name_)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ in ('indRetif', 'tpAmb', 'procEmi'):
            setattr(self, nodeName_, self._parse_integer_child_(child_, node, nodeName_))
        elif nodeName_ in ('nrRecibo', 'verProc'):
            setattr(self, nodeName_, self.gds_validate_string(child_.text, node, nodeName_))
# end class TIdeEveTrab
class indRetif(GeneratedsSuper):
    """Generated binding for the <indRetif> element: no attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate indRetif, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, indRetif)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if indRetif.subclass:
            return indRetif.subclass(*args_, **kwargs_)
        else:
            return indRetif(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # BUG FIX: the generated empty `if (  ):` condition was a SyntaxError;
        # an element with no attributes or children never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='indRetif', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to outfile as a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('indRetif')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='indRetif')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='indRetif', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='indRetif'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='indRetif', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class indRetif
class nrRecibo(GeneratedsSuper):
    """Generated binding for the simple ``nrRecibo`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrRecibo)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrRecibo.subclass:
            return nrRecibo.subclass(*args_, **kwargs_)
        return nrRecibo(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrRecibo', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRecibo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRecibo')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='nrRecibo', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRecibo'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nrRecibo', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class nrRecibo
class tpAmb(GeneratedsSuper):
    """Generated binding for the simple ``tpAmb`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpAmb)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpAmb.subclass:
            return tpAmb.subclass(*args_, **kwargs_)
        return tpAmb(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='tpAmb', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpAmb')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpAmb')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tpAmb', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpAmb'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpAmb', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class tpAmb
class procEmi(GeneratedsSuper):
    """Generated binding for the simple ``procEmi`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, procEmi)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if procEmi.subclass:
            return procEmi.subclass(*args_, **kwargs_)
        return procEmi(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='procEmi', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('procEmi')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='procEmi')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='procEmi', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='procEmi'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='procEmi', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class procEmi
class verProc(GeneratedsSuper):
    """Generated binding for the simple ``verProc`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, verProc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if verProc.subclass:
            return verProc.subclass(*args_, **kwargs_)
        return verProc(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='verProc', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('verProc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='verProc')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='verProc', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='verProc'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='verProc', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class verProc
class TEmpregador(GeneratedsSuper):
    """Generated binding for ``TEmpregador``.

    Children: ``tpInsc`` (parsed as an integer) and ``nrInsc`` (a string).
    """
    subclass = None
    superclass = None

    def __init__(self, tpInsc=None, nrInsc=None):
        self.original_tagname_ = None
        self.tpInsc = tpInsc  # integer-valued child element
        self.nrInsc = nrInsc  # string-valued child element

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TEmpregador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TEmpregador.subclass:
            return TEmpregador.subclass(*args_, **kwargs_)
        return TEmpregador(*args_, **kwargs_)

    def get_tpInsc(self): return self.tpInsc
    def set_tpInsc(self, tpInsc): self.tpInsc = tpInsc
    def get_nrInsc(self): return self.nrInsc
    def set_nrInsc(self, nrInsc): self.nrInsc = nrInsc

    def hasContent_(self):
        # True when any child element is populated.
        return self.tpInsc is not None or self.nrInsc is not None

    def export(self, outfile, level, namespace_='', name_='TEmpregador', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TEmpregador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEmpregador')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='TEmpregador', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEmpregador'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='TEmpregador', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.tpInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpInsc>%s</%stpInsc>%s' % (namespace_, self.gds_format_integer(self.tpInsc, input_name='tpInsc'), namespace_, eol_))
        if self.nrInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrInsc>%s</%snrInsc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrInsc), input_name='nrInsc')), namespace_, eol_))

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'tpInsc':
            try:
                value_ = int(child_.text)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.tpInsc = self.gds_validate_integer(value_, node, 'tpInsc')
        elif nodeName_ == 'nrInsc':
            self.nrInsc = self.gds_validate_string(child_.text, node, 'nrInsc')
# end class TEmpregador
class tpInsc(GeneratedsSuper):
    """Generated binding for the simple ``tpInsc`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpInsc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpInsc.subclass:
            return tpInsc.subclass(*args_, **kwargs_)
        return tpInsc(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='tpInsc', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpInsc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpInsc')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tpInsc', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpInsc'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpInsc', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class tpInsc
class nrInsc(GeneratedsSuper):
    """Generated binding for the simple ``nrInsc`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrInsc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrInsc.subclass:
            return nrInsc.subclass(*args_, **kwargs_)
        return nrInsc(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrInsc', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrInsc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrInsc')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='nrInsc', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrInsc'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nrInsc', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class nrInsc
class TCtps(GeneratedsSuper):
    """Carteira de Trabalho e Previdência Social (work-card record).

    Children (all parsed as strings): ``nrCtps``, ``serieCtps``, ``ufCtps``.
    """
    subclass = None
    superclass = None

    def __init__(self, nrCtps=None, serieCtps=None, ufCtps=None):
        self.original_tagname_ = None
        self.nrCtps = nrCtps
        self.serieCtps = serieCtps
        self.ufCtps = ufCtps

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TCtps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TCtps.subclass:
            return TCtps.subclass(*args_, **kwargs_)
        return TCtps(*args_, **kwargs_)

    def get_nrCtps(self): return self.nrCtps
    def set_nrCtps(self, nrCtps): self.nrCtps = nrCtps
    def get_serieCtps(self): return self.serieCtps
    def set_serieCtps(self, serieCtps): self.serieCtps = serieCtps
    def get_ufCtps(self): return self.ufCtps
    def set_ufCtps(self, ufCtps): self.ufCtps = ufCtps

    def hasContent_(self):
        # True when any child element is populated.
        return (self.nrCtps is not None or
                self.serieCtps is not None or
                self.ufCtps is not None)

    def export(self, outfile, level, namespace_='', name_='TCtps', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TCtps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TCtps')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='TCtps', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TCtps'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='TCtps', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.nrCtps is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrCtps>%s</%snrCtps>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrCtps), input_name='nrCtps')), namespace_, eol_))
        if self.serieCtps is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sserieCtps>%s</%sserieCtps>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.serieCtps), input_name='serieCtps')), namespace_, eol_))
        if self.ufCtps is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sufCtps>%s</%sufCtps>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.ufCtps), input_name='ufCtps')), namespace_, eol_))

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # All three children are validated as plain strings.
        if nodeName_ in ('nrCtps', 'serieCtps', 'ufCtps'):
            value_ = self.gds_validate_string(child_.text, node, nodeName_)
            setattr(self, nodeName_, value_)
# end class TCtps
class nrCtps(GeneratedsSuper):
    """Generated binding for the simple ``nrCtps`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrCtps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrCtps.subclass:
            return nrCtps.subclass(*args_, **kwargs_)
        return nrCtps(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrCtps', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrCtps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrCtps')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='nrCtps', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrCtps'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nrCtps', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class nrCtps
class serieCtps(GeneratedsSuper):
    """Generated binding for the simple ``serieCtps`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, serieCtps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if serieCtps.subclass:
            return serieCtps.subclass(*args_, **kwargs_)
        return serieCtps(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='serieCtps', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('serieCtps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='serieCtps')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='serieCtps', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='serieCtps'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='serieCtps', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class serieCtps
class ufCtps(GeneratedsSuper):
    """Generated binding for the simple ``ufCtps`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ufCtps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ufCtps.subclass:
            return ufCtps.subclass(*args_, **kwargs_)
        return ufCtps(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='ufCtps', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ufCtps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ufCtps')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='ufCtps', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ufCtps'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='ufCtps', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class ufCtps
class TRic(GeneratedsSuper):
    """Registro de Identificação Civil (civil identification record).

    Children: ``nrRic`` and ``orgaoEmissor`` (strings) and ``dtExped``
    (a date; string inputs are parsed with the '%Y-%m-%d' format).
    """
    subclass = None
    superclass = None

    def __init__(self, nrRic=None, orgaoEmissor=None, dtExped=None):
        self.original_tagname_ = None
        self.nrRic = nrRic
        self.orgaoEmissor = orgaoEmissor
        # Accept either a date object or an ISO 'YYYY-MM-DD' string.
        if isinstance(dtExped, BaseStrType_):
            self.dtExped = datetime_.datetime.strptime(dtExped, '%Y-%m-%d').date()
        else:
            self.dtExped = dtExped

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TRic)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TRic.subclass:
            return TRic.subclass(*args_, **kwargs_)
        return TRic(*args_, **kwargs_)

    def get_nrRic(self): return self.nrRic
    def set_nrRic(self, nrRic): self.nrRic = nrRic
    def get_orgaoEmissor(self): return self.orgaoEmissor
    def set_orgaoEmissor(self, orgaoEmissor): self.orgaoEmissor = orgaoEmissor
    def get_dtExped(self): return self.dtExped
    def set_dtExped(self, dtExped): self.dtExped = dtExped

    def hasContent_(self):
        # True when any child element is populated.
        return (self.nrRic is not None or
                self.orgaoEmissor is not None or
                self.dtExped is not None)

    def export(self, outfile, level, namespace_='', name_='TRic', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TRic')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TRic')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='TRic', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TRic'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='TRic', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.nrRic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrRic>%s</%snrRic>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrRic), input_name='nrRic')), namespace_, eol_))
        if self.orgaoEmissor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sorgaoEmissor>%s</%sorgaoEmissor>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.orgaoEmissor), input_name='orgaoEmissor')), namespace_, eol_))
        if self.dtExped is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtExped>%s</%sdtExped>%s' % (namespace_, self.gds_format_date(self.dtExped, input_name='dtExped'), namespace_, eol_))

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ in ('nrRic', 'orgaoEmissor'):
            value_ = self.gds_validate_string(child_.text, node, nodeName_)
            setattr(self, nodeName_, value_)
        elif nodeName_ == 'dtExped':
            self.dtExped = self.gds_parse_date(child_.text)
# end class TRic
class nrRic(GeneratedsSuper):
    """Generated binding for the simple ``nrRic`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrRic)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrRic.subclass:
            return nrRic.subclass(*args_, **kwargs_)
        return nrRic(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrRic', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRic')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRic')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='nrRic', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRic'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nrRic', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class nrRic
class orgaoEmissor(GeneratedsSuper):
    """Generated binding for the simple ``orgaoEmissor`` element (no members)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document, if it differs from the class name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in an external module, then the
        # class-level subclass hook, falling back to this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, orgaoEmissor)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if orgaoEmissor.subclass:
            return orgaoEmissor.subclass(*args_, **kwargs_)
        return orgaoEmissor(*args_, **kwargs_)

    def hasContent_(self):
        # This element defines no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='orgaoEmissor', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('orgaoEmissor')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='orgaoEmissor')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='orgaoEmissor', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='orgaoEmissor'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='orgaoEmissor', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this object from the parsed XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to parse.
        pass
# end class orgaoEmissor
class dtExped(GeneratedsSuper):
    """Generated binding for the ``dtExped`` element (empty content model)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set once build() sees the tag actually used in the document.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtExped)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = dtExped.subclass or dtExped
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # This type declares no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='dtExped', namespacedef_='', pretty_print=True):
        """Write this element as XML to *outfile* at indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtExped')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtExped')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtExped', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtExped'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtExped', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtExped
class TRg(GeneratedsSuper):
    """Registro Geral (Brazilian general identity card, RG).

    Children: ``nrRg`` (string), ``orgaoEmissor`` (string), ``dtExped``
    (ISO ``YYYY-MM-DD`` date).
    """
    subclass = None
    superclass = None

    def __init__(self, nrRg=None, orgaoEmissor=None, dtExped=None):
        self.original_tagname_ = None
        self.nrRg = nrRg
        self.orgaoEmissor = orgaoEmissor
        # Accept either a date object or an ISO-format string for dtExped.
        if isinstance(dtExped, BaseStrType_):
            self.dtExped = datetime_.datetime.strptime(dtExped, '%Y-%m-%d').date()
        else:
            self.dtExped = dtExped

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TRg)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = TRg.subclass or TRg
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_nrRg(self): return self.nrRg
    def set_nrRg(self, nrRg): self.nrRg = nrRg
    def get_orgaoEmissor(self): return self.orgaoEmissor
    def set_orgaoEmissor(self, orgaoEmissor): self.orgaoEmissor = orgaoEmissor
    def get_dtExped(self): return self.dtExped
    def set_dtExped(self, dtExped): self.dtExped = dtExped

    def hasContent_(self):
        # True when at least one child element is populated.
        return (
            self.nrRg is not None
            or self.orgaoEmissor is not None
            or self.dtExped is not None
        )

    def export(self, outfile, level, namespace_='', name_='TRg', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TRg')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TRg')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TRg', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TRg'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='TRg', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.nrRg is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrRg>%s</%snrRg>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrRg), input_name='nrRg')), namespace_, eol_))
        if self.orgaoEmissor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sorgaoEmissor>%s</%sorgaoEmissor>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.orgaoEmissor), input_name='orgaoEmissor')), namespace_, eol_))
        if self.dtExped is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtExped>%s</%sdtExped>%s' % (namespace_, self.gds_format_date(self.dtExped, input_name='dtExped'), namespace_, eol_))

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child's tag name; unknown tags are ignored.
        if nodeName_ == 'nrRg':
            self.nrRg = self.gds_validate_string(child_.text, node, 'nrRg')
        elif nodeName_ == 'orgaoEmissor':
            self.orgaoEmissor = self.gds_validate_string(child_.text, node, 'orgaoEmissor')
        elif nodeName_ == 'dtExped':
            self.dtExped = self.gds_parse_date(child_.text)
# end class TRg
class nrRg(GeneratedsSuper):
    """Generated binding for the ``nrRg`` element (empty content model)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set once build() sees the tag actually used in the document.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrRg)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = nrRg.subclass or nrRg
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # This type declares no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrRg', namespacedef_='', pretty_print=True):
        """Write this element as XML to *outfile* at indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRg')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRg')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRg', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRg'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nrRg', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrRg
class TRne(GeneratedsSuper):
    """Registro Nacional de Estrangeiros (national registry of foreigners).

    Children: ``nrRne`` (string), ``orgaoEmissor`` (string), ``dtExped``
    (ISO ``YYYY-MM-DD`` date).
    """
    subclass = None
    superclass = None

    def __init__(self, nrRne=None, orgaoEmissor=None, dtExped=None):
        self.original_tagname_ = None
        self.nrRne = nrRne
        self.orgaoEmissor = orgaoEmissor
        # Accept either a date object or an ISO-format string for dtExped.
        if isinstance(dtExped, BaseStrType_):
            self.dtExped = datetime_.datetime.strptime(dtExped, '%Y-%m-%d').date()
        else:
            self.dtExped = dtExped

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TRne)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = TRne.subclass or TRne
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_nrRne(self): return self.nrRne
    def set_nrRne(self, nrRne): self.nrRne = nrRne
    def get_orgaoEmissor(self): return self.orgaoEmissor
    def set_orgaoEmissor(self, orgaoEmissor): self.orgaoEmissor = orgaoEmissor
    def get_dtExped(self): return self.dtExped
    def set_dtExped(self, dtExped): self.dtExped = dtExped

    def hasContent_(self):
        # True when at least one child element is populated.
        return (
            self.nrRne is not None
            or self.orgaoEmissor is not None
            or self.dtExped is not None
        )

    def export(self, outfile, level, namespace_='', name_='TRne', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TRne')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TRne')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TRne', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TRne'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='TRne', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.nrRne is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrRne>%s</%snrRne>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrRne), input_name='nrRne')), namespace_, eol_))
        if self.orgaoEmissor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sorgaoEmissor>%s</%sorgaoEmissor>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.orgaoEmissor), input_name='orgaoEmissor')), namespace_, eol_))
        if self.dtExped is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtExped>%s</%sdtExped>%s' % (namespace_, self.gds_format_date(self.dtExped, input_name='dtExped'), namespace_, eol_))

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child's tag name; unknown tags are ignored.
        if nodeName_ == 'nrRne':
            self.nrRne = self.gds_validate_string(child_.text, node, 'nrRne')
        elif nodeName_ == 'orgaoEmissor':
            self.orgaoEmissor = self.gds_validate_string(child_.text, node, 'orgaoEmissor')
        elif nodeName_ == 'dtExped':
            self.dtExped = self.gds_parse_date(child_.text)
# end class TRne
class nrRne(GeneratedsSuper):
    """Generated binding for the ``nrRne`` element (empty content model)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set once build() sees the tag actually used in the document.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrRne)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = nrRne.subclass or nrRne
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # This type declares no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrRne', namespacedef_='', pretty_print=True):
        """Write this element as XML to *outfile* at indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRne')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRne')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRne', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRne'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nrRne', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrRne
class TOc(GeneratedsSuper):
    """Órgão de Classe (professional class council registration).

    Children: ``nrOc`` (string), ``orgaoEmissor`` (string), ``dtExped`` and
    ``dtValid`` (ISO ``YYYY-MM-DD`` dates).
    """
    subclass = None
    superclass = None

    def __init__(self, nrOc=None, orgaoEmissor=None, dtExped=None, dtValid=None):
        self.original_tagname_ = None
        self.nrOc = nrOc
        self.orgaoEmissor = orgaoEmissor
        # Date fields accept either date objects or ISO-format strings.
        if isinstance(dtExped, BaseStrType_):
            self.dtExped = datetime_.datetime.strptime(dtExped, '%Y-%m-%d').date()
        else:
            self.dtExped = dtExped
        if isinstance(dtValid, BaseStrType_):
            self.dtValid = datetime_.datetime.strptime(dtValid, '%Y-%m-%d').date()
        else:
            self.dtValid = dtValid

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TOc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = TOc.subclass or TOc
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_nrOc(self): return self.nrOc
    def set_nrOc(self, nrOc): self.nrOc = nrOc
    def get_orgaoEmissor(self): return self.orgaoEmissor
    def set_orgaoEmissor(self, orgaoEmissor): self.orgaoEmissor = orgaoEmissor
    def get_dtExped(self): return self.dtExped
    def set_dtExped(self, dtExped): self.dtExped = dtExped
    def get_dtValid(self): return self.dtValid
    def set_dtValid(self, dtValid): self.dtValid = dtValid

    def hasContent_(self):
        # True when at least one child element is populated.
        return (
            self.nrOc is not None
            or self.orgaoEmissor is not None
            or self.dtExped is not None
            or self.dtValid is not None
        )

    def export(self, outfile, level, namespace_='', name_='TOc', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TOc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TOc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TOc', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TOc'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='TOc', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.nrOc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrOc>%s</%snrOc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrOc), input_name='nrOc')), namespace_, eol_))
        if self.orgaoEmissor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sorgaoEmissor>%s</%sorgaoEmissor>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.orgaoEmissor), input_name='orgaoEmissor')), namespace_, eol_))
        if self.dtExped is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtExped>%s</%sdtExped>%s' % (namespace_, self.gds_format_date(self.dtExped, input_name='dtExped'), namespace_, eol_))
        if self.dtValid is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtValid>%s</%sdtValid>%s' % (namespace_, self.gds_format_date(self.dtValid, input_name='dtValid'), namespace_, eol_))

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child's tag name; unknown tags are ignored.
        if nodeName_ == 'nrOc':
            self.nrOc = self.gds_validate_string(child_.text, node, 'nrOc')
        elif nodeName_ == 'orgaoEmissor':
            self.orgaoEmissor = self.gds_validate_string(child_.text, node, 'orgaoEmissor')
        elif nodeName_ == 'dtExped':
            self.dtExped = self.gds_parse_date(child_.text)
        elif nodeName_ == 'dtValid':
            self.dtValid = self.gds_parse_date(child_.text)
# end class TOc
class nrOc(GeneratedsSuper):
    """Generated binding for the ``nrOc`` element (empty content model)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set once build() sees the tag actually used in the document.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrOc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = nrOc.subclass or nrOc
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # This type declares no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrOc', namespacedef_='', pretty_print=True):
        """Write this element as XML to *outfile* at indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrOc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrOc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrOc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrOc'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nrOc', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrOc
class dtValid(GeneratedsSuper):
    """Generated binding for the ``dtValid`` element (empty content model)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set once build() sees the tag actually used in the document.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtValid)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = dtValid.subclass or dtValid
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # This type declares no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='dtValid', namespacedef_='', pretty_print=True):
        """Write this element as XML to *outfile* at indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtValid')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtValid')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtValid', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtValid'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtValid', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtValid
class TCnh(GeneratedsSuper):
    """Carteira Nacional de Habilitação (Brazilian driver's license, CNH).

    Children: ``nrRegCnh``, ``ufCnh``, ``categoriaCnh`` (strings) and
    ``dtExped``, ``dtValid``, ``dtPriHab`` (ISO ``YYYY-MM-DD`` dates).
    """
    subclass = None
    superclass = None

    def __init__(self, nrRegCnh=None, dtExped=None, ufCnh=None, dtValid=None, dtPriHab=None, categoriaCnh=None):
        self.original_tagname_ = None
        self.nrRegCnh = nrRegCnh
        # Date fields accept either date objects or ISO-format strings.
        if isinstance(dtExped, BaseStrType_):
            self.dtExped = datetime_.datetime.strptime(dtExped, '%Y-%m-%d').date()
        else:
            self.dtExped = dtExped
        self.ufCnh = ufCnh
        if isinstance(dtValid, BaseStrType_):
            self.dtValid = datetime_.datetime.strptime(dtValid, '%Y-%m-%d').date()
        else:
            self.dtValid = dtValid
        if isinstance(dtPriHab, BaseStrType_):
            self.dtPriHab = datetime_.datetime.strptime(dtPriHab, '%Y-%m-%d').date()
        else:
            self.dtPriHab = dtPriHab
        self.categoriaCnh = categoriaCnh

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TCnh)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = TCnh.subclass or TCnh
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_nrRegCnh(self): return self.nrRegCnh
    def set_nrRegCnh(self, nrRegCnh): self.nrRegCnh = nrRegCnh
    def get_dtExped(self): return self.dtExped
    def set_dtExped(self, dtExped): self.dtExped = dtExped
    def get_ufCnh(self): return self.ufCnh
    def set_ufCnh(self, ufCnh): self.ufCnh = ufCnh
    def get_dtValid(self): return self.dtValid
    def set_dtValid(self, dtValid): self.dtValid = dtValid
    def get_dtPriHab(self): return self.dtPriHab
    def set_dtPriHab(self, dtPriHab): self.dtPriHab = dtPriHab
    def get_categoriaCnh(self): return self.categoriaCnh
    def set_categoriaCnh(self, categoriaCnh): self.categoriaCnh = categoriaCnh

    def hasContent_(self):
        # True when at least one child element is populated.
        return (
            self.nrRegCnh is not None
            or self.dtExped is not None
            or self.ufCnh is not None
            or self.dtValid is not None
            or self.dtPriHab is not None
            or self.categoriaCnh is not None
        )

    def export(self, outfile, level, namespace_='', name_='TCnh', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TCnh')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TCnh')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TCnh', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TCnh'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='TCnh', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.nrRegCnh is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrRegCnh>%s</%snrRegCnh>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrRegCnh), input_name='nrRegCnh')), namespace_, eol_))
        if self.dtExped is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtExped>%s</%sdtExped>%s' % (namespace_, self.gds_format_date(self.dtExped, input_name='dtExped'), namespace_, eol_))
        if self.ufCnh is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sufCnh>%s</%sufCnh>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.ufCnh), input_name='ufCnh')), namespace_, eol_))
        if self.dtValid is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtValid>%s</%sdtValid>%s' % (namespace_, self.gds_format_date(self.dtValid, input_name='dtValid'), namespace_, eol_))
        if self.dtPriHab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtPriHab>%s</%sdtPriHab>%s' % (namespace_, self.gds_format_date(self.dtPriHab, input_name='dtPriHab'), namespace_, eol_))
        if self.categoriaCnh is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scategoriaCnh>%s</%scategoriaCnh>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.categoriaCnh), input_name='categoriaCnh')), namespace_, eol_))

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child's tag name; unknown tags are ignored.
        if nodeName_ == 'nrRegCnh':
            self.nrRegCnh = self.gds_validate_string(child_.text, node, 'nrRegCnh')
        elif nodeName_ == 'dtExped':
            self.dtExped = self.gds_parse_date(child_.text)
        elif nodeName_ == 'ufCnh':
            self.ufCnh = self.gds_validate_string(child_.text, node, 'ufCnh')
        elif nodeName_ == 'dtValid':
            self.dtValid = self.gds_parse_date(child_.text)
        elif nodeName_ == 'dtPriHab':
            self.dtPriHab = self.gds_parse_date(child_.text)
        elif nodeName_ == 'categoriaCnh':
            self.categoriaCnh = self.gds_validate_string(child_.text, node, 'categoriaCnh')
# end class TCnh
class nrRegCnh(GeneratedsSuper):
    """Generated binding for the ``nrRegCnh`` element (empty content model)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set once build() sees the tag actually used in the document.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrRegCnh)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = nrRegCnh.subclass or nrRegCnh
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # This type declares no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrRegCnh', namespacedef_='', pretty_print=True):
        """Write this element as XML to *outfile* at indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRegCnh')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRegCnh')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRegCnh', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRegCnh'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nrRegCnh', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrRegCnh
class ufCnh(GeneratedsSuper):
    """Generated binding for the ``ufCnh`` element (empty content model)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set once build() sees the tag actually used in the document.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        # An externally registered subclass module takes priority.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ufCnh)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        target_ = ufCnh.subclass or ufCnh
        return target_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # This type declares no children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='ufCnh', namespacedef_='', pretty_print=True):
        """Write this element as XML to *outfile* at indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ufCnh')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ufCnh')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ufCnh', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ufCnh'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='ufCnh', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the parsed XML *node*; returns ``self``."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class ufCnh
class dtPriHab(GeneratedsSuper):
    """Generated binding for the simple ``dtPriHab`` element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None  # remembers the tag name seen on parse

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a dtPriHab, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtPriHab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtPriHab.subclass:
            return dtPriHab.subclass(*args_, **kwargs_)
        return dtPriHab(*args_, **kwargs_)

    def hasContent_(self):
        # This generated class never holds children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='dtPriHab', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtPriHab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtPriHab')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtPriHab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtPriHab'):
        pass  # no attributes defined for this element

    def exportChildren(self, outfile, level, namespace_='', name_='dtPriHab', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # nothing to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # nothing to parse
# end class dtPriHab
class categoriaCnh(GeneratedsSuper):
    """Generated binding for the simple ``categoriaCnh`` element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None  # remembers the tag name seen on parse

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a categoriaCnh, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, categoriaCnh)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if categoriaCnh.subclass:
            return categoriaCnh.subclass(*args_, **kwargs_)
        return categoriaCnh(*args_, **kwargs_)

    def hasContent_(self):
        # This generated class never holds children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='categoriaCnh', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('categoriaCnh')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='categoriaCnh')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='categoriaCnh', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='categoriaCnh'):
        pass  # no attributes defined for this element

    def exportChildren(self, outfile, level, namespace_='', name_='categoriaCnh', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # nothing to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # nothing to parse
# end class categoriaCnh
class TEnderecoBrasil(GeneratedsSuper):
    """Informações do Endereço no Brasil (Brazilian address information).

    Children (all optional): tpLograd, dscLograd, nrLograd, complemento,
    bairro, cep (strings); codMunic (integer); uf (string).
    """
    subclass = None
    superclass = None

    # Child elements serialized/parsed as plain strings, in schema order.
    _string_children = ('tpLograd', 'dscLograd', 'nrLograd', 'complemento', 'bairro', 'cep')

    def __init__(self, tpLograd=None, dscLograd=None, nrLograd=None, complemento=None, bairro=None, cep=None, codMunic=None, uf=None):
        self.original_tagname_ = None  # remembers the tag name seen on parse
        self.tpLograd = tpLograd
        self.dscLograd = dscLograd
        self.nrLograd = nrLograd
        self.complemento = complemento
        self.bairro = bairro
        self.cep = cep
        self.codMunic = codMunic
        self.uf = uf

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a TEnderecoBrasil, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TEnderecoBrasil)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TEnderecoBrasil.subclass:
            return TEnderecoBrasil.subclass(*args_, **kwargs_)
        return TEnderecoBrasil(*args_, **kwargs_)

    # --- accessors (generated-code convention) ---
    def get_tpLograd(self):
        return self.tpLograd

    def set_tpLograd(self, tpLograd):
        self.tpLograd = tpLograd

    def get_dscLograd(self):
        return self.dscLograd

    def set_dscLograd(self, dscLograd):
        self.dscLograd = dscLograd

    def get_nrLograd(self):
        return self.nrLograd

    def set_nrLograd(self, nrLograd):
        self.nrLograd = nrLograd

    def get_complemento(self):
        return self.complemento

    def set_complemento(self, complemento):
        self.complemento = complemento

    def get_bairro(self):
        return self.bairro

    def set_bairro(self, bairro):
        self.bairro = bairro

    def get_cep(self):
        return self.cep

    def set_cep(self, cep):
        self.cep = cep

    def get_codMunic(self):
        return self.codMunic

    def set_codMunic(self, codMunic):
        self.codMunic = codMunic

    def get_uf(self):
        return self.uf

    def set_uf(self, uf):
        self.uf = uf

    def hasContent_(self):
        # True when any child element is set.
        return any(
            value is not None
            for value in (
                self.tpLograd, self.dscLograd, self.nrLograd,
                self.complemento, self.bairro, self.cep,
                self.codMunic, self.uf,
            )
        )

    def export(self, outfile, level, namespace_='', name_='TEnderecoBrasil', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TEnderecoBrasil')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEnderecoBrasil')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TEnderecoBrasil', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEnderecoBrasil'):
        pass  # no XML attributes defined for this type

    def exportChildren(self, outfile, level, namespace_='', name_='TEnderecoBrasil', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        # String-valued children are all rendered the same way, in schema order.
        for field_ in self._string_children:
            value_ = getattr(self, field_)
            if value_ is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<%s%s>%s</%s%s>%s' % (
                    namespace_, field_,
                    self.gds_encode(self.gds_format_string(quote_xml(value_), input_name=field_)),
                    namespace_, field_, eol_))
        # codMunic is an integer and uses the integer formatter.
        if self.codMunic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodMunic>%s</%scodMunic>%s' % (namespace_, self.gds_format_integer(self.codMunic, input_name='codMunic'), namespace_, eol_))
        if self.uf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%suf>%s</%suf>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.uf), input_name='uf')), namespace_, eol_))

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no XML attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ in self._string_children or nodeName_ == 'uf':
            # All string children validate and store identically.
            value_ = self.gds_validate_string(child_.text, node, nodeName_)
            setattr(self, nodeName_, value_)
        elif nodeName_ == 'codMunic':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.codMunic = self.gds_validate_integer(ival_, node, 'codMunic')
# end class TEnderecoBrasil
class tpLograd(GeneratedsSuper):
    """Generated binding for the simple ``tpLograd`` element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None  # remembers the tag name seen on parse

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a tpLograd, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpLograd)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpLograd.subclass:
            return tpLograd.subclass(*args_, **kwargs_)
        return tpLograd(*args_, **kwargs_)

    def hasContent_(self):
        # This generated class never holds children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='tpLograd', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpLograd')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpLograd')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpLograd'):
        pass  # no attributes defined for this element

    def exportChildren(self, outfile, level, namespace_='', name_='tpLograd', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # nothing to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # nothing to parse
# end class tpLograd
class dscLograd(GeneratedsSuper):
    """Generated binding for the simple ``dscLograd`` element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None  # remembers the tag name seen on parse

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a dscLograd, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dscLograd)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dscLograd.subclass:
            return dscLograd.subclass(*args_, **kwargs_)
        return dscLograd(*args_, **kwargs_)

    def hasContent_(self):
        # This generated class never holds children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='dscLograd', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dscLograd')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dscLograd')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dscLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dscLograd'):
        pass  # no attributes defined for this element

    def exportChildren(self, outfile, level, namespace_='', name_='dscLograd', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # nothing to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # nothing to parse
# end class dscLograd
class nrLograd(GeneratedsSuper):
    """Generated binding for the simple ``nrLograd`` element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None  # remembers the tag name seen on parse

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a nrLograd, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrLograd)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrLograd.subclass:
            return nrLograd.subclass(*args_, **kwargs_)
        return nrLograd(*args_, **kwargs_)

    def hasContent_(self):
        # This generated class never holds children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrLograd', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrLograd')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrLograd')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrLograd'):
        pass  # no attributes defined for this element

    def exportChildren(self, outfile, level, namespace_='', name_='nrLograd', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # nothing to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # nothing to parse
# end class nrLograd
class complemento(GeneratedsSuper):
    """Generated binding for the simple ``complemento`` element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None  # remembers the tag name seen on parse

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a complemento, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, complemento)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if complemento.subclass:
            return complemento.subclass(*args_, **kwargs_)
        return complemento(*args_, **kwargs_)

    def hasContent_(self):
        # This generated class never holds children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='complemento', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('complemento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='complemento')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='complemento', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='complemento'):
        pass  # no attributes defined for this element

    def exportChildren(self, outfile, level, namespace_='', name_='complemento', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # nothing to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # nothing to parse
# end class complemento
class bairro(GeneratedsSuper):
    """Generated binding for the simple ``bairro`` element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None  # remembers the tag name seen on parse

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a bairro, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, bairro)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if bairro.subclass:
            return bairro.subclass(*args_, **kwargs_)
        return bairro(*args_, **kwargs_)

    def hasContent_(self):
        # This generated class never holds children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='bairro', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('bairro')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='bairro')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='bairro', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='bairro'):
        pass  # no attributes defined for this element

    def exportChildren(self, outfile, level, namespace_='', name_='bairro', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # nothing to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # nothing to parse
# end class bairro
class cep(GeneratedsSuper):
    """Generated binding for the simple ``cep`` element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None  # remembers the tag name seen on parse

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a cep, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cep)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cep.subclass:
            return cep.subclass(*args_, **kwargs_)
        return cep(*args_, **kwargs_)

    def hasContent_(self):
        # This generated class never holds children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='cep', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cep')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cep')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cep', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cep'):
        pass  # no attributes defined for this element

    def exportChildren(self, outfile, level, namespace_='', name_='cep', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # nothing to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # nothing to parse
# end class cep
class codMunic(GeneratedsSuper):
    """Generated binding for the simple ``codMunic`` element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None  # remembers the tag name seen on parse

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a codMunic, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, codMunic)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if codMunic.subclass:
            return codMunic.subclass(*args_, **kwargs_)
        return codMunic(*args_, **kwargs_)

    def hasContent_(self):
        # This generated class never holds children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='codMunic', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codMunic')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codMunic')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codMunic', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codMunic'):
        pass  # no attributes defined for this element

    def exportChildren(self, outfile, level, namespace_='', name_='codMunic', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # nothing to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # nothing to parse
# end class codMunic
class uf(GeneratedsSuper):
    """Generated binding for the simple ``uf`` element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None  # remembers the tag name seen on parse

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a uf, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, uf)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if uf.subclass:
            return uf.subclass(*args_, **kwargs_)
        return uf(*args_, **kwargs_)

    def hasContent_(self):
        # This generated class never holds children or text content.
        return False

    def export(self, outfile, level, namespace_='', name_='uf', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('uf')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='uf')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='uf', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='uf'):
        pass  # no attributes defined for this element

    def exportChildren(self, outfile, level, namespace_='', name_='uf', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # nothing to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # nothing to parse
# end class uf
class TEnderecoExterior(GeneratedsSuper):
"""Informações do Endereço no Exterior"""
subclass = None
superclass = None
def __init__(self, paisResid=None, dscLograd=None, nrLograd=None, complemento=None, bairro=None, nmCid=None, codPostal=None):
self.original_tagname_ = None
self.paisResid = paisResid
self.dscLograd = dscLograd
self.nrLograd = nrLograd
self.complemento = complemento
self.bairro = bairro
self.nmCid = nmCid
self.codPostal = codPostal
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, TEnderecoExterior)
if subclass is not None:
return subclass(*args_, **kwargs_)
if TEnderecoExterior.subclass:
return TEnderecoExterior.subclass(*args_, **kwargs_)
else:
return TEnderecoExterior(*args_, **kwargs_)
factory = staticmethod(factory)
def get_paisResid(self): return self.paisResid
def set_paisResid(self, paisResid): self.paisResid = paisResid
def get_dscLograd(self): return self.dscLograd
def set_dscLograd(self, dscLograd): self.dscLograd = dscLograd
def get_nrLograd(self): return self.nrLograd
def set_nrLograd(self, nrLograd): self.nrLograd = nrLograd
def get_complemento(self): return self.complemento
def set_complemento(self, complemento): self.complemento = complemento
def get_bairro(self): return self.bairro
def set_bairro(self, bairro): self.bairro = bairro
def get_nmCid(self): return self.nmCid
def set_nmCid(self, nmCid): self.nmCid = nmCid
def get_codPostal(self): return self.codPostal
def set_codPostal(self, codPostal): self.codPostal = codPostal
def hasContent_(self):
if (
self.paisResid is not None or
self.dscLograd is not None or
self.nrLograd is not None or
self.complemento is not None or
self.bairro is not None or
self.nmCid is not None or
self.codPostal is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='TEnderecoExterior', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('TEnderecoExterior')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEnderecoExterior')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='TEnderecoExterior', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEnderecoExterior'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='TEnderecoExterior', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.paisResid is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%spaisResid>%s</%spaisResid>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.paisResid), input_name='paisResid')), namespace_, eol_))
if self.dscLograd is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdscLograd>%s</%sdscLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.dscLograd), input_name='dscLograd')), namespace_, eol_))
if self.nrLograd is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%snrLograd>%s</%snrLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrLograd), input_name='nrLograd')), namespace_, eol_))
if self.complemento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%scomplemento>%s</%scomplemento>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.complemento), input_name='complemento')), namespace_, eol_))
if self.bairro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sbairro>%s</%sbairro>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.bairro), input_name='bairro')), namespace_, eol_))
if self.nmCid is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%snmCid>%s</%snmCid>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmCid), input_name='nmCid')), namespace_, eol_))
if self.codPostal is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%scodPostal>%s</%scodPostal>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codPostal), input_name='codPostal')), namespace_, eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
    """Store the text of a recognized child element on the matching attribute.

    Every child of this element type is a plain string element whose tag name
    matches the instance attribute it populates, so a single membership test
    plus setattr replaces the per-tag branches.
    """
    _string_members = (
        'paisResid', 'dscLograd', 'nrLograd', 'complemento',
        'bairro', 'nmCid', 'codPostal',
    )
    if nodeName_ in _string_members:
        validated = self.gds_validate_string(child_.text, node, nodeName_)
        setattr(self, nodeName_, validated)
# end class TEnderecoExterior
class paisResid(GeneratedsSuper):
    """Generated binding for the empty ``paisResid`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, paisResid)
            if override is not None:
                return override(*args_, **kwargs_)
        target = paisResid.subclass or paisResid
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='paisResid', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('paisResid')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='paisResid')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='paisResid', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='paisResid'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='paisResid', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class paisResid
class nmCid(GeneratedsSuper):
    """Generated binding for the empty ``nmCid`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, nmCid)
            if override is not None:
                return override(*args_, **kwargs_)
        target = nmCid.subclass or nmCid
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='nmCid', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmCid')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmCid')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmCid', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmCid'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nmCid', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nmCid
class codPostal(GeneratedsSuper):
    """Generated binding for the empty ``codPostal`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, codPostal)
            if override is not None:
                return override(*args_, **kwargs_)
        target = codPostal.subclass or codPostal
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='codPostal', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codPostal')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codPostal')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codPostal', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codPostal'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='codPostal', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class codPostal
class TTrabEstrang(GeneratedsSuper):
    """Foreign worker information (Informações do Trabalhador Estrangeiro).

    Optional members: dtChegada (arrival date), classTrabEstrang (integer
    classification code), casadoBr and casadoBr-style string flags filhosBr.
    """
    subclass = None
    superclass = None
    def __init__(self, dtChegada=None, classTrabEstrang=None, casadoBr=None, filhosBr=None):
        self.original_tagname_ = None
        # dtChegada may arrive either as a date object or as an ISO
        # 'YYYY-MM-DD' string; strings are parsed into a date here.
        if isinstance(dtChegada, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtChegada, '%Y-%m-%d').date()
        else:
            initvalue_ = dtChegada
        self.dtChegada = initvalue_
        self.classTrabEstrang = classTrabEstrang
        self.casadoBr = casadoBr
        self.filhosBr = filhosBr
    def factory(*args_, **kwargs_):
        # Resolution order: module-registered subclass, then the class-level
        # `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TTrabEstrang)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TTrabEstrang.subclass:
            return TTrabEstrang.subclass(*args_, **kwargs_)
        else:
            return TTrabEstrang(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs, kept for backward compatibility with callers
    # of the generateDS-style API.
    def get_dtChegada(self): return self.dtChegada
    def set_dtChegada(self, dtChegada): self.dtChegada = dtChegada
    def get_classTrabEstrang(self): return self.classTrabEstrang
    def set_classTrabEstrang(self, classTrabEstrang): self.classTrabEstrang = classTrabEstrang
    def get_casadoBr(self): return self.casadoBr
    def set_casadoBr(self, casadoBr): self.casadoBr = casadoBr
    def get_filhosBr(self): return self.filhosBr
    def set_filhosBr(self, filhosBr): self.filhosBr = filhosBr
    def hasContent_(self):
        # True when at least one member is set, i.e. child elements must
        # be written on export.
        if (
            self.dtChegada is not None or
            self.classTrabEstrang is not None or
            self.casadoBr is not None or
            self.filhosBr is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TTrabEstrang', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TTrabEstrang')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # A parsed instance remembers the tag it was read from; reuse it.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TTrabEstrang')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TTrabEstrang', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No members set: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TTrabEstrang'):
        # This element type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TTrabEstrang', fromsubclass_=False, pretty_print=True):
        """Write each non-None member as a child element, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.dtChegada is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtChegada>%s</%sdtChegada>%s' % (namespace_, self.gds_format_date(self.dtChegada, input_name='dtChegada'), namespace_, eol_))
        if self.classTrabEstrang is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sclassTrabEstrang>%s</%sclassTrabEstrang>%s' % (namespace_, self.gds_format_integer(self.classTrabEstrang, input_name='classTrabEstrang'), namespace_, eol_))
        if self.casadoBr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scasadoBr>%s</%scasadoBr>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.casadoBr), input_name='casadoBr')), namespace_, eol_))
        if self.filhosBr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sfilhosBr>%s</%sfilhosBr>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.filhosBr), input_name='filhosBr')), namespace_, eol_))
    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member.

        dtChegada is parsed as a date, classTrabEstrang as an integer
        (raising a parse error on non-integer text), the rest as strings.
        """
        if nodeName_ == 'dtChegada':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtChegada = dval_
        elif nodeName_ == 'classTrabEstrang':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'classTrabEstrang')
            self.classTrabEstrang = ival_
        elif nodeName_ == 'casadoBr':
            casadoBr_ = child_.text
            casadoBr_ = self.gds_validate_string(casadoBr_, node, 'casadoBr')
            self.casadoBr = casadoBr_
        elif nodeName_ == 'filhosBr':
            filhosBr_ = child_.text
            filhosBr_ = self.gds_validate_string(filhosBr_, node, 'filhosBr')
            self.filhosBr = filhosBr_
# end class TTrabEstrang
class dtChegada(GeneratedsSuper):
    """Generated binding for the empty ``dtChegada`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, dtChegada)
            if override is not None:
                return override(*args_, **kwargs_)
        target = dtChegada.subclass or dtChegada
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='dtChegada', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtChegada')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtChegada')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtChegada', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtChegada'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtChegada', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtChegada
class classTrabEstrang(GeneratedsSuper):
    """Generated binding for the empty ``classTrabEstrang`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, classTrabEstrang)
            if override is not None:
                return override(*args_, **kwargs_)
        target = classTrabEstrang.subclass or classTrabEstrang
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='classTrabEstrang', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('classTrabEstrang')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='classTrabEstrang')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='classTrabEstrang', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='classTrabEstrang'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='classTrabEstrang', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class classTrabEstrang
class casadoBr(GeneratedsSuper):
    """Generated binding for the empty ``casadoBr`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, casadoBr)
            if override is not None:
                return override(*args_, **kwargs_)
        target = casadoBr.subclass or casadoBr
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='casadoBr', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('casadoBr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='casadoBr')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='casadoBr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='casadoBr'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='casadoBr', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class casadoBr
class filhosBr(GeneratedsSuper):
    """Generated binding for the empty ``filhosBr`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, filhosBr)
            if override is not None:
                return override(*args_, **kwargs_)
        target = filhosBr.subclass or filhosBr
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='filhosBr', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('filhosBr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='filhosBr')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='filhosBr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='filhosBr'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='filhosBr', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class filhosBr
class TDependente(GeneratedsSuper):
    """Dependent-person record (eSocial ``TDependente`` complex type).

    Optional members: tpDep (dependent type code), nmDep (name), dtNascto
    (birth date), cpfDep (CPF number), depIRRF / depSF / incTrab (string
    flags).
    """
    subclass = None
    superclass = None
    def __init__(self, tpDep=None, nmDep=None, dtNascto=None, cpfDep=None, depIRRF=None, depSF=None, incTrab=None):
        self.original_tagname_ = None
        self.tpDep = tpDep
        self.nmDep = nmDep
        # dtNascto may arrive either as a date object or as an ISO
        # 'YYYY-MM-DD' string; strings are parsed into a date here.
        if isinstance(dtNascto, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtNascto, '%Y-%m-%d').date()
        else:
            initvalue_ = dtNascto
        self.dtNascto = initvalue_
        self.cpfDep = cpfDep
        self.depIRRF = depIRRF
        self.depSF = depSF
        self.incTrab = incTrab
    def factory(*args_, **kwargs_):
        # Resolution order: module-registered subclass, then the class-level
        # `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TDependente)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TDependente.subclass:
            return TDependente.subclass(*args_, **kwargs_)
        else:
            return TDependente(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs, kept for backward compatibility with callers
    # of the generateDS-style API.
    def get_tpDep(self): return self.tpDep
    def set_tpDep(self, tpDep): self.tpDep = tpDep
    def get_nmDep(self): return self.nmDep
    def set_nmDep(self, nmDep): self.nmDep = nmDep
    def get_dtNascto(self): return self.dtNascto
    def set_dtNascto(self, dtNascto): self.dtNascto = dtNascto
    def get_cpfDep(self): return self.cpfDep
    def set_cpfDep(self, cpfDep): self.cpfDep = cpfDep
    def get_depIRRF(self): return self.depIRRF
    def set_depIRRF(self, depIRRF): self.depIRRF = depIRRF
    def get_depSF(self): return self.depSF
    def set_depSF(self, depSF): self.depSF = depSF
    def get_incTrab(self): return self.incTrab
    def set_incTrab(self, incTrab): self.incTrab = incTrab
    def hasContent_(self):
        # True when at least one member is set, i.e. child elements must
        # be written on export.
        if (
            self.tpDep is not None or
            self.nmDep is not None or
            self.dtNascto is not None or
            self.cpfDep is not None or
            self.depIRRF is not None or
            self.depSF is not None or
            self.incTrab is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TDependente', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TDependente')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # A parsed instance remembers the tag it was read from; reuse it.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TDependente')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TDependente', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No members set: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TDependente'):
        # This element type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TDependente', fromsubclass_=False, pretty_print=True):
        """Write each non-None member as a child element, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.tpDep is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpDep>%s</%stpDep>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.tpDep), input_name='tpDep')), namespace_, eol_))
        if self.nmDep is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snmDep>%s</%snmDep>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmDep), input_name='nmDep')), namespace_, eol_))
        if self.dtNascto is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtNascto>%s</%sdtNascto>%s' % (namespace_, self.gds_format_date(self.dtNascto, input_name='dtNascto'), namespace_, eol_))
        if self.cpfDep is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scpfDep>%s</%scpfDep>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cpfDep), input_name='cpfDep')), namespace_, eol_))
        if self.depIRRF is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdepIRRF>%s</%sdepIRRF>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.depIRRF), input_name='depIRRF')), namespace_, eol_))
        if self.depSF is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdepSF>%s</%sdepSF>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.depSF), input_name='depSF')), namespace_, eol_))
        if self.incTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sincTrab>%s</%sincTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.incTrab), input_name='incTrab')), namespace_, eol_))
    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member.

        dtNascto is parsed as a date; all other children are validated and
        stored as strings.
        """
        if nodeName_ == 'tpDep':
            tpDep_ = child_.text
            tpDep_ = self.gds_validate_string(tpDep_, node, 'tpDep')
            self.tpDep = tpDep_
        elif nodeName_ == 'nmDep':
            nmDep_ = child_.text
            nmDep_ = self.gds_validate_string(nmDep_, node, 'nmDep')
            self.nmDep = nmDep_
        elif nodeName_ == 'dtNascto':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtNascto = dval_
        elif nodeName_ == 'cpfDep':
            cpfDep_ = child_.text
            cpfDep_ = self.gds_validate_string(cpfDep_, node, 'cpfDep')
            self.cpfDep = cpfDep_
        elif nodeName_ == 'depIRRF':
            depIRRF_ = child_.text
            depIRRF_ = self.gds_validate_string(depIRRF_, node, 'depIRRF')
            self.depIRRF = depIRRF_
        elif nodeName_ == 'depSF':
            depSF_ = child_.text
            depSF_ = self.gds_validate_string(depSF_, node, 'depSF')
            self.depSF = depSF_
        elif nodeName_ == 'incTrab':
            incTrab_ = child_.text
            incTrab_ = self.gds_validate_string(incTrab_, node, 'incTrab')
            self.incTrab = incTrab_
# end class TDependente
class tpDep(GeneratedsSuper):
    """Generated binding for the empty ``tpDep`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, tpDep)
            if override is not None:
                return override(*args_, **kwargs_)
        target = tpDep.subclass or tpDep
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='tpDep', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpDep')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpDep')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpDep', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpDep'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpDep', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpDep
class nmDep(GeneratedsSuper):
    """Generated binding for the empty ``nmDep`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, nmDep)
            if override is not None:
                return override(*args_, **kwargs_)
        target = nmDep.subclass or nmDep
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='nmDep', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmDep')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmDep')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmDep', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmDep'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nmDep', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nmDep
class dtNascto(GeneratedsSuper):
    """Generated binding for the empty ``dtNascto`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, dtNascto)
            if override is not None:
                return override(*args_, **kwargs_)
        target = dtNascto.subclass or dtNascto
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='dtNascto', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtNascto')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtNascto')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtNascto', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtNascto'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtNascto', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtNascto
class cpfDep(GeneratedsSuper):
    """Generated binding for the empty ``cpfDep`` XML element."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, cpfDep)
            if override is not None:
                return override(*args_, **kwargs_)
        target = cpfDep.subclass or cpfDep
        return target(*args_, **kwargs_)

    def hasContent_(self):
        # This element type defines no members, so it never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='cpfDep', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cpfDep')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cpfDep')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cpfDep', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cpfDep'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='cpfDep', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Populate this instance from the XML element *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cpfDep
class depIRRF(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self):
self.original_tagname_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, depIRRF)
if subclass is not None:
return subclass(*args_, **kwargs_)
if depIRRF.subclass:
return depIRRF.subclass(*args_, **kwargs_)
else:
return depIRRF(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='depIRRF', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('depIRRF')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='depIRRF')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='depIRRF', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='depIRRF'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='depIRRF', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class depIRRF
class depSF(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self):
self.original_tagname_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, depSF)
if subclass is not None:
return subclass(*args_, **kwargs_)
if depSF.subclass:
return depSF.subclass(*args_, **kwargs_)
else:
return depSF(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='depSF', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('depSF')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='depSF')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='depSF', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='depSF'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='depSF', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class depSF
class incTrab(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self):
self.original_tagname_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, incTrab)
if subclass is not None:
return subclass(*args_, **kwargs_)
if incTrab.subclass:
return incTrab.subclass(*args_, **kwargs_)
else:
return incTrab(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='incTrab', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('incTrab')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='incTrab')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='incTrab', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='incTrab'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='incTrab', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class incTrab
class TContato(GeneratedsSuper):
"""Informações de Contato"""
subclass = None
superclass = None
def __init__(self, fonePrinc=None, foneAlternat=None, emailPrinc=None, emailAlternat=None):
self.original_tagname_ = None
self.fonePrinc = fonePrinc
self.foneAlternat = foneAlternat
self.emailPrinc = emailPrinc
self.emailAlternat = emailAlternat
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, TContato)
if subclass is not None:
return subclass(*args_, **kwargs_)
if TContato.subclass:
return TContato.subclass(*args_, **kwargs_)
else:
return TContato(*args_, **kwargs_)
factory = staticmethod(factory)
def get_fonePrinc(self): return self.fonePrinc
def set_fonePrinc(self, fonePrinc): self.fonePrinc = fonePrinc
def get_foneAlternat(self): return self.foneAlternat
def set_foneAlternat(self, foneAlternat): self.foneAlternat = foneAlternat
def get_emailPrinc(self): return self.emailPrinc
def set_emailPrinc(self, emailPrinc): self.emailPrinc = emailPrinc
def get_emailAlternat(self): return self.emailAlternat
def set_emailAlternat(self, emailAlternat): self.emailAlternat = emailAlternat
def hasContent_(self):
if (
self.fonePrinc is not None or
self.foneAlternat is not None or
self.emailPrinc is not None or
self.emailAlternat is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='TContato', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('TContato')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='TContato')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='TContato', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TContato'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='TContato', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.fonePrinc is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sfonePrinc>%s</%sfonePrinc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.fonePrinc), input_name='fonePrinc')), namespace_, eol_))
if self.foneAlternat is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sfoneAlternat>%s</%sfoneAlternat>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.foneAlternat), input_name='foneAlternat')), namespace_, eol_))
if self.emailPrinc is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%semailPrinc>%s</%semailPrinc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.emailPrinc), input_name='emailPrinc')), namespace_, eol_))
if self.emailAlternat is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%semailAlternat>%s</%semailAlternat>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.emailAlternat), input_name='emailAlternat')), namespace_, eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'fonePrinc':
fonePrinc_ = child_.text
fonePrinc_ = self.gds_validate_string(fonePrinc_, node, 'fonePrinc')
self.fonePrinc = fonePrinc_
elif nodeName_ == 'foneAlternat':
foneAlternat_ = child_.text
foneAlternat_ = self.gds_validate_string(foneAlternat_, node, 'foneAlternat')
self.foneAlternat = foneAlternat_
elif nodeName_ == 'emailPrinc':
emailPrinc_ = child_.text
emailPrinc_ = self.gds_validate_string(emailPrinc_, node, 'emailPrinc')
self.emailPrinc = emailPrinc_
elif nodeName_ == 'emailAlternat':
emailAlternat_ = child_.text
emailAlternat_ = self.gds_validate_string(emailAlternat_, node, 'emailAlternat')
self.emailAlternat = emailAlternat_
# end class TContato
class fonePrinc(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self):
self.original_tagname_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, fonePrinc)
if subclass is not None:
return subclass(*args_, **kwargs_)
if fonePrinc.subclass:
return fonePrinc.subclass(*args_, **kwargs_)
else:
return fonePrinc(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='fonePrinc', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('fonePrinc')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='fonePrinc')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='fonePrinc', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='fonePrinc'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='fonePrinc', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class fonePrinc
class foneAlternat(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self):
self.original_tagname_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, foneAlternat)
if subclass is not None:
return subclass(*args_, **kwargs_)
if foneAlternat.subclass:
return foneAlternat.subclass(*args_, **kwargs_)
else:
return foneAlternat(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='foneAlternat', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('foneAlternat')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='foneAlternat')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='foneAlternat', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='foneAlternat'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='foneAlternat', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class foneAlternat
class emailPrinc(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self):
self.original_tagname_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, emailPrinc)
if subclass is not None:
return subclass(*args_, **kwargs_)
if emailPrinc.subclass:
return emailPrinc.subclass(*args_, **kwargs_)
else:
return emailPrinc(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='emailPrinc', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('emailPrinc')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='emailPrinc')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='emailPrinc', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='emailPrinc'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='emailPrinc', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class emailPrinc
class emailAlternat(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self):
self.original_tagname_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, emailAlternat)
if subclass is not None:
return subclass(*args_, **kwargs_)
if emailAlternat.subclass:
return emailAlternat.subclass(*args_, **kwargs_)
else:
return emailAlternat(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='emailAlternat', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('emailAlternat')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='emailAlternat')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='emailAlternat', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='emailAlternat'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='emailAlternat', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class emailAlternat
GDSClassesMapping = {
'CNH': TCnh,
'CTPS': TCtps,
'OC': TOc,
'RG': TRg,
'RIC': TRic,
'RNE': TRne,
'brasil': TEnderecoBrasil,
'contato': TContato,
'dependente': TDependente,
'exterior': TEnderecoExterior,
'ideEmpregador': TEmpregador,
'ideEvento': TIdeEveTrab,
'trabEstrangeiro': TTrabEstrang,
}
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
print(USAGE_TEXT)
sys.exit(1)
def get_root_tag(node):
tag = Tag_pattern_.match(node.tag).groups()[-1]
rootClass = GDSClassesMapping.get(tag)
if rootClass is None:
rootClass = globals().get(tag)
return tag, rootClass
def parse(inFileName, silence=False):
parser = None
doc = parsexml_(inFileName, parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'eSocial'
rootClass = eSocial
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
if not silence:
sys.stdout.write('<?xml version="1.0" ?>\n')
rootObj.export(
sys.stdout, 0, name_=rootTag,
namespacedef_='',
pretty_print=True)
return rootObj
def parseEtree(inFileName, silence=False):
parser = None
doc = parsexml_(inFileName, parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'eSocial'
rootClass = eSocial
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
mapping = {}
rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
if not silence:
content = etree_.tostring(
rootElement, pretty_print=True,
xml_declaration=True, encoding="utf-8")
sys.stdout.write(content)
sys.stdout.write('\n')
return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
if sys.version_info.major == 2:
from StringIO import StringIO as IOBuffer
else:
from io import BytesIO as IOBuffer
parser = None
doc = parsexml_(IOBuffer(inString), parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'eSocial'
rootClass = eSocial
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
if not silence:
sys.stdout.write('<?xml version="1.0" ?>\n')
rootObj.export(
sys.stdout, 0, name_=rootTag,
namespacedef_='')
return rootObj
def parseLiteral(inFileName, silence=False):
parser = None
doc = parsexml_(inFileName, parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'eSocial'
rootClass = eSocial
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
if not silence:
sys.stdout.write('#from evtAltCadastral import *\n\n')
sys.stdout.write('import evtAltCadastral as model_\n\n')
sys.stdout.write('rootObj = model_.rootClass(\n')
rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
sys.stdout.write(')\n')
return rootObj
def main():
args = sys.argv[1:]
if len(args) == 1:
parse(args[0])
else:
usage()
if __name__ == '__main__':
#import pdb; pdb.set_trace()
main()
__all__ = [
"TCnh",
"TContato",
"TCtps",
"TDependente",
"TEmpregador",
"TEnderecoBrasil",
"TEnderecoExterior",
"TIdeEveTrab",
"TOc",
"TRg",
"TRic",
"TRne",
"TTrabEstrang",
"eSocial"
]
| 42.467809 | 243 | 0.61663 | 32,615 | 314,644 | 5.666013 | 0.021248 | 0.041251 | 0.027322 | 0.031364 | 0.807946 | 0.771474 | 0.751154 | 0.728161 | 0.703074 | 0.668826 | 0 | 0.002497 | 0.275842 | 314,644 | 7,408 | 244 | 42.473542 | 0.808542 | 0.016609 | 0 | 0.684218 | 1 | 0.000145 | 0.041669 | 0.007713 | 0 | 0 | 0 | 0 | 0 | 1 | 0.15507 | false | 0.043975 | 0.040069 | 0.019528 | 0.334587 | 0.084623 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bf3e900d699190d56fa5f59b1abb9cf7aaa6bbd2 | 97 | py | Python | chainer_compiler/elichika/__init__.py | vermashresth/chainer-compiler | 5f5ad365d14398d6ae0214fa012eb10360db8e7e | [
"MIT"
] | 116 | 2019-01-25T03:54:44.000Z | 2022-03-08T00:11:14.000Z | chainer_compiler/elichika/__init__.py | vermashresth/chainer-compiler | 5f5ad365d14398d6ae0214fa012eb10360db8e7e | [
"MIT"
] | 431 | 2019-01-25T10:18:44.000Z | 2020-06-17T05:28:55.000Z | chainer_compiler/elichika/__init__.py | vermashresth/chainer-compiler | 5f5ad365d14398d6ae0214fa012eb10360db8e7e | [
"MIT"
] | 26 | 2019-01-25T07:21:09.000Z | 2021-11-26T04:24:35.000Z | from chainer_compiler.elichika.chainer2onnx import compile_model, save_model, save_model_as_text
| 48.5 | 96 | 0.896907 | 14 | 97 | 5.785714 | 0.785714 | 0.222222 | 0.345679 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010989 | 0.061856 | 97 | 1 | 97 | 97 | 0.879121 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
bf78ac1262bf08f764899a4a4301e9b533f4e92e | 20,643 | py | Python | objects.py | juno-r1/ship-game | 0e9c6fff6d3897538207de7488975a75b20b1ad2 | [
"CC0-1.0"
] | null | null | null | objects.py | juno-r1/ship-game | 0e9c6fff6d3897538207de7488975a75b20b1ad2 | [
"CC0-1.0"
] | null | null | null | objects.py | juno-r1/ship-game | 0e9c6fff6d3897538207de7488975a75b20b1ad2 | [
"CC0-1.0"
] | null | null | null | import engine
import keyboard
import math
class Shooter:
def shoot(self):
pass
#if self.cooldown_timer > 1:
# self.cooldown_timer -= 1
#else:
# bullet = Bullet("sprites/bullet.txt", self.scene, x, y, self)
# bullet.set_x(x_vector)
# bullet.set_y(y_vector)
# self.cooldown_timer = self.cooldown
class Player(engine.PhysicsObject, Shooter):
def __init__(self, sprite, scene, x, y):
super().__init__(sprite, scene, x, y)
self.health = 100
self.i_frames = 0.5 * scene.fps
self.cooldown = 0.25 * scene.fps # In seconds, fps-independent
self.cooldown_timer = 0.25 * scene.fps # This one ticks down
self.speed = 20 / scene.fps # Per second, fps-independent
self.sunk = False
self.sink_cooldown = 0.1 * scene.fps
self.sink_timer = 0.1 * scene.fps # This one ticks down
def up(self):
self.x -= self.speed
def down(self):
self.x += self.speed
def left(self):
self.y -= self.speed
def right (self):
self.y += self.speed
def sink(self): # Reverses sunk state
if self.sink_timer > 1:
self.sink_timer -= 1
else:
if self.sunk == False:
self.sunk = True
with open('sprites/player_sunk.txt', 'r') as file: # Opens the file containing the sprite information
self.sprite = file.read().splitlines() # Basically readlines() without \n
else:
self.sunk = False
with open('sprites/player.txt', 'r') as file: # Opens the file containing the sprite information
self.sprite = file.read().splitlines() # Basically readlines() without \n
self.sink_timer = self.sink_cooldown
def player_input(self, renderer):
if keyboard.is_pressed('w') and self.x > 0:
self.up()
if keyboard.is_pressed('s') and self.x + self.size[0] < renderer.x_res:
self.down()
if keyboard.is_pressed('a') and self.y > 0:
self.left()
if keyboard.is_pressed('d') and self.y + self.size[1] < renderer.y_res:
self.right()
if keyboard.is_pressed('space'):
self.sink()
def collision(self):
hitbox = self.get_location() # Creates a list of cells the player occupies
for object in engine.ScreenObject.instances:
if object is not self and not (isinstance(object, Bullet) and object.friendly):
for point in object.get_location():
if point in hitbox:
if isinstance(object, Bullet):
self.scene.destroy(object)
return True
else:
return False
def damage(self, renderer):
if self.i_frames > 0:
self.i_frames -= 1
elif self.collision():
self.health -= 1
self.i_frames == 0.5 * self.scene.fps
if self.health == 0:
self.scene.destroy(self)
self.scene.active = False
def shoot(self):
if self.cooldown_timer > 1:
self.cooldown_timer -= 1
else:
bullet = Bullet("sprites/bullet.txt", self.scene, self.x - 1, self.y + 1, self)
bullet.set_x(-20 / self.scene.fps)
bullet.set_y(0)
self.cooldown_timer = self.cooldown
class Bullet(engine.PhysicsObject):
    """A projectile; ``friendly`` marks player-fired bullets (disables friendly fire)."""
    def __init__(self, sprite, scene, x, y, shooter):
        super().__init__(sprite, scene, x, y)
        # Bullets fired by the player are friendly; everything else is hostile.
        self.friendly = isinstance(shooter, Player)
class Enemy(engine.PhysicsObject, Shooter):
    """Base class for hostile ships: health, collision detection and damage."""
    def __init__(self, sprite, scene, x, y):
        super().__init__(sprite, scene, x, y)
        self.health = 10
        self.i_frames = 0
        self.cooldown = 1
        self.speed = 10 / scene.fps  # cells per frame, fps-independent
    def state(self):
        # Rudimentary state machine hook, overridden by subclasses.
        return None
    def collision(self):
        """Return True if a player attack overlaps this enemy.

        Hostile bullets are ignored; a friendly bullet that hits is
        destroyed.  Ramming by the player only counts while sunk.
        """
        hitbox = self.get_location()  # cells the enemy currently occupies
        for object in engine.ScreenObject.instances:
            if object is not self and not (isinstance(object, Bullet) and not object.friendly):
                for point in object.get_location():
                    # Proper hit detection for player: unsunk player passes through.
                    if point in hitbox and not (isinstance(object, Player) and not object.sunk):
                        if isinstance(object, Bullet):
                            self.scene.destroy(object)
                        return True
        return False
    def damage(self, renderer):
        """Tick invincibility frames and apply collision damage.

        renderer is unused; kept for interface parity with Player.damage().
        """
        if self.i_frames > 0:
            self.i_frames -= 1
        elif self.collision():
            self.health -= 1
            # BUG FIX: original used '==' (a no-op comparison); '=' is needed
            # so the enemy gains invincibility frames after being hit,
            # mirroring Player.damage().
            self.i_frames = 0.5 * self.scene.fps
            if self.health == 0:
                self.scene.destroy(self)
class Corvette(Enemy):
    """Small, fast, one-hit enemy with a named behaviour ('descend', 'swarm', 'nothing')."""
    def __init__(self, sprite, scene, x, y, state):
        super().__init__(sprite, scene, x, y)
        self.health = 1
        self.i_frames = 0
        self.cooldown = 0.25 * scene.fps
        self.cooldown_timer = 0.25 * scene.fps
        self.speed = 10 / scene.fps
        self.instruction = state  # name of the behaviour method run each frame
    def state(self):
        # getattr dispatch instead of the original eval() of a built string:
        # same behaviour for valid method names, without executing arbitrary code.
        return getattr(self, self.instruction)()
    def descend(self):
        # Move straight down the screen.
        self.set_x(self.speed)
    def swarm(self):
        # Weave down the screen along a sine path.
        self.set_x(self.speed)
        self.set_y(math.sin(self.x))
    def nothing(self):
        pass
class Destroyer(Enemy, Shooter):
    """Medium enemy that fires four diagonal bullets (NW, SW, NE, SE)."""
    def __init__(self, sprite, scene, x, y, state):
        super().__init__(sprite, scene, x, y)
        self.health = 5
        self.i_frames = 0
        self.cooldown = 0.25 * scene.fps        # in seconds, fps-independent
        self.cooldown_timer = 0.25 * scene.fps  # this one ticks down
        self.speed = 5 / scene.fps
        self.instruction = state  # name of the behaviour method run each frame
    def state(self):
        # getattr dispatch instead of the original eval() of a built string.
        return getattr(self, self.instruction)()
    def shoot(self):
        """Fire four diagonal bullets once the cooldown elapses."""
        if self.cooldown_timer > 1:
            self.cooldown_timer -= 1
            return
        velocity = 20 / self.scene.fps
        # (row offset, column offset, y direction) — spawn order matches
        # the original: NW, SW, NE, SE.
        for dx, dy, y_sign in ((1, 0, -1), (3, 0, -1), (1, 2, 1), (3, 2, 1)):
            bullet = Bullet("sprites/bullet.txt", self.scene, self.x + dx, self.y + dy, self)
            bullet.set_x(velocity)
            bullet.set_y(y_sign * velocity)
        self.cooldown_timer = self.cooldown
    def descend(self):
        # Move straight down the screen.
        self.set_x(self.speed)
    def swarm(self):
        # Weave down the screen along a (tighter) sine path.
        self.set_x(self.speed)
        self.set_y(math.sin(self.x * 2))
    def nothing(self):
        pass
class Cruiser(Enemy, Shooter):
    """Heavy enemy that fires six diagonal bullets from three rows."""
    def __init__(self, sprite, scene, x, y, state):
        super().__init__(sprite, scene, x, y)
        self.health = 20
        self.i_frames = 0
        self.cooldown = 0.25 * scene.fps        # in seconds, fps-independent
        self.cooldown_timer = 0.25 * scene.fps  # this one ticks down
        self.speed = 2 / scene.fps
        self.instruction = state  # name of the behaviour method run each frame
    def state(self):
        # getattr dispatch instead of the original eval() of a built string.
        return getattr(self, self.instruction)()
    def shoot(self):
        """Fire six diagonal bullets once the cooldown elapses."""
        if self.cooldown_timer > 1:
            self.cooldown_timer -= 1
            return
        velocity = 20 / self.scene.fps
        # (row offset, column offset, y direction) — spawn order matches
        # the original: NW, SMW, SW, NE, SME, SE.
        for dx, dy, y_sign in (
            (1, 0, -1), (3, 0, -1), (5, 0, -1),
            (1, 2, 1), (3, 2, 1), (5, 2, 1),
        ):
            bullet = Bullet("sprites/bullet.txt", self.scene, self.x + dx, self.y + dy, self)
            bullet.set_x(velocity)
            bullet.set_y(y_sign * velocity)
        self.cooldown_timer = self.cooldown
    def descend(self):
        # Move straight down the screen.
        self.set_x(self.speed)
    def swarm(self):
        # Weave down the screen along a (tighter) sine path.
        self.set_x(self.speed)
        self.set_y(math.sin(self.x * 2))
    def nothing(self):
        pass
class Ornstein(Enemy, Shooter):
    """Boss that tracks the player vertically, then dashes at them."""

    def __init__(self, sprite, scene, x, y, state, player):
        super().__init__(sprite, scene, x, y)
        self.health = 50
        self.i_frames = 0  # invincibility frames remaining after a hit
        self.cooldown = 0.25 * scene.fps  # In seconds, fps-independent
        self.cooldown_timer = 0.25 * scene.fps  # This one ticks down
        self.speed = 30 / scene.fps
        self.instruction = state  # name of the behaviour method to run
        self.base_x = x  # resting x position to retreat to between dashes
        self.ability = 5 * scene.fps  # In seconds, fps-independent
        self.ability_timer = 5 * scene.fps  # This one ticks down
        self.ability_active = False

    def state(self, player):
        # Fix: getattr dispatch instead of eval() on a format string --
        # same behaviour, no arbitrary-code-execution risk.
        return getattr(self, self.instruction)(player)

    def shoot(self):
        """Fire a 2x2 spread of bullets when off cooldown."""
        if self.cooldown_timer > 1:
            self.cooldown_timer -= 1
        else:
            # Columns at x offsets 1 and 5; top row travels up,
            # bottom row (y offset 2) down.
            for dx in (1, 5):
                for dy, vy in ((0, -20), (2, 20)):
                    bullet = Bullet("sprites/bullet.txt", self.scene,
                                    self.x + dx, self.y + dy, self)
                    bullet.set_x(20 / self.scene.fps)
                    bullet.set_y(vy / self.scene.fps)
            self.cooldown_timer = self.cooldown

    def pursue(self, player):
        """Mirror the player's y while charging, then dash at them."""
        # NOTE(review): the original's indentation was lost in extraction;
        # the grouping below is the reading consistent with ThatcherCannon's
        # parallel pursue() -- confirm against the original file.
        if self.ability_timer > 1 and not self.ability_active:
            # Charging: tick the ability timer, drift back toward base_x,
            # and track the player's vertical position.
            self.ability_timer -= 1
            if self.x > self.base_x:
                self.set_x(-0.5 * self.speed)
            else:
                self.set_x(0)
            self.y = player.y
        else:
            # Dash: rush right until level with the player, then stop
            # and rearm the ability.
            self.ability_active = True
            if self.x < player.x:
                self.set_x(self.speed)
            else:
                self.set_x(0)
                self.ability_active = False
                self.ability_timer = self.ability

    def collision(self):
        """Return True when struck by a friendly (player-fired) object."""
        hitbox = self.get_location()  # cells this enemy currently occupies
        for obj in engine.ScreenObject.instances:
            # Skip ourselves and enemy-fired (non-friendly) bullets.
            if obj is not self and not (isinstance(obj, Bullet) and not obj.friendly):
                for point in obj.get_location():
                    if point in hitbox and not isinstance(obj, Player):
                        if isinstance(obj, Bullet):
                            self.scene.destroy(obj)
                        return True
        return False

    def damage(self, renderer):
        """Apply one point of damage per hit, honouring i-frames."""
        if self.i_frames > 0:
            self.i_frames -= 1
        elif self.collision():
            self.health -= 1
            # Bug fix: the original used '==' here -- a no-op comparison --
            # so the invincibility window was never actually granted.
            self.i_frames = 0.25 * self.scene.fps
            if self.health <= 0:  # was '== 0'; '<=' is robust to overshoot
                self.scene.destroy(self)
class Smough(Enemy, Shooter):
    """Boss with a four-stage charge/slam attack cycle."""

    def __init__(self, sprite, scene, x, y, state, player):
        super().__init__(sprite, scene, x, y)
        self.health = 50
        self.i_frames = 0  # invincibility frames remaining after a hit
        self.cooldown = 0.25 * scene.fps  # In seconds, fps-independent
        self.cooldown_timer = 0.25 * scene.fps  # This one ticks down
        self.speed = 15 / scene.fps
        self.instruction = state  # name of the behaviour method to run
        self.base_x = x  # resting x position to return to after an attack
        self.ability = 5 * scene.fps  # In seconds, fps-independent
        self.ability_timer = 5 * scene.fps  # This one ticks down
        self.ability_stage = 0  # 0=charging, 1=advance, 2=slam, 3=retreat

    def state(self, player):
        # Fix: getattr dispatch instead of eval() on a format string.
        return getattr(self, self.instruction)(player)

    def shoot(self):
        """Fire a tight 3x2 spread of bullets when off cooldown."""
        if self.cooldown_timer > 1:
            self.cooldown_timer -= 1
        else:
            # Columns at x offsets 2, 3, 4; top row (y offset 0) travels
            # up, bottom row (y offset 6, below the large sprite) down.
            for dx in (2, 3, 4):
                for dy, vy in ((0, -20), (6, 20)):
                    bullet = Bullet("sprites/bullet.txt", self.scene,
                                    self.x + dx, self.y + dy, self)
                    bullet.set_x(20 / self.scene.fps)
                    bullet.set_y(vy / self.scene.fps)
            self.cooldown_timer = self.cooldown

    def pursue(self, player):
        """Run the staged attack: wait, advance, vertical slam, retreat."""
        # NOTE(review): original indentation lost in extraction; stage
        # transitions below follow the apparent state-machine intent --
        # confirm against the original file.
        if self.ability_timer > 1 and self.ability_stage == 0:
            # Stage 0: idle until the ability timer runs out.
            self.ability_timer -= 1
        else:
            if self.ability_stage == 0:
                self.ability_stage = 1
            elif self.ability_stage == 1:
                # Advance right until level with the player.
                if self.x < player.x:
                    self.set_x(self.speed)
                else:
                    self.set_x(0)
                    self.ability_stage = 2
            elif self.ability_stage == 2:
                # Slam vertically toward the player at double speed,
                # stopping early on contact.
                if self.y < player.y and not self.collision():
                    self.set_y(2 * self.speed)
                elif self.y > player.y and not self.collision():
                    self.set_y(-2 * self.speed)
                else:
                    self.set_y(0)
                    self.ability_stage = 3
            elif self.ability_stage == 3:
                # Retreat to base_x, then rearm and restart the cycle.
                if self.x > self.base_x:
                    self.set_x(-1 * self.speed)
                else:
                    self.set_x(0)
                    self.ability_timer = self.ability
                    self.ability_stage = 0

    def collision(self):
        """Return True when struck by a friendly (player-fired) object."""
        hitbox = self.get_location()  # cells this enemy currently occupies
        for obj in engine.ScreenObject.instances:
            # Skip ourselves and enemy-fired (non-friendly) bullets.
            if obj is not self and not (isinstance(obj, Bullet) and not obj.friendly):
                for point in obj.get_location():
                    if point in hitbox and not isinstance(obj, Player):
                        if isinstance(obj, Bullet):
                            self.scene.destroy(obj)
                        return True
        return False

    def damage(self, renderer):
        """Apply one point of damage per hit, honouring i-frames."""
        if self.i_frames > 0:
            self.i_frames -= 1
        elif self.collision():
            self.health -= 1
            # Bug fix: the original used '==' (no-op comparison), so the
            # invincibility window was never actually granted.
            self.i_frames = 0.25 * self.scene.fps
            if self.health <= 0:  # was '== 0'; '<=' is robust to overshoot
                self.scene.destroy(self)
class Thatcher(Enemy, Shooter):
    """Stationary boss that fires alternating walls of bullets."""

    def __init__(self, sprite, scene, x, y, state, player):
        super().__init__(sprite, scene, x, y)
        self.health = 500
        self.i_frames = 0  # invincibility frames remaining after a hit
        self.cooldown = 0.50 * scene.fps  # In seconds, fps-independent
        self.cooldown_timer = 0.50 * scene.fps  # This one ticks down
        self.instruction = state  # name of the behaviour method to run

    def state(self, player):
        # Fix: getattr dispatch instead of eval() on a format string.
        return getattr(self, self.instruction)(player)

    def shoot(self):
        """Fire a vertical wall of bullets when off cooldown.

        The wall's starting y offset alternates with the frame counter
        (1 vs 2) so consecutive volleys leave moving gaps for the player.
        """
        if self.cooldown_timer > 1:
            self.cooldown_timer -= 1
        else:
            # The original had two duplicated loops differing only in the
            # range start; merged into one with a computed start offset.
            start = 1 if self.scene.frame % 3 == 0 else 2
            for dy in range(start, 40, 4):
                bullet = Bullet("sprites/bullet.txt", self.scene,
                                self.size[0], self.y + dy, self)
                bullet.set_x(20 / self.scene.fps)
            self.cooldown_timer = self.cooldown

    def nothing(self, player):
        # Idle behaviour: intentionally does nothing.
        pass

    def collision(self):
        """Return True when struck by a friendly (player-fired) object."""
        hitbox = self.get_location()  # cells this enemy currently occupies
        for obj in engine.ScreenObject.instances:
            # Skip ourselves and enemy-fired (non-friendly) bullets.
            if obj is not self and not (isinstance(obj, Bullet) and not obj.friendly):
                for point in obj.get_location():
                    # Also ignore the attached cannon -- it overlaps us.
                    if point in hitbox and not isinstance(obj, (Player, ThatcherCannon)):
                        if isinstance(obj, Bullet):
                            self.scene.destroy(obj)
                        return True
        return False

    def damage(self, renderer):
        """Apply one point of damage per hit, honouring i-frames."""
        if self.i_frames > 0:
            self.i_frames -= 1
        elif self.collision():
            self.health -= 1
            # Bug fix: the original used '==' (no-op comparison), so the
            # invincibility window was never actually granted.
            self.i_frames = 0.25 * self.scene.fps
            if self.health <= 0:  # was '== 0'; '<=' is robust to overshoot
                self.scene.destroy(self)
class ThatcherCannon(Enemy, Shooter):
    """Turret that tracks the player, then unleashes a rapid-fire burst.

    NOTE(review): the original's final line was fused with extraction
    residue; it is reconstructed here as ``self.ability_active = False``
    (matching Ornstein's parallel pursue()) -- confirm against the
    original file.
    """

    def __init__(self, sprite, scene, x, y, state, player):
        super().__init__(sprite, scene, x, y)
        self.cooldown = 0.25 * scene.fps  # In seconds, fps-independent
        self.cooldown_timer = 0.25 * scene.fps  # This one ticks down
        self.speed = 30 / scene.fps
        self.instruction = state  # name of the behaviour method to run
        self.base_x = x  # resting x position to retreat to
        self.ability = 10 * scene.fps  # In seconds, fps-independent
        self.ability_timer = 10 * scene.fps  # This one ticks down
        self.ability_active = False

    def state(self, player):
        # Fix: getattr dispatch instead of eval() on a format string.
        return getattr(self, self.instruction)(player)

    def shoot(self):
        """Fire a fast four-bullet column when off cooldown."""
        if self.cooldown_timer > 1:
            self.cooldown_timer -= 1
        else:
            for dy in range(1, 5):
                bullet = Bullet("sprites/bullet.txt", self.scene,
                                self.x, self.y + dy, self)
                bullet.set_x(50 / self.scene.fps)
            self.cooldown_timer = self.cooldown

    def pursue(self, player):
        """Track the player while charging, then barrage until rearmed."""
        if self.ability_timer > 1 and not self.ability_active:
            # Charging: tick the ability timer, drift back toward base_x,
            # and mirror the player's vertical position.
            self.ability_timer -= 1
            if self.x > self.base_x:
                self.set_x(-0.5 * self.speed)
            else:
                self.set_x(0)
            self.y = player.y
        else:
            # Burst: keep shooting while refilling the timer (+5/frame);
            # once refilled, deactivate and resume charging.
            self.ability_active = True
            if self.ability_timer < self.ability:
                self.shoot()
                self.ability_timer += 5
            else:
                self.ability_active = False
bf9ad1dff3fecc7c60e6349ef65281b706c53a4c | 164 | py | Python | package_archetype/hello_world.py | chrisoliver345/python-processing-package | ebbcde505eda3474d2c60dd4fbdd66cfdacd7928 | [
"MIT"
] | 3 | 2018-01-10T12:41:42.000Z | 2021-08-19T01:58:51.000Z | package_archetype/hello_world.py | chrisoliver345/python-processing-package | ebbcde505eda3474d2c60dd4fbdd66cfdacd7928 | [
"MIT"
] | null | null | null | package_archetype/hello_world.py | chrisoliver345/python-processing-package | ebbcde505eda3474d2c60dd4fbdd66cfdacd7928 | [
"MIT"
] | 6 | 2017-10-19T19:36:17.000Z | 2020-09-27T04:39:29.000Z | HELLO_WORLD_MESSAGE = 'Hello world! PyOhio Demo - 3! CLEpy'
def get_message():
return HELLO_WORLD_MESSAGE
def print_hello_world():
print(get_message())
| 16.4 | 59 | 0.731707 | 23 | 164 | 4.869565 | 0.478261 | 0.357143 | 0.303571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007353 | 0.170732 | 164 | 9 | 60 | 18.222222 | 0.816176 | 0 | 0 | 0 | 0 | 0 | 0.213415 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0 | 0.2 | 0.6 | 0.4 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
44a8a96dc55bac3d57c6cf03dc31545457fd9704 | 52,556 | py | Python | octopus_deploy_swagger_client/octopus_deploy_client/proxies_api.py | cvent/octopus-deploy-api-client | 0e03e842e1beb29b132776aee077df570b88366a | [
"Apache-2.0"
] | null | null | null | octopus_deploy_swagger_client/octopus_deploy_client/proxies_api.py | cvent/octopus-deploy-api-client | 0e03e842e1beb29b132776aee077df570b88366a | [
"Apache-2.0"
] | null | null | null | octopus_deploy_swagger_client/octopus_deploy_client/proxies_api.py | cvent/octopus-deploy-api-client | 0e03e842e1beb29b132776aee077df570b88366a | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Octopus Server API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2019.6.7+Branch.tags-2019.6.7.Sha.aa18dc6809953218c66f57eff7d26481d9b23d6a
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from octopus_deploy_swagger_client.api_client import ApiClient
class ProxiesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_response_descriptor_machines_proxy_proxy_resource(self, **kwargs): # noqa: E501
"""Create a ProxyResource # noqa: E501
Creates a proxy. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_machines_proxy_proxy_resource(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ProxyResource proxy_resource: The ProxyResource resource to create
:return: ProxyResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_response_descriptor_machines_proxy_proxy_resource_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_response_descriptor_machines_proxy_proxy_resource_with_http_info(**kwargs) # noqa: E501
return data
def create_response_descriptor_machines_proxy_proxy_resource_with_http_info(self, **kwargs):  # noqa: E501
    """Create a ProxyResource  # noqa: E501

    Creates a proxy.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_response_descriptor_machines_proxy_proxy_resource_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ProxyResource proxy_resource: The ProxyResource resource to create
    :return: ProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """

    # Every keyword argument this endpoint accepts, including the
    # client-internal control flags.
    all_params = ['proxy_resource']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() holds `self` and `kwargs`; the loop validates each kwarg
    # and flattens it into that dict so later lookups use `params[...]`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail loudly on typos / unsupported arguments.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_response_descriptor_machines_proxy_proxy_resource" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The ProxyResource payload, when supplied, becomes the request body.
    body_params = None
    if 'proxy_resource' in params:
        body_params = params['proxy_resource']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/proxies', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProxyResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_response_descriptor_machines_proxy_proxy_resource_spaces(self, base_space_id, **kwargs): # noqa: E501
"""Create a ProxyResource # noqa: E501
Creates a proxy. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_machines_proxy_proxy_resource_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param ProxyResource proxy_resource: The ProxyResource resource to create
:return: ProxyResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.create_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
def create_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(self, base_space_id, **kwargs):  # noqa: E501
    """Create a ProxyResource  # noqa: E501

    Creates a proxy.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param ProxyResource proxy_resource: The ProxyResource resource to create
    :return: ProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """

    # Every keyword argument this endpoint accepts, including the
    # client-internal control flags.
    all_params = ['base_space_id', 'proxy_resource']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() holds `self`, `base_space_id` and `kwargs`; the loop
    # validates each kwarg and flattens it into that dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail loudly on typos / unsupported arguments.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_response_descriptor_machines_proxy_proxy_resource_spaces" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'base_space_id' is set
    if ('base_space_id' not in params or
            params['base_space_id'] is None):
        raise ValueError("Missing the required parameter `base_space_id` when calling `create_response_descriptor_machines_proxy_proxy_resource_spaces`")  # noqa: E501

    collection_formats = {}

    # base_space_id fills the {baseSpaceId} placeholder in the URL path.
    path_params = {}
    if 'base_space_id' in params:
        path_params['baseSpaceId'] = params['base_space_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The ProxyResource payload, when supplied, becomes the request body.
    body_params = None
    if 'proxy_resource' in params:
        body_params = params['proxy_resource']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/proxies', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProxyResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_on_background_response_descriptor_machines_proxy_proxy_resource(self, id, **kwargs): # noqa: E501
"""Delete a ProxyResource by ID # noqa: E501
Deletes an existing proxy. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_on_background_response_descriptor_machines_proxy_proxy_resource(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the ProxyResource to delete (required)
:return: TaskResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_on_background_response_descriptor_machines_proxy_proxy_resource_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_on_background_response_descriptor_machines_proxy_proxy_resource_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_on_background_response_descriptor_machines_proxy_proxy_resource_with_http_info(self, id, **kwargs):  # noqa: E501
    """Delete a ProxyResource by ID  # noqa: E501

    Deletes an existing proxy.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_on_background_response_descriptor_machines_proxy_proxy_resource_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the ProxyResource to delete (required)
    :return: TaskResource
             If the method is called asynchronously,
             returns the request thread.
    """

    # Every keyword argument this endpoint accepts, including the
    # client-internal control flags.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() holds `self`, `id` and `kwargs`; the loop validates each
    # kwarg and flattens it into that dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail loudly on typos / unsupported arguments.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_on_background_response_descriptor_machines_proxy_proxy_resource" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `delete_on_background_response_descriptor_machines_proxy_proxy_resource`")  # noqa: E501

    collection_formats = {}

    # id fills the {id} placeholder in the URL path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/proxies/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TaskResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_on_background_response_descriptor_machines_proxy_proxy_resource_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""Delete a ProxyResource by ID # noqa: E501
Deletes an existing proxy. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_on_background_response_descriptor_machines_proxy_proxy_resource_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the ProxyResource to delete (required)
:return: TaskResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_on_background_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.delete_on_background_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def delete_on_background_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
    """Delete a ProxyResource by ID  # noqa: E501

    Deletes an existing proxy.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_on_background_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the ProxyResource to delete (required)
    :return: TaskResource
             If the method is called asynchronously,
             returns the request thread.
    """

    # Every keyword argument this endpoint accepts, including the
    # client-internal control flags.
    all_params = ['base_space_id', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() holds `self`, `base_space_id`, `id` and `kwargs`; the loop
    # validates each kwarg and flattens it into that dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail loudly on typos / unsupported arguments.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_on_background_response_descriptor_machines_proxy_proxy_resource_spaces" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'base_space_id' is set
    if ('base_space_id' not in params or
            params['base_space_id'] is None):
        raise ValueError("Missing the required parameter `base_space_id` when calling `delete_on_background_response_descriptor_machines_proxy_proxy_resource_spaces`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `delete_on_background_response_descriptor_machines_proxy_proxy_resource_spaces`")  # noqa: E501

    collection_formats = {}

    # base_space_id and id fill the {baseSpaceId} / {id} URL placeholders.
    path_params = {}
    if 'base_space_id' in params:
        path_params['baseSpaceId'] = params['base_space_id']  # noqa: E501
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/proxies/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TaskResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def index_response_descriptor_machines_proxy_proxy_resource(self, **kwargs): # noqa: E501
"""Get a list of ProxyResources # noqa: E501
Lists all of the proxies in the supplied Octopus Deploy Space. The results will be sorted alphabetically by name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_response_descriptor_machines_proxy_proxy_resource(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionProxyResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.index_response_descriptor_machines_proxy_proxy_resource_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.index_response_descriptor_machines_proxy_proxy_resource_with_http_info(**kwargs) # noqa: E501
return data
def index_response_descriptor_machines_proxy_proxy_resource_with_http_info(self, **kwargs):  # noqa: E501
    """Get a list of ProxyResources  # noqa: E501

    Lists all of the proxies in the supplied Octopus Deploy Space. The results will be sorted alphabetically by name.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.index_response_descriptor_machines_proxy_proxy_resource_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int skip: Number of items to skip
    :param int take: Number of items to take
    :return: ResourceCollectionProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """

    # Every keyword argument this endpoint accepts, including the
    # client-internal control flags.
    all_params = ['skip', 'take']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() holds `self` and `kwargs`; the loop validates each kwarg
    # and flattens it into that dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail loudly on typos / unsupported arguments.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method index_response_descriptor_machines_proxy_proxy_resource" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # skip/take paginate the listing; omitted when not supplied.
    query_params = []
    if 'skip' in params:
        query_params.append(('skip', params['skip']))  # noqa: E501
    if 'take' in params:
        query_params.append(('take', params['take']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/proxies', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResourceCollectionProxyResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def index_response_descriptor_machines_proxy_proxy_resource_spaces(self, base_space_id, **kwargs): # noqa: E501
"""Get a list of ProxyResources # noqa: E501
Lists all of the proxies in the supplied Octopus Deploy Space. The results will be sorted alphabetically by name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_response_descriptor_machines_proxy_proxy_resource_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionProxyResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.index_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.index_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
def index_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(self, base_space_id, **kwargs):  # noqa: E501
    """Get a list of ProxyResources  # noqa: E501

    Lists all of the proxies in the supplied Octopus Deploy Space. The results will be sorted alphabetically by name.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.index_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param int skip: Number of items to skip
    :param int take: Number of items to take
    :return: ResourceCollectionProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """

    # Every keyword argument this endpoint accepts, including the
    # client-internal control flags.
    all_params = ['base_space_id', 'skip', 'take']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() holds `self`, `base_space_id` and `kwargs`; the loop
    # validates each kwarg and flattens it into that dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail loudly on typos / unsupported arguments.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method index_response_descriptor_machines_proxy_proxy_resource_spaces" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'base_space_id' is set
    if ('base_space_id' not in params or
            params['base_space_id'] is None):
        raise ValueError("Missing the required parameter `base_space_id` when calling `index_response_descriptor_machines_proxy_proxy_resource_spaces`")  # noqa: E501

    collection_formats = {}

    # base_space_id fills the {baseSpaceId} placeholder in the URL path.
    path_params = {}
    if 'base_space_id' in params:
        path_params['baseSpaceId'] = params['base_space_id']  # noqa: E501

    # skip/take paginate the listing; omitted when not supplied.
    query_params = []
    if 'skip' in params:
        query_params.append(('skip', params['skip']))  # noqa: E501
    if 'take' in params:
        query_params.append(('take', params['take']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/proxies', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResourceCollectionProxyResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_all_response_descriptor_machines_proxy_proxy_resource(self, **kwargs):  # noqa: E501
    """Get a list of ProxyResources  # noqa: E501

    Lists the name and ID of all of the proxies in the supplied Octopus Deploy Space. The results will be sorted by name.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_all_response_descriptor_machines_proxy_proxy_resource(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[ProxyResource]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the *_with_http_info variant for the
    # payload only (never the (data, status, headers) tuple).  In async mode
    # the delegate returns the request thread, which we hand straight back.
    kwargs['_return_http_data_only'] = True
    return self.list_all_response_descriptor_machines_proxy_proxy_resource_with_http_info(**kwargs)  # noqa: E501
def list_all_response_descriptor_machines_proxy_proxy_resource_with_http_info(self, **kwargs):  # noqa: E501
    """Get a list of ProxyResources  # noqa: E501

    Lists the name and ID of all of the proxies in the supplied Octopus Deploy Space. The results will be sorted by name.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_all_response_descriptor_machines_proxy_proxy_resource_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[ProxyResource]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the generic transport options are accepted for this endpoint.
    accepted = ['async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']  # noqa: E501
    options = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_all_response_descriptor_machines_proxy_proxy_resource" % name
            )
        options[name] = value

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/proxies/all', 'GET',
        {},    # no path parameters
        [],    # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[ProxyResource]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=options.get('async_req'),
        _return_http_data_only=options.get('_return_http_data_only'),
        _preload_content=options.get('_preload_content', True),
        _request_timeout=options.get('_request_timeout'),
        collection_formats={})
def list_all_response_descriptor_machines_proxy_proxy_resource_spaces(self, base_space_id, **kwargs):  # noqa: E501
    """Get a list of ProxyResources  # noqa: E501

    Lists the name and ID of all of the proxies in the supplied Octopus Deploy Space. The results will be sorted by name.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_all_response_descriptor_machines_proxy_proxy_resource_spaces(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: list[ProxyResource]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the *_with_http_info variant for the
    # payload only.  In async mode the delegate returns the request thread.
    kwargs['_return_http_data_only'] = True
    return self.list_all_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, **kwargs)  # noqa: E501
def list_all_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(self, base_space_id, **kwargs):  # noqa: E501
    """Get a list of ProxyResources  # noqa: E501

    Lists the name and ID of all of the proxies in the supplied Octopus Deploy Space. The results will be sorted by name.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_all_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: list[ProxyResource]
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['base_space_id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']  # noqa: E501
    options = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_all_response_descriptor_machines_proxy_proxy_resource_spaces" % name
            )
        options[name] = value

    # verify the required parameter 'base_space_id' is set
    if base_space_id is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `list_all_response_descriptor_machines_proxy_proxy_resource_spaces`")  # noqa: E501

    path_params = {'baseSpaceId': base_space_id}  # noqa: E501
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/proxies/all', 'GET',
        path_params,
        [],    # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[ProxyResource]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=options.get('async_req'),
        _return_http_data_only=options.get('_return_http_data_only'),
        _preload_content=options.get('_preload_content', True),
        _request_timeout=options.get('_request_timeout'),
        collection_formats={})
def load_response_descriptor_machines_proxy_proxy_resource(self, id, **kwargs):  # noqa: E501
    """Get a ProxyResource by ID  # noqa: E501

    Gets a proxy by ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.load_response_descriptor_machines_proxy_proxy_resource(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the ProxyResource to load (required)
    :return: ProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the *_with_http_info variant for the
    # payload only.  In async mode the delegate returns the request thread.
    kwargs['_return_http_data_only'] = True
    return self.load_response_descriptor_machines_proxy_proxy_resource_with_http_info(id, **kwargs)  # noqa: E501
def load_response_descriptor_machines_proxy_proxy_resource_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get a ProxyResource by ID  # noqa: E501

    Gets a proxy by ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.load_response_descriptor_machines_proxy_proxy_resource_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the ProxyResource to load (required)
    :return: ProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']  # noqa: E501
    options = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method load_response_descriptor_machines_proxy_proxy_resource" % name
            )
        options[name] = value

    # verify the required parameter 'id' is set
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `load_response_descriptor_machines_proxy_proxy_resource`")  # noqa: E501

    path_params = {'id': id}  # noqa: E501
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/proxies/{id}', 'GET',
        path_params,
        [],    # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ProxyResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=options.get('async_req'),
        _return_http_data_only=options.get('_return_http_data_only'),
        _preload_content=options.get('_preload_content', True),
        _request_timeout=options.get('_request_timeout'),
        collection_formats={})
def load_response_descriptor_machines_proxy_proxy_resource_spaces(self, base_space_id, id, **kwargs):  # noqa: E501
    """Get a ProxyResource by ID  # noqa: E501

    Gets a proxy by ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.load_response_descriptor_machines_proxy_proxy_resource_spaces(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the ProxyResource to load (required)
    :return: ProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the *_with_http_info variant for the
    # payload only.  In async mode the delegate returns the request thread.
    kwargs['_return_http_data_only'] = True
    return self.load_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, id, **kwargs)  # noqa: E501
def load_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
    """Get a ProxyResource by ID  # noqa: E501

    Gets a proxy by ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.load_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the ProxyResource to load (required)
    :return: ProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['base_space_id', 'id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']  # noqa: E501
    options = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method load_response_descriptor_machines_proxy_proxy_resource_spaces" % name
            )
        options[name] = value

    # verify the required parameter 'base_space_id' is set
    if base_space_id is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `load_response_descriptor_machines_proxy_proxy_resource_spaces`")  # noqa: E501
    # verify the required parameter 'id' is set
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `load_response_descriptor_machines_proxy_proxy_resource_spaces`")  # noqa: E501

    path_params = {
        'baseSpaceId': base_space_id,  # noqa: E501
        'id': id,  # noqa: E501
    }
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/proxies/{id}', 'GET',
        path_params,
        [],    # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ProxyResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=options.get('async_req'),
        _return_http_data_only=options.get('_return_http_data_only'),
        _preload_content=options.get('_preload_content', True),
        _request_timeout=options.get('_request_timeout'),
        collection_formats={})
def modify_response_descriptor_machines_proxy_proxy_resource(self, id, **kwargs):  # noqa: E501
    """Modify a ProxyResource by ID  # noqa: E501

    Modifies an existing proxy.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.modify_response_descriptor_machines_proxy_proxy_resource(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the ProxyResource to modify (required)
    :param ProxyResource proxy_resource: The ProxyResource resource to create
    :return: ProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the *_with_http_info variant for the
    # payload only.  In async mode the delegate returns the request thread.
    kwargs['_return_http_data_only'] = True
    return self.modify_response_descriptor_machines_proxy_proxy_resource_with_http_info(id, **kwargs)  # noqa: E501
def modify_response_descriptor_machines_proxy_proxy_resource_with_http_info(self, id, **kwargs):  # noqa: E501
    """Modify a ProxyResource by ID  # noqa: E501

    Modifies an existing proxy.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.modify_response_descriptor_machines_proxy_proxy_resource_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the ProxyResource to modify (required)
    :param ProxyResource proxy_resource: The ProxyResource resource to create
    :return: ProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['id', 'proxy_resource', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']  # noqa: E501
    options = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method modify_response_descriptor_machines_proxy_proxy_resource" % name
            )
        options[name] = value

    # verify the required parameter 'id' is set
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `modify_response_descriptor_machines_proxy_proxy_resource`")  # noqa: E501

    path_params = {'id': id}  # noqa: E501
    # The replacement resource travels in the request body (None if omitted).
    body_params = options.get('proxy_resource')
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/proxies/{id}', 'PUT',
        path_params,
        [],    # no query parameters
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='ProxyResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=options.get('async_req'),
        _return_http_data_only=options.get('_return_http_data_only'),
        _preload_content=options.get('_preload_content', True),
        _request_timeout=options.get('_request_timeout'),
        collection_formats={})
def modify_response_descriptor_machines_proxy_proxy_resource_spaces(self, base_space_id, id, **kwargs):  # noqa: E501
    """Modify a ProxyResource by ID  # noqa: E501

    Modifies an existing proxy.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.modify_response_descriptor_machines_proxy_proxy_resource_spaces(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the ProxyResource to modify (required)
    :param ProxyResource proxy_resource: The ProxyResource resource to create
    :return: ProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the *_with_http_info variant for the
    # payload only.  In async mode the delegate returns the request thread.
    kwargs['_return_http_data_only'] = True
    return self.modify_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, id, **kwargs)  # noqa: E501
def modify_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
    """Modify a ProxyResource by ID  # noqa: E501

    Modifies an existing proxy.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.modify_response_descriptor_machines_proxy_proxy_resource_spaces_with_http_info(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the ProxyResource to modify (required)
    :param ProxyResource proxy_resource: The ProxyResource resource to create
    :return: ProxyResource
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['base_space_id', 'id', 'proxy_resource', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']  # noqa: E501
    options = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method modify_response_descriptor_machines_proxy_proxy_resource_spaces" % name
            )
        options[name] = value

    # verify the required parameter 'base_space_id' is set
    if base_space_id is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `modify_response_descriptor_machines_proxy_proxy_resource_spaces`")  # noqa: E501
    # verify the required parameter 'id' is set
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `modify_response_descriptor_machines_proxy_proxy_resource_spaces`")  # noqa: E501

    path_params = {
        'baseSpaceId': base_space_id,  # noqa: E501
        'id': id,  # noqa: E501
    }
    # The replacement resource travels in the request body (None if omitted).
    body_params = options.get('proxy_resource')
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/proxies/{id}', 'PUT',
        path_params,
        [],    # no query parameters
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='ProxyResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=options.get('async_req'),
        _return_http_data_only=options.get('_return_http_data_only'),
        _preload_content=options.get('_preload_content', True),
        _request_timeout=options.get('_request_timeout'),
        collection_formats={})
| 43.542668 | 185 | 0.643333 | 6,160 | 52,556 | 5.176786 | 0.031494 | 0.043401 | 0.078272 | 0.093324 | 0.983913 | 0.983913 | 0.983913 | 0.980338 | 0.979711 | 0.979303 | 0 | 0.014787 | 0.275535 | 52,556 | 1,206 | 186 | 43.578773 | 0.822745 | 0.33157 | 0 | 0.825886 | 1 | 0 | 0.210031 | 0.078831 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038521 | false | 0 | 0.006163 | 0 | 0.101695 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
44c43befe0c4020c8e16bed956c8022cdffe4377 | 150 | py | Python | L1Trigger/RPCTrigger/python/rpcTriggerDigis_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | L1Trigger/RPCTrigger/python/rpcTriggerDigis_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | L1Trigger/RPCTrigger/python/rpcTriggerDigis_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
from L1Trigger.RPCTrigger.rpcTriggerDigis_cfi import *
from L1Trigger.RPCTrigger.RPCConeConfig_cff import *
| 30 | 54 | 0.86 | 18 | 150 | 7.055556 | 0.722222 | 0.204724 | 0.362205 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014599 | 0.086667 | 150 | 4 | 55 | 37.5 | 0.912409 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
44f077f2204c2e042234db481e33fa100bfbc8ee | 5,867 | py | Python | project/example/core/tests/test_metadata.py | ResonantGeoData/django-large-image | 43c9ef62d83c4d6f33d3b49cc41f2c678f296584 | [
"Apache-2.0"
] | 4 | 2021-12-19T09:26:39.000Z | 2022-02-14T07:34:39.000Z | project/example/core/tests/test_metadata.py | ResonantGeoData/django-large-image | 43c9ef62d83c4d6f33d3b49cc41f2c678f296584 | [
"Apache-2.0"
] | 19 | 2021-12-19T10:01:25.000Z | 2022-03-31T21:40:12.000Z | project/example/core/tests/test_metadata.py | ResonantGeoData/django-large-image | 43c9ef62d83c4d6f33d3b49cc41f2c678f296584 | [
"Apache-2.0"
] | null | null | null | import pytest
from rest_framework import status
@pytest.mark.django_db(transaction=True)
def test_metadata(authenticated_api_client, image_file_geotiff):
response = authenticated_api_client.get(
f'/api/image-file/{image_file_geotiff.pk}/info/metadata?projection=EPSG:3857'
)
assert status.is_success(response.status_code)
metadata = response.data
assert metadata['geospatial']
assert metadata['levels'] == 9
assert metadata['sizeX'] == metadata['sizeY']
assert metadata['tileWidth'] == metadata['tileHeight']
assert metadata['tileWidth'] == metadata['tileHeight']
@pytest.mark.django_db(transaction=True)
def test_metadata_vsi(authenticated_api_client, image_file_geotiff):
response = authenticated_api_client.get(
f'/api/vsi-image-file/{image_file_geotiff.pk}/info/metadata?projection=EPSG:3857'
)
assert status.is_success(response.status_code)
metadata = response.data
assert metadata['geospatial']
assert metadata['levels'] == 9
assert metadata['sizeX'] == metadata['sizeY']
assert metadata['tileWidth'] == metadata['tileHeight']
assert metadata['tileWidth'] == metadata['tileHeight']
@pytest.mark.django_db(transaction=True)
def test_metadata_s3(authenticated_api_client, s3_image_file_geotiff):
response = authenticated_api_client.get(
f'/api/s3-image-file/{s3_image_file_geotiff.pk}/info/metadata?projection=EPSG:3857'
)
assert status.is_success(response.status_code)
metadata = response.data
assert metadata['geospatial']
assert metadata['levels'] == 9
assert metadata['sizeX'] == metadata['sizeY']
assert metadata['tileWidth'] == metadata['tileHeight']
assert metadata['tileWidth'] == metadata['tileHeight']
@pytest.mark.django_db(transaction=True)
def test_metadata_s3_vsi(authenticated_api_client, s3_image_file_geotiff):
response = authenticated_api_client.get(
f'/api/s3-vsi-image-file/{s3_image_file_geotiff.pk}/info/metadata?projection=EPSG:3857'
)
assert status.is_success(response.status_code)
metadata = response.data
assert metadata['geospatial']
assert metadata['levels'] == 9
assert metadata['sizeX'] == metadata['sizeY']
assert metadata['tileWidth'] == metadata['tileHeight']
assert metadata['tileWidth'] == metadata['tileHeight']
@pytest.mark.django_db(transaction=True)
def test_metadata_internal(authenticated_api_client, image_file_geotiff):
response = authenticated_api_client.get(
f'/api/image-file/{image_file_geotiff.pk}/info/metadata_internal'
)
assert status.is_success(response.status_code)
metadata = response.data
assert metadata['geospatial']
assert metadata['driverLongName']
@pytest.mark.django_db(transaction=True)
def test_bands(authenticated_api_client, image_file_geotiff):
response = authenticated_api_client.get(f'/api/image-file/{image_file_geotiff.pk}/info/bands')
assert status.is_success(response.status_code)
bands = response.data
assert isinstance(bands[1], dict)
@pytest.mark.django_db(transaction=True)
def test_frames(authenticated_api_client, image_file_geotiff):
response = authenticated_api_client.get(f'/api/image-file/{image_file_geotiff.pk}/info/frames')
assert status.is_success(response.status_code)
data = response.data
assert isinstance(data['frames'], list)
assert isinstance(data['frames'][0], dict)
assert 'bands' in data['frames'][0]
@pytest.mark.django_db(transaction=True)
def test_band(authenticated_api_client, image_file_geotiff):
response = authenticated_api_client.get(
f'/api/image-file/{image_file_geotiff.pk}/info/band?band=1'
)
assert status.is_success(response.status_code)
band = response.data
assert band['interpretation']
@pytest.mark.django_db(transaction=True)
def test_metadata_ome(authenticated_api_client, ome_image):
response = authenticated_api_client.get(
f'/api/image-file/{ome_image.pk}/info/metadata?source=ometiff'
)
assert status.is_success(response.status_code)
metadata = response.data
assert 'frames' in metadata
assert len(metadata['frames'])
assert not metadata['geospatial']
assert metadata['sizeX'] == metadata['sizeY']
assert metadata['tileWidth'] == metadata['tileHeight']
assert metadata['tileWidth'] == metadata['tileHeight']
@pytest.mark.django_db(transaction=True)
def test_bad_source(authenticated_api_client, image_file_geotiff):
response = authenticated_api_client.get(
f'/api/image-file/{image_file_geotiff.pk}/info/metadata?source=foo'
)
assert status.is_client_error(response.status_code)
@pytest.mark.django_db(transaction=True)
def test_bad_image_data(authenticated_api_client, lonely_header_file):
# Catches server error safely and returns 500-level APIException
response = authenticated_api_client.get(
f'/api/image-file/{lonely_header_file.pk}/info/metadata'
)
assert status.is_server_error(response.status_code)
@pytest.mark.django_db(transaction=True)
def test_tiffdump(authenticated_api_client, s3_image_file_geotiff, png_image):
response = authenticated_api_client.get(
f'/api/s3-image-file/{s3_image_file_geotiff.pk}/info/tiffdump'
)
assert status.is_success(response.status_code)
dump = response.data
assert 'firstifd' in dump
assert 'size' in dump
assert dump['ifds']
# Server error raised when image isn't accessible locally
response = authenticated_api_client.get(
f'/api/s3-vsi-image-file/{s3_image_file_geotiff.pk}/info/tiffdump'
)
assert status.is_server_error(response.status_code)
# Client error raised when image is not a tiff
response = authenticated_api_client.get(f'/api/image-file/{png_image.pk}/info/tiffdump')
assert status.is_client_error(response.status_code)
| 38.598684 | 99 | 0.747571 | 754 | 5,867 | 5.583554 | 0.114058 | 0.074822 | 0.135867 | 0.099762 | 0.823753 | 0.823753 | 0.820428 | 0.777197 | 0.719715 | 0.662708 | 0 | 0.007883 | 0.135163 | 5,867 | 151 | 100 | 38.854305 | 0.821837 | 0.027783 | 0 | 0.54918 | 0 | 0.02459 | 0.224561 | 0.15386 | 0 | 0 | 0 | 0 | 0.409836 | 1 | 0.098361 | false | 0 | 0.016393 | 0 | 0.114754 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
44f08b1b2bd27868d7c54568e78abbd8698b52df | 9,168 | py | Python | src/tt_timers/tt_timers/tests/test_handlers.py | al-arz/the-tale | 542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5 | [
"BSD-3-Clause"
] | 85 | 2017-11-21T12:22:02.000Z | 2022-03-27T23:07:17.000Z | src/tt_timers/tt_timers/tests/test_handlers.py | al-arz/the-tale | 542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5 | [
"BSD-3-Clause"
] | 545 | 2017-11-04T14:15:04.000Z | 2022-03-27T14:19:27.000Z | src/tt_timers/tt_timers/tests/test_handlers.py | al-arz/the-tale | 542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5 | [
"BSD-3-Clause"
] | 45 | 2017-11-11T12:36:30.000Z | 2022-02-25T06:10:44.000Z | import datetime
from aiohttp import test_utils
from tt_protocol.protocol import timers_pb2
from tt_web import postgresql as db
from .. import operations
from . import helpers
class CreateTimerTests(helpers.BaseTests):
@test_utils.unittest_run_loop
async def test_new_timer(self):
request = await self.client.post('/create-timer', data=timers_pb2.CreateTimerRequest(owner_id=667,
entity_id=777,
type=3,
speed=4,
border=500,
resources=66,
callback_data='abc').SerializeToString())
answer = await self.check_success(request, timers_pb2.CreateTimerResponse)
queue_timer_id, queue_finish_at = operations.TIMERS_QUEUE.first()
self.assertEqual(answer.timer.id, queue_timer_id)
self.assertEqual(datetime.datetime.fromtimestamp(answer.timer.finish_at), queue_finish_at)
results = await db.sql('SELECT * FROM timers')
self.assertEqual(len(results), 1)
self.assertEqual(results[0]['id'], answer.timer.id)
self.assertEqual(results[0]['owner'], 667)
self.assertEqual(results[0]['entity'], 777)
self.assertEqual(results[0]['type'], 3)
self.assertEqual(results[0]['speed'], 4)
self.assertEqual(results[0]['border'], 500)
self.assertEqual(results[0]['resources'], 66)
self.assertEqual(results[0]['finish_at'].replace(tzinfo=None), datetime.datetime.fromtimestamp(answer.timer.finish_at))
self.assertEqual(results[0]['data'], {'callback_data': 'abc'})
@test_utils.unittest_run_loop
async def test_duplicate_timer(self):
request = await self.client.post('/create-timer', data=timers_pb2.CreateTimerRequest(owner_id=667,
entity_id=777,
type=3,
speed=4,
border=500,
resources=0,
callback_data='abc').SerializeToString())
request = await self.client.post('/create-timer', data=timers_pb2.CreateTimerRequest(owner_id=667,
entity_id=777,
type=3,
speed=4,
border=500,
callback_data='abc').SerializeToString())
await self.check_error(request, error='timers.create_timer.duplicate_timer')
results = await db.sql('SELECT * FROM timers')
self.assertEqual(len(results), 1)
@test_utils.unittest_run_loop
async def test_undefined_type(self):
request = await self.client.post('/create-timer', data=timers_pb2.CreateTimerRequest(owner_id=667,
entity_id=777,
type=76574,
speed=4,
border=500,
callback_data='abc').SerializeToString())
await self.check_error(request, error='timers.create_timer.unknown_type')
class ChangeSpeedTests(helpers.BaseTests):
    """Tests for the /change-speed handler."""

    async def _create_timer(self, **fields):
        """POST /create-timer with the given protobuf request fields."""
        message = timers_pb2.CreateTimerRequest(**fields)
        return await self.client.post('/create-timer', data=message.SerializeToString())

    async def _change_speed(self, **fields):
        """POST /change-speed with the given protobuf request fields."""
        message = timers_pb2.ChangeSpeedRequest(**fields)
        return await self.client.post('/change-speed', data=message.SerializeToString())

    @test_utils.unittest_run_loop
    async def test_success(self):
        await self._create_timer(owner_id=667, entity_id=777, type=3,
                                 speed=4, border=500, resources=66,
                                 callback_data='abc')
        request = await self._change_speed(owner_id=667, entity_id=777,
                                           type=3, speed=40)
        answer = await self.check_success(request, timers_pb2.ChangeSpeedResponse)

        # The queue entry must match the timer returned in the answer.
        queue_timer_id, queue_finish_at = operations.TIMERS_QUEUE.first()
        self.assertEqual(answer.timer.id, queue_timer_id)
        self.assertEqual(datetime.datetime.fromtimestamp(answer.timer.finish_at), queue_finish_at)

        # And the stored row must reflect the new speed.
        results = await db.sql('SELECT * FROM timers')
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['id'], answer.timer.id)
        self.assertEqual(results[0]['owner'], 667)
        self.assertEqual(results[0]['entity'], 777)
        self.assertEqual(results[0]['type'], 3)
        self.assertEqual(results[0]['speed'], 40)

    @test_utils.unittest_run_loop
    async def test_no_timer_found(self):
        await self._create_timer(owner_id=667, entity_id=777, type=3,
                                 speed=4, border=500, resources=66,
                                 callback_data='abc')
        # entity_id=888 does not match the created timer.
        request = await self._change_speed(owner_id=667, entity_id=888,
                                           type=3, speed=40)
        await self.check_error(request, error='timers.change_speed.timer_not_found')

    @test_utils.unittest_run_loop
    async def test_undefined_type(self):
        await self._create_timer(owner_id=667, entity_id=777, type=3,
                                 speed=4, border=500, callback_data='abc')
        # type=4 is not a known timer type.
        request = await self._change_speed(owner_id=667, entity_id=777,
                                           type=4, speed=40)
        await self.check_error(request, error='timers.change_speed.unknown_type')
| 64.111888 | 134 | 0.373037 | 623 | 9,168 | 5.311396 | 0.133226 | 0.095195 | 0.093079 | 0.09731 | 0.857359 | 0.855243 | 0.855243 | 0.840737 | 0.818979 | 0.776367 | 0 | 0.041296 | 0.558901 | 9,168 | 142 | 135 | 64.56338 | 0.776954 | 0 | 0 | 0.745455 | 0 | 0 | 0.047229 | 0.014616 | 0 | 0 | 0 | 0 | 0.190909 | 1 | 0 | false | 0 | 0.054545 | 0 | 0.072727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
780856e9065679ff03a044bfd347af814cabc587 | 23,296 | py | Python | BaseDao.py | emaste-r/torMysqlDao | 878a72afdf85c719b89e748dac6d3708e4ab8f59 | [
"MIT"
] | 6 | 2018-02-02T07:31:40.000Z | 2018-02-02T08:53:24.000Z | BaseDao.py | emaste-r/torBizMysqlDao | 878a72afdf85c719b89e748dac6d3708e4ab8f59 | [
"MIT"
] | null | null | null | BaseDao.py | emaste-r/torBizMysqlDao | 878a72afdf85c719b89e748dac6d3708e4ab8f59 | [
"MIT"
] | 2 | 2018-05-20T02:48:47.000Z | 2019-06-06T01:31:25.000Z | # coding=utf-8
import logging
import tornado.gen
class BaseDao(object):
"""Generic async (tornado.gen) MySQL DAO base class.

Subclasses configure the table through the class attributes below and
inherit insert/update/soft-delete/select helpers.
"""
DataInfo = None  # row wrapper class; query results are returned as DataInfo instances
table_name = ''  # database table name
escape_list = []  # columns whose values must go through conn.escape()
quot_list = []  # string columns whose values are wrapped in quotes
not_append_list = []  # int columns that are only ever assigned, never incremented (e.g. img_id)
append_list = []  # int columns that may be incremented in place (e.g. add_cnt, view_cnt)
@classmethod
@tornado.gen.coroutine
def update(cls, context, conn, dic, where_col='id', where_col_str=False):
    """Update one row of ``cls.table_name``.

    :param context: request context (unused here, kept for API symmetry)
    :param conn: database connection
    :param dic: column -> value mapping (or a ``cls.DataInfo`` instance);
                must contain ``where_col``
    :param where_col: column used in the WHERE clause (default ``'id'``)
    :param where_col_str: quote the WHERE value as a string when True
    """
    try:
        # Callers sometimes pass a DataInfo instead of a dict -- convert it.
        if cls.DataInfo and isinstance(dic, cls.DataInfo):
            dic = dic.to_dict()
        # The WHERE column is mandatory, otherwise we would update every row.
        assert where_col in dic
        sql = 'update %s set ' % cls.table_name
        for key, value in dic.items():
            logging.info("%s=%s" % (key, value))
            if key == where_col:
                continue
            if key in cls.escape_list:
                # Values that need driver-level escaping.
                value = conn.escape(value)
                sql += '`%s`=%s,' % (key, value)
            elif key in cls.quot_list:
                # Plain string columns: normalize None to '' and quote.
                if not value:
                    value = ''
                sql += " `%s` = '%s'," % (key, value)
            elif key in cls.not_append_list:
                # Integer columns that are always assigned, never incremented.
                sql += " `%s` = %s," % (key, value)
            elif key in cls.append_list:
                if isinstance(value, tuple):
                    # value == (amount, append_flag)
                    if value[1]:
                        sql += ' `%s` = `%s` + %s,' % (key, key, value[0])
                    else:
                        sql += ' `%s` = %s,' % (key, value[0])
                else:
                    # Bug fix: the original indexed a plain number with
                    # value[0], raising TypeError -- use the value itself.
                    sql += ' `%s` = %s,' % (key, value)
        sql = sql[0:-1]  # drop the trailing comma
        where_value = dic[where_col]
        if where_col_str:
            sql += " where %s = '%s'" % (where_col, where_value)
        else:
            sql += ' where %s = %s' % (where_col, where_value)
        logging.info(sql)
        with conn.cursor() as cursor:
            yield cursor.execute(sql)
    except Exception as ex:
        # Dump the class configuration to ease debugging, then re-raise.
        logging.info("table_name=%s" % cls.table_name)
        logging.info("escape_list=%s" % cls.escape_list)
        logging.info("quot_list=%s" % cls.quot_list)
        logging.info("not_append_list=%s" % cls.not_append_list)
        logging.info("append_list=%s" % cls.append_list)
        raise ex
@classmethod
@tornado.gen.coroutine
def update_by_cols(cls, context, conn, dic, where_lst):
    """Update rows matched by several ANDed WHERE conditions.

    :param context: request context (unused)
    :param conn: database connection
    :param dic: column -> value mapping (or a ``cls.DataInfo`` instance)
    :param where_lst: list of {'key': ..., 'where_col': ..., 'col_str': bool}
                      dicts, ANDed together in the WHERE clause
    """
    try:
        assert len(where_lst) >= 1
        # Callers sometimes pass a DataInfo instead of a dict -- convert it.
        if cls.DataInfo and isinstance(dic, cls.DataInfo):
            dic = dic.to_dict()
        sql = 'update %s set ' % cls.table_name
        for key, value in dic.items():
            logging.info("%s=%s" % (key, value))
            if key in cls.escape_list:
                # Values that need driver-level escaping.
                value = conn.escape(value)
                sql += '`%s`=%s,' % (key, value)
            elif key in cls.quot_list:
                # Plain string columns: normalize None to '' and quote.
                if not value:
                    value = ''
                sql += " `%s` = '%s'," % (key, value)
            elif key in cls.not_append_list:
                sql += " `%s` = %s," % (key, value)
            elif key in cls.append_list:
                if isinstance(value, tuple):
                    # value == (amount, append_flag)
                    if value[1]:
                        sql += ' `%s` = `%s` + %s,' % (key, key, value[0])
                    else:
                        sql += ' `%s` = %s,' % (key, value[0])
                else:
                    # Bug fix: the original indexed a plain number with
                    # value[0], raising TypeError -- use the value itself.
                    sql += ' `%s` = %s,' % (key, value)
        sql = sql[0:-1]  # drop the trailing comma
        where_str = " where "
        # Named ``cond`` instead of re-using ``dic``, which the original
        # shadowed here.
        for cond in where_lst:
            key = cond['key']
            where_col = cond['where_col']
            col_str = cond['col_str']
            if col_str:
                where_str += " %s = '%s' and" % (where_col, key)
            else:
                where_str += " %s = %s and" % (where_col, key)
        where_str = where_str[0:-3]  # drop the trailing 'and'
        sql += where_str
        logging.info(sql)
        with conn.cursor() as cursor:
            yield cursor.execute(sql)
    except Exception as ex:
        # Dump the class configuration to ease debugging, then re-raise.
        logging.info("table_name=%s" % cls.table_name)
        logging.info("escape_list=%s" % cls.escape_list)
        logging.info("quot_list=%s" % cls.quot_list)
        logging.info("not_append_list=%s" % cls.not_append_list)
        logging.info("append_list=%s" % cls.append_list)
        raise ex
@classmethod
@tornado.gen.coroutine
def insert(cls, context, conn, _dic):
    """Insert one row built from ``_dic`` and return the new row id.

    Only keys listed in the class column lists are written; unknown keys
    are silently ignored.
    """
    try:
        # Accept a DataInfo instance as well as a plain dict.
        if cls.DataInfo and isinstance(_dic, cls.DataInfo):
            _dic = _dic.to_dict()
        known_columns = cls.escape_list + cls.quot_list + cls.not_append_list + cls.append_list
        sql = "insert into %s( " % cls.table_name
        for column in _dic:
            if column in known_columns:
                sql += "`%s`," % column
        sql = sql[:-1]  # strip the trailing comma
        sql += ") values("
        for column, value in _dic.items():
            if column in cls.escape_list:
                sql += "%s," % conn.escape(value)
            elif column in cls.quot_list:
                sql += "'%s'," % value
            elif column in cls.not_append_list:
                sql += '%s,' % value
            elif column in cls.append_list:
                sql += '%s,' % value
        sql = sql[:-1]  # strip the trailing comma
        sql += ') '
        logging.info("insert===> %s" % sql)
        with conn.cursor() as cursor:
            yield cursor.execute(sql)
            logging.info("insert===>cursor.lastrowid=%s" % cursor.lastrowid)
            raise tornado.gen.Return(cursor.lastrowid)
    except tornado.gen.Return:
        # Not an error: coroutine-style return value, propagate untouched.
        raise
    except Exception as ex:
        # Dump the class configuration to ease debugging, then re-raise.
        logging.info("table_name=%s" % cls.table_name)
        logging.info("escape_list=%s" % cls.escape_list)
        logging.info("quot_list=%s" % cls.quot_list)
        logging.info("not_append_list=%s" % cls.not_append_list)
        logging.info("append_list=%s" % cls.append_list)
        raise ex
@classmethod
@tornado.gen.coroutine
def new(cls, context, conn, _dic):
    """Alias for :meth:`insert`; returns the id of the created row."""
    new_id = yield cls.insert(context, conn, _dic)
    raise tornado.gen.Return(new_id)
@classmethod
@tornado.gen.coroutine
def delete(cls, context, conn, obj_id):
    """Soft-delete one row by primary key.

    Rows are never physically removed; ``del_flag=1`` marks them deleted
    and the select helpers filter them out.

    :param context: request context (unused)
    :param conn: database connection
    :param obj_id: primary key of the row to delete
    """
    sql = "update %s set del_flag=1 where id=%s" % (cls.table_name, obj_id)
    logging.info("DELETE = %s" % sql)
    with conn.cursor() as cursor:
        yield cursor.execute(sql)
@classmethod
@tornado.gen.coroutine
def get_by_userid_type(cls, context, conn, user_id, type, page=0, limit=10):
    """Fetch non-deleted rows matching ``user_id`` and ``type``.

    ``page == 0`` disables pagination; otherwise ``page`` is 1-based.
    (The ``type`` parameter name shadows the builtin but is kept for
    interface compatibility.)

    :return: list of ``cls.DataInfo``
    """
    base_sql = "select * from %s where user_id=%s and `type`=%s and del_flag=0" % (
        cls.table_name, user_id, type)
    if page == 0:
        sql = base_sql
    else:
        page = page or 1
        start = (page - 1) * limit
        sql = base_sql + " limit %s offset %s " % (limit, start)
    logging.info(sql)
    with conn.cursor() as cursor:
        yield cursor.execute(sql)
        rows = cursor.fetchall()
        raise tornado.gen.Return([cls.DataInfo(row) for row in rows])
@classmethod
@tornado.gen.coroutine
def get_by_userid(cls, context, conn, user_id, page=0, limit=10, orderby_col="", is_desc=False, is_fetchone=False):
"""
Fetch non-deleted rows belonging to ``user_id``.
:param context: request context (unused)
:param conn: database connection
:param user_id: user id to filter on
:param page: 1-based page number; 0 disables pagination
:param limit: rows per page (only used when page > 0)
:param orderby_col: optional column to order by
:param is_desc: order descending when True
:param is_fetchone: return a single DataInfo (or None) instead of a list
:return: DataInfo/None when is_fetchone, else list of DataInfo
"""
with conn.cursor() as cursor:
# page == 0: no pagination, fetch every matching row.
if page == 0:
if orderby_col:
if is_desc:
sql = "select * from %s where user_id=%s and del_flag=0 order by `%s` desc" % (
cls.table_name, user_id, orderby_col)
else:
sql = "select * from %s where user_id=%s and del_flag=0 order by `%s` " % (
cls.table_name, user_id, orderby_col)
else:
sql = "select * from %s where user_id=%s and del_flag=0" % (
cls.table_name, user_id)
else:
# Paginated: page is 1-based, offset = (page - 1) * limit.
page = page if page else 1
start = (page - 1) * limit
if orderby_col:
if is_desc:
sql = "select * from %s where user_id=%s and del_flag=0 order by `%s` desc limit %s offset %s " % (
cls.table_name, user_id, orderby_col, limit, start)
else:
sql = "select * from %s where user_id=%s and del_flag=0 order by `%s` limit %s offset %s " % (
cls.table_name, user_id, orderby_col, limit, start)
else:
sql = "select * from %s where user_id=%s and del_flag=0 limit %s offset %s " % (
cls.table_name, user_id, limit, start)
logging.info(sql)
yield cursor.execute(sql)
if is_fetchone:
item = cursor.fetchone()
if not item:
raise tornado.gen.Return(None)
info = cls.DataInfo(item)
raise tornado.gen.Return(info)
else:
items = cursor.fetchall()
infos = []
for item in items:
info = cls.DataInfo(item)
infos.append(info)
raise tornado.gen.Return(infos)
@classmethod
@tornado.gen.coroutine
def get_by_id(cls, context, conn, obj_id):
    """Fetch one non-deleted row by primary key.

    :param context: request context (unused)
    :param conn: database connection
    :param obj_id: primary key to look up
    :return: ``cls.DataInfo`` instance, or None when no row matches
    """
    with conn.cursor() as cursor:
        sql = "select * from %s where id=%s and del_flag=0" % (cls.table_name, obj_id)
        yield cursor.execute(sql)
        item = cursor.fetchone()
        # Consistency fix: every other getter returns None on a miss; the
        # original built cls.DataInfo(None) here instead.
        if not item:
            raise tornado.gen.Return(None)
        info = cls.DataInfo(item)
        raise tornado.gen.Return(info)
@classmethod
@tornado.gen.coroutine
def get_all(cls, context, conn, page=0, limit=10, orderby_col="", is_desc=False):
"""
Fetch all non-deleted rows of the table.
:param context: request context (unused)
:param conn: database connection
:param page: 1-based page number; 0 disables pagination
:param limit: rows per page (only used when page > 0)
:param orderby_col: optional column to order by
:param is_desc: order descending when True
:return: list of cls.DataInfo
"""
with conn.cursor() as cursor:
# page == 0: no pagination, fetch every row.
if page == 0:
if orderby_col:
if is_desc:
sql = "select * from %s where del_flag=0 order by `%s` desc" % (
cls.table_name, orderby_col)
else:
sql = "select * from %s where del_flag=0 order by `%s` " % (
cls.table_name, orderby_col)
else:
sql = "select * from %s where del_flag=0" % (
cls.table_name)
else:
# Paginated: page is 1-based, offset = (page - 1) * limit.
page = page if page else 1
start = (page - 1) * limit
if orderby_col:
if is_desc:
sql = "select * from %s where del_flag=0 order by `%s` desc limit %s offset %s " % (
cls.table_name, orderby_col, limit, start)
else:
sql = "select * from %s where del_flag=0 order by `%s` limit %s offset %s " % (
cls.table_name, orderby_col, limit, start)
else:
sql = "select * from %s where del_flag=0 limit %s offset %s " % (
cls.table_name, limit, start)
logging.info(sql)
yield cursor.execute(sql)
items = cursor.fetchall()
infos = []
for item in items:
info = cls.DataInfo(item)
infos.append(info)
raise tornado.gen.Return(infos)
@classmethod
@tornado.gen.coroutine
def get_by_col(cls, context, conn, key, where_col, col_str=False, is_fetchone=True, page=0, limit=10,
orderby_col="", is_desc=False, with_del=False, equal_lt_gt="="):
    """Select rows where ``where_col <equal_lt_gt> key``.

    :param where_col: column to filter on
    :param col_str: wrap ``key`` in quotes when True
    :param is_fetchone: return a single DataInfo (or None) instead of a list
    :param with_del: include soft-deleted rows when True
    :param equal_lt_gt: comparison operator, one of =, >=, <=, <, >
    :return: DataInfo/None when is_fetchone, else list of DataInfo
    """
    key_part = "'%s'" % key if col_str else "%s" % key
    order_part = " "
    if orderby_col:
        order_part = " order by `%s` %s" % (orderby_col, " desc " if is_desc else " ")
    paging_part = " "
    if page > 0:
        paging_part = "limit %s offset %s" % (limit, (page - 1) * limit)
    del_part = " " if with_del else " and del_flag=0 "
    sql = "select * from %s where `%s` %s %s %s %s %s" % (
        cls.table_name, where_col, equal_lt_gt, key_part, del_part, order_part, paging_part)
    logging.info(sql)
    with conn.cursor() as cursor:
        yield cursor.execute(sql)
        if is_fetchone:
            row = cursor.fetchone()
            result = cls.DataInfo(row) if row else None
            raise tornado.gen.Return(result)
        rows = cursor.fetchall()
        raise tornado.gen.Return([cls.DataInfo(row) for row in rows])
@classmethod
@tornado.gen.coroutine
def get_by_cols(cls, context, conn, where_lst, is_fetchone=True, with_del=False, page=0, limit=10,
orderby_col="", is_desc=False):
"""
Select rows matching every condition in ``where_lst`` (ANDed together).
:param where_lst: each item = {
'key': key,
'where_col': where_col,  # column to filter on
'col_str': col_str,  # wrap the value in quotes when True
'equal_lt_gt': "=",  # one of: =, >=, <=, <, >
}
:param is_fetchone: return a single DataInfo (or None) instead of a list
:param with_del: include soft-deleted rows when True
:return: DataInfo/None when is_fetchone, else list of DataInfo
"""
with conn.cursor() as cursor:
assert len(where_lst) >= 1
_desc_str = " desc " if is_desc else " "
_orderby_col_str = " order by `%s` %s" % (orderby_col, _desc_str) if orderby_col else " "
_page_limit_str = "limit %s offset %s" % (limit, (page - 1) * limit) if page > 0 else " "
_del_str = " " if with_del else " and del_flag=0 "
sql = "select * from %s where " % cls.table_name
# NOTE(review): values are interpolated straight into the SQL string; if
# they can come from untrusted input this is an injection vector.
for dic in where_lst:
key = dic['key']
where_col = dic['where_col']
col_str = dic['col_str']
equal_str = "=" if "equal_lt_gt" not in dic else dic['equal_lt_gt']
_key_str = "'%s'" % key if col_str else "%s" % key
sql += " `%s` %s %s and" % (where_col, equal_str, _key_str)
# drop the trailing 'and'
if where_lst:
sql = sql[0:-3]
sql += " %s %s %s" % (_del_str, _orderby_col_str, _page_limit_str)
logging.info(sql)
yield cursor.execute(sql)
if is_fetchone:
item = cursor.fetchone()
if not item:
raise tornado.gen.Return(None)
info = cls.DataInfo(item)
raise tornado.gen.Return(info)
else:
items = cursor.fetchall()
infos = []
for item in items:
info = cls.DataInfo(item)
infos.append(info)
raise tornado.gen.Return(infos)
@classmethod
@tornado.gen.coroutine
def get_all_len(cls, context, conn):
    """Return the number of non-deleted rows in the table.

    :param context: request context (unused)
    :param conn: database connection
    """
    sql = "select count(1) from %s where del_flag=0" % cls.table_name
    with conn.cursor() as cursor:
        yield cursor.execute(sql)
        row = cursor.fetchone()
        raise tornado.gen.Return(row['count(1)'])
@classmethod
@tornado.gen.coroutine
def get_len_by_cols(cls, context, conn, where_lst, with_del=False):
    """Count rows matching every condition in ``where_lst`` (ANDed).

    Each condition is {'key', 'where_col', 'col_str'[, 'equal_lt_gt']}.
    Soft-deleted rows are excluded unless ``with_del`` is True.
    """
    assert len(where_lst) >= 1
    del_part = " " if with_del else " and del_flag=0 "
    clauses = ""
    for cond in where_lst:
        operator = cond.get('equal_lt_gt', '=')
        value_part = "'%s'" % cond['key'] if cond['col_str'] else "%s" % cond['key']
        clauses += " `%s` %s %s and" % (cond['where_col'], operator, value_part)
    sql = "select count(1) from %s where " % cls.table_name + clauses
    sql = sql[0:-3]  # drop the trailing 'and'
    sql += del_part
    logging.info(sql)
    with conn.cursor() as cursor:
        yield cursor.execute(sql)
        row = cursor.fetchone()
        raise tornado.gen.Return(row['count(1)'])
@classmethod
@tornado.gen.coroutine
def search_by_col(cls, context, conn, key, col):
"""
LIKE-search non-deleted rows on one column.
:param context: request context (unused)
:param conn: database connection
:param key: substring to search for
:param col: column to search in
:return: list of cls.DataInfo
"""
with conn.cursor() as cursor:
# NOTE(review): ``key`` and ``col`` are concatenated directly into the
# SQL string -- if they can come from untrusted input this is an SQL
# injection vector; consider conn.escape()/parameterized queries.
sql = "select * from " + cls.table_name + " where `" + col + "` like '%" + key + "%' and del_flag=0"
logging.info("sql = %s" % sql)
yield cursor.execute(sql)
infos = []
items = cursor.fetchall()
for item in items:
info = cls.DataInfo(item)
infos.append(info)
raise tornado.gen.Return(infos)
@classmethod
@tornado.gen.coroutine
def get_list_by_col(cls, context, conn, key, where_col, col_str=False, page=0, limit=10, orderby_col="",
is_desc=False):
    """Select non-deleted rows where ``where_col`` equals ``key``.

    :param where_col: column to filter on
    :param col_str: wrap ``key`` in quotes when True
    :param page: 1-based page number; 0 disables pagination
    :param orderby_col: optional column to order by (descending if is_desc)
    :return: list of ``cls.DataInfo``
    """
    with conn.cursor() as cursor:
        key_part = "'%s'" % key if col_str else "%s" % key
        sql = "select * from %s where `%s`=%s and del_flag=0 " % (
            cls.table_name, where_col, key_part)
        # Bug fix: the original non-orderby branches passed ``orderby_col``
        # as a fourth argument to a three-placeholder %-format, raising
        # TypeError ("not all arguments converted").
        if orderby_col:
            sql += "order by `%s` %s" % (orderby_col, "desc " if is_desc else "")
        if page > 0:
            start = (page - 1) * limit
            sql += " limit %s offset %s " % (limit, start)
        logging.info(sql)
        yield cursor.execute(sql)
        items = cursor.fetchall()
        raise tornado.gen.Return([cls.DataInfo(item) for item in items])
@classmethod
@tornado.gen.coroutine
def delete_by_col(cls, context, conn, obj_id, where_col, col_str=False):
    """Soft-delete all rows where ``where_col`` equals ``obj_id``.

    :param context: request context (unused)
    :param conn: database connection
    :param obj_id: value to match
    :param where_col: column to match on
    :param col_str: quote the value when True
    """
    value_part = "'%s'" % obj_id if col_str else "%s" % obj_id
    sql = "update %s set del_flag=1 where `%s`=%s" % (cls.table_name, where_col, value_part)
    logging.info("DELETE = %s" % sql)
    with conn.cursor() as cursor:
        yield cursor.execute(sql)
@classmethod
@tornado.gen.coroutine
def delete_by_cols(cls, context, conn, where_lst):
    """Soft-delete rows matching every condition in ``where_lst``.

    Each condition is a {'key', 'where_col', 'col_str'} dict; conditions
    are ANDed together in the WHERE clause.
    """
    with conn.cursor() as cursor:
        assert len(where_lst) >= 1
        sql = "update %s set del_flag=1 where " % cls.table_name
        for cond in where_lst:
            if cond['col_str']:
                sql += " `%s`= '%s' and" % (cond['where_col'], cond['key'])
            else:
                sql += " `%s`= %s and" % (cond['where_col'], cond['key'])
        sql = sql[0:-3]  # drop the trailing 'and'
        logging.info("DELETE = %s" % sql)
        yield cursor.execute(sql)
| 36.173913 | 123 | 0.468707 | 2,617 | 23,296 | 4.009935 | 0.060374 | 0.008195 | 0.041166 | 0.030684 | 0.877454 | 0.838956 | 0.820469 | 0.783972 | 0.762912 | 0.729083 | 0 | 0.007528 | 0.418355 | 23,296 | 643 | 124 | 36.230171 | 0.766937 | 0.023051 | 0 | 0.762791 | 0 | 0.011628 | 0.126809 | 0.001428 | 0 | 0 | 0 | 0 | 0.011628 | 0 | null | null | 0 | 0.004651 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
783ce3d5f88b0bae50a9d7e1573c4ff14a7bd1d7 | 133 | py | Python | teachers_sample_codes/spotkanie_1/00_start/gui.py | programujemy-python/programuj-w-zespole-test | 865f96e5be6ab4e3a7f15b9e446a1c0cbae06472 | [
"MIT"
] | 2 | 2022-01-31T20:21:18.000Z | 2022-02-22T10:54:41.000Z | teachers_materials/spotkanie_1/00_start/gui.py | abixadamj/Popojutrze-Progr-mujemy | d6f5a4de799a486024f799c4c392fdc1419654b8 | [
"MIT"
] | null | null | null | teachers_materials/spotkanie_1/00_start/gui.py | abixadamj/Popojutrze-Progr-mujemy | d6f5a4de799a486024f799c4c392fdc1419654b8 | [
"MIT"
] | 1 | 2022-03-07T11:23:58.000Z | 2022-03-07T11:23:58.000Z | import PySimpleGUI as sg
# Show the welcome popup for the workshop project.
welcome_text = "Hej - witamy w zespole w projekcie Progr@muj w zespole!"
sg.popup(welcome_text, title="Progr@muj w zespole")
| 22.166667 | 67 | 0.691729 | 21 | 133 | 4.380952 | 0.619048 | 0.26087 | 0.195652 | 0.347826 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.210526 | 133 | 5 | 68 | 26.6 | 0.87619 | 0 | 0 | 0 | 0 | 0 | 0.560606 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
783e8e8485b3f52fb5ba32c069b76d54d4a872f8 | 36 | py | Python | zemfrog/templates/context/name.py | aprilahijriyan/zemfrog | 0a4ebc303caa8fadf89ca0cc8569de60c61f3252 | [
"MIT"
] | null | null | null | zemfrog/templates/context/name.py | aprilahijriyan/zemfrog | 0a4ebc303caa8fadf89ca0cc8569de60c61f3252 | [
"MIT"
] | null | null | null | zemfrog/templates/context/name.py | aprilahijriyan/zemfrog | 0a4ebc303caa8fadf89ca0cc8569de60c61f3252 | [
"MIT"
] | null | null | null | def init_context():
return {}
| 12 | 20 | 0.583333 | 4 | 36 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.277778 | 36 | 2 | 21 | 18 | 0.769231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
78ac0b319acc9f4b17af09d7ffd95ece0b55eac0 | 206 | py | Python | students/k3342/practical_works/Frolov_Alex/simple_django_web_project/admin.py | TonikX/ITMO_ICT_-WebProgramming_2020 | ba566c1b3ab04585665c69860b713741906935a0 | [
"MIT"
] | 10 | 2020-03-20T09:06:12.000Z | 2021-07-27T13:06:02.000Z | students/k3342/practical_works/Frolov_Alex/simple_django_web_project/admin.py | TonikX/ITMO_ICT_-WebProgramming_2020 | ba566c1b3ab04585665c69860b713741906935a0 | [
"MIT"
] | 134 | 2020-03-23T09:47:48.000Z | 2022-03-12T01:05:19.000Z | students/k3342/practical_works/Frolov_Alex/simple_django_web_project/admin.py | TonikX/ITMO_ICT_-WebProgramming_2020 | ba566c1b3ab04585665c69860b713741906935a0 | [
"MIT"
] | 71 | 2020-03-20T12:45:56.000Z | 2021-10-31T19:22:25.000Z | from django.contrib import admin
from .models import User, Avto, License, Ownership
# Expose the car-registry models in the Django admin site.
for model in (Avto, License, Ownership, User):
    admin.site.register(model)
| 22.888889 | 51 | 0.776699 | 28 | 206 | 5.714286 | 0.428571 | 0.225 | 0.425 | 0.325 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121359 | 206 | 8 | 52 | 25.75 | 0.883978 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
78b099ee12ee9c640529ccec6c14789033b70a6e | 4,474 | py | Python | test/acceptance/test_groups.py | DiegoPomares/sargeparseN | 32424cd1a87c8efba4a2e2c08540478bec9f63a2 | [
"Apache-2.0"
] | 1 | 2018-09-05T12:51:16.000Z | 2018-09-05T12:51:16.000Z | test/acceptance/test_groups.py | DiegoPomares/sargeparseN | 32424cd1a87c8efba4a2e2c08540478bec9f63a2 | [
"Apache-2.0"
] | 23 | 2018-05-30T10:39:38.000Z | 2018-07-11T12:50:39.000Z | test/acceptance/test_groups.py | DiegoPomares/sargeparseN | 32424cd1a87c8efba4a2e2c08540478bec9f63a2 | [
"Apache-2.0"
] | 1 | 2018-07-10T16:42:26.000Z | 2018-07-10T16:42:26.000Z | # pylint: disable=redefined-outer-name
import sys
import shlex
import re
import pytest
import sargeparse
def test_mutex_group_required(capsys):
definition = {
'arguments': [
{
'names': ['-x'],
'mutex_group': 1,
},
{
'names': ['-y'],
'mutex_group': 1,
},
],
}
parser = sargeparse.Sarge(definition)
sys.argv = shlex.split('test -h')
with pytest.raises(SystemExit) as ex:
parser.parse()
captured = capsys.readouterr()
assert re.search(r'\[\s*?-x\s+?.+?\|\s*?-y\s+?.+?\]', captured.out)
definition['arguments'][0]['required'] = True
parser = sargeparse.Sarge(definition)
with pytest.raises(ValueError) as ex:
parser.parse()
assert 'must have the same value' in str(ex)
definition['arguments'][1]['required'] = True
parser = sargeparse.Sarge(definition)
with pytest.raises(SystemExit) as ex:
parser.parse()
captured = capsys.readouterr()
assert re.search(r'\(\s*?-x\s+?.+?\|\s*?-y\s+?.+?\)', captured.out)
def test_mutex_group_global(capsys):
definition = {
'arguments': [
{
'names': ['-x'],
'mutex_group': 1,
},
{
'names': ['-y'],
'mutex_group': 1,
},
],
}
parser = sargeparse.Sarge(definition)
sys.argv = shlex.split('test -h')
with pytest.raises(SystemExit) as ex:
parser.parse()
captured = capsys.readouterr()
assert re.search(r'\[\s*?-x\s+?.+?\|\s*?-y\s+?.+?\]', captured.out)
definition['arguments'][0]['global'] = True
parser = sargeparse.Sarge(definition)
with pytest.raises(ValueError) as ex:
parser.parse()
assert 'must have the same value' in str(ex)
definition['arguments'][1]['global'] = True
parser = sargeparse.Sarge(definition)
with pytest.raises(SystemExit) as ex:
parser.parse()
captured = capsys.readouterr()
assert re.search(r'\[\s*?-x\s+?.+?\|\s*?-y\s+?.+?\]', captured.out)
def test_mutex_group_global_required():
definition = {
'arguments': [
{
'names': ['-x'],
'mutex_group': 1,
'required': True,
'global': True,
},
{
'names': ['-y'],
'mutex_group': 1,
'required': False,
'global': True,
},
],
}
parser = sargeparse.Sarge(definition)
sys.argv = shlex.split('test -h')
parser = sargeparse.Sarge(definition)
with pytest.raises(ValueError) as ex:
parser.parse()
assert 'must have the same value' in str(ex)
def test_mutex_group_groups(capsys):
definition = {
'arguments': [
{
'names': ['-x'],
'mutex_group': 1,
},
{
'names': ['-y'],
'mutex_group': 1,
},
],
}
parser = sargeparse.Sarge(definition)
sys.argv = shlex.split('test -h')
with pytest.raises(SystemExit) as ex:
parser.parse()
captured = capsys.readouterr()
assert re.search(r'\[\s*?-x\s+?.+?\|\s*?-y\s+?.+?\]', captured.out)
definition['arguments'][0]['group'] = 'G'
parser = sargeparse.Sarge(definition)
with pytest.raises(ValueError) as ex:
parser.parse()
assert 'must have the same value' in str(ex)
definition['arguments'][1]['group'] = 'G'
parser = sargeparse.Sarge(definition)
with pytest.raises(SystemExit) as ex:
parser.parse()
captured = capsys.readouterr()
assert re.search(r'\[\s*?-x\s+?.+?\|\s*?-y\s+?.+?\]', captured.out)
def test_mutex_group_groups_required():
definition = {
'arguments': [
{
'names': ['-x'],
'mutex_group': 1,
'required': True,
'group': 'A',
},
{
'names': ['-y'],
'mutex_group': 1,
'required': False,
'group': 'B',
},
],
}
parser = sargeparse.Sarge(definition)
sys.argv = shlex.split('test -h')
parser = sargeparse.Sarge(definition)
with pytest.raises(ValueError) as ex:
parser.parse()
assert 'must have the same value' in str(ex)
| 25.420455 | 71 | 0.503576 | 456 | 4,474 | 4.881579 | 0.125 | 0.067385 | 0.122642 | 0.181042 | 0.939802 | 0.926774 | 0.922282 | 0.895328 | 0.895328 | 0.877358 | 0 | 0.005342 | 0.330577 | 4,474 | 175 | 72 | 25.565714 | 0.737896 | 0.008046 | 0 | 0.687943 | 0 | 0 | 0.162759 | 0.043282 | 0 | 0 | 0 | 0 | 0.078014 | 1 | 0.035461 | false | 0 | 0.035461 | 0 | 0.070922 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1529f24ef33932b92d5c3d074b2e8153f75783e2 | 195 | py | Python | bluebird/tensor.py | Stoick01/bluebird | a6ab5fcbf42da24ef8268ba6bc110b9eadd9a2ac | [
"MIT"
] | 1 | 2020-08-04T10:44:51.000Z | 2020-08-04T10:44:51.000Z | bluebird/tensor.py | Stoick01/bluebird | a6ab5fcbf42da24ef8268ba6bc110b9eadd9a2ac | [
"MIT"
] | 3 | 2021-06-02T03:33:48.000Z | 2022-03-12T01:00:23.000Z | bluebird/tensor.py | Stoick01/bluebird | a6ab5fcbf42da24ef8268ba6bc110b9eadd9a2ac | [
"MIT"
] | null | null | null | """
Tensor
======
Tensor is implement as ndarray from numpy
``from numpy import ndarray as Tensor``
"""
# TODO: Implement Tensor class separate from numpy
from numpy import ndarray as Tensor | 16.25 | 51 | 0.728205 | 28 | 195 | 5.071429 | 0.428571 | 0.253521 | 0.183099 | 0.253521 | 0.549296 | 0.549296 | 0.549296 | 0.549296 | 0 | 0 | 0 | 0 | 0.179487 | 195 | 12 | 52 | 16.25 | 0.8875 | 0.758974 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1533b665d79dd211830d43a2c536f8e9dfffd9ec | 2,037 | py | Python | retirement/migrations/0045_auto_20200813_1548.py | MelanieFJNR/Blitz-API | 9a6daecd158fe07a6aeb80cbf586781eb688f0f9 | [
"MIT"
] | 3 | 2019-10-22T00:16:49.000Z | 2021-07-15T07:44:43.000Z | retirement/migrations/0045_auto_20200813_1548.py | MelanieFJNR/Blitz-API | 9a6daecd158fe07a6aeb80cbf586781eb688f0f9 | [
"MIT"
] | 1,183 | 2018-04-19T18:40:30.000Z | 2022-03-31T21:05:05.000Z | retirement/migrations/0045_auto_20200813_1548.py | MelanieFJNR/Blitz-API | 9a6daecd158fe07a6aeb80cbf586781eb688f0f9 | [
"MIT"
] | 12 | 2018-04-17T19:16:42.000Z | 2022-01-27T00:19:59.000Z | # Generated by Django 2.2.12 on 2020-08-13 19:48
from django.db import migrations, models
class Migration(migrations.Migration):
# Relaxes NOT NULL on capacity/refund settings (seats, refund_rate,
# min_day_refund, min_day_exchange) so they may be left unset.  Each change
# is mirrored on the historicalretreat model (history-tracking shadow table).
dependencies = [
('retirement', '0044_auto_20200813_1523'),
]
operations = [
migrations.AlterField(
model_name='historicalretreat',
name='min_day_exchange',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Minimum days before the event for exchange'),
),
migrations.AlterField(
model_name='historicalretreat',
name='min_day_refund',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Minimum days before the event for refund'),
),
migrations.AlterField(
model_name='historicalretreat',
name='refund_rate',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Refund rate'),
),
migrations.AlterField(
model_name='historicalretreat',
name='seats',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Seats'),
),
migrations.AlterField(
model_name='retreat',
name='min_day_exchange',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Minimum days before the event for exchange'),
),
migrations.AlterField(
model_name='retreat',
name='min_day_refund',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Minimum days before the event for refund'),
),
migrations.AlterField(
model_name='retreat',
name='refund_rate',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Refund rate'),
),
migrations.AlterField(
model_name='retreat',
name='seats',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Seats'),
),
]
| 37.722222 | 128 | 0.621993 | 201 | 2,037 | 6.159204 | 0.238806 | 0.129241 | 0.161551 | 0.187399 | 0.875606 | 0.875606 | 0.836834 | 0.836834 | 0.751212 | 0.751212 | 0 | 0.021549 | 0.270987 | 2,037 | 53 | 129 | 38.433962 | 0.812121 | 0.022582 | 0 | 0.851064 | 1 | 0 | 0.209653 | 0.011564 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.021277 | 0 | 0.085106 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
156ee8c8613936b50c710d7cb488f04edcc40103 | 6,351 | py | Python | tests/model/test_tree_model.py | chezou/molehill | 02254e6bf2185174112aad7c607f60305ce9b20c | [
"Apache-2.0"
] | 3 | 2019-03-13T09:01:10.000Z | 2022-03-25T16:34:54.000Z | tests/model/test_tree_model.py | chezou/molehill | 02254e6bf2185174112aad7c607f60305ce9b20c | [
"Apache-2.0"
] | 9 | 2019-02-15T08:53:37.000Z | 2019-03-13T06:48:05.000Z | tests/model/test_tree_model.py | chezou/molehill | 02254e6bf2185174112aad7c607f60305ce9b20c | [
"Apache-2.0"
] | null | null | null | import pytest
import molehill
from molehill.model import train_randomforest_classifier, train_randomforest_regressor
from molehill.model import predict_randomforest_classifier, predict_randomforest_regressor
from molehill.model import _extract_attrs
@pytest.fixture
def categorical_cols():
    """Names of the categorical feature columns shared by the tests."""
    names = ['cat1', 'cat2']
    return names
@pytest.fixture
def numerical_cols():
    """Names of the numerical feature columns shared by the tests."""
    names = ['num1', 'num2', 'num3']
    return names
def test_extract_attrs(categorical_cols, numerical_cols):
    """Numerical columns map to Q and categorical to C, numerical first."""
    combined = _extract_attrs(categorical_cols, numerical_cols)
    assert combined == '-attrs Q,Q,Q,C,C'
    # Either column list may be empty; the other side still renders.
    assert _extract_attrs([], numerical_cols) == '-attrs Q,Q,Q'
    assert _extract_attrs(categorical_cols, []) == '-attrs C,C'
class TestSparseTrainModel:
    """SQL generation for RandomForest training on sparse feature vectors.

    With sparse=True the training call is wrapped in a CTE and
    var_importance is exploded into one row per feature.
    """

    def test_train_randomforest_classifier(self):
        # Classifier training with an explicit hyper-parameter string.
        expected = f"""\
-- client: molehill/{molehill.__version__}
with models as (
select
train_randomforest_classifier(
features
, target_val
, '-trees 16'
)
from
src_tbl
)
-- DIGDAG_INSERT_LINE
select
model_id
, model_weight
, model
, concat_ws(',', collect_set(concat(k1, ':', v1))) as var_importance
, oob_errors
, oob_tests
from
models
lateral view explode(var_importance) t1 as k1, v1
group by 1, 2, 3, 5, 6
;
"""
        generated = train_randomforest_classifier(
            "src_tbl", "target_val", "-trees 16", sparse=True)
        assert generated == expected

    def test_train_randomforest_regressor(self):
        # Regressor training without any extra option string.
        expected = f"""\
-- client: molehill/{molehill.__version__}
with models as (
select
train_randomforest_regressor(
features
, target_val
)
from
src_tbl
)
-- DIGDAG_INSERT_LINE
select
model_id
, model_weight
, model
, concat_ws(',', collect_set(concat(k1, ':', v1))) as var_importance
, oob_errors
, oob_tests
from
models
lateral view explode(var_importance) t1 as k1, v1
group by 1, 2, 3, 5, 6
;
"""
        generated = train_randomforest_regressor(
            "src_tbl", "target_val", sparse=True)
        assert generated == expected
class TestDenseTrainModel:
    """SQL generation for RandomForest training on dense feature arrays.

    Dense training appends an -attrs option (Q = quantitative,
    C = categorical) and needs no var_importance explosion.
    """

    def test_train_randomforest_classifier(self, categorical_cols, numerical_cols):
        expected = f"""\
-- client: molehill/{molehill.__version__}
select
train_randomforest_classifier(
features
, target_val
, '-trees 16 -attrs Q,Q,Q,C,C'
)
from
src_tbl
;
"""
        generated = train_randomforest_classifier(
            "src_tbl", "target_val", "-trees 16",
            categorical_columns=categorical_cols,
            numerical_columns=numerical_cols)
        assert generated == expected

    def test_train_randomforest_regressor(self, categorical_cols, numerical_cols):
        expected = f"""\
-- client: molehill/{molehill.__version__}
select
train_randomforest_regressor(
features
, target_val
, '-attrs Q,Q,Q,C,C'
)
from
src_tbl
;
"""
        generated = train_randomforest_regressor(
            "src_tbl", "target_val",
            categorical_columns=categorical_cols,
            numerical_columns=numerical_cols)
        assert generated == expected
class TestPredictClassifier:
    """SQL generation for RandomForest classification prediction.

    The prediction query shuffles models, applies tree_predict per
    model, rf_ensemble's the votes per row, and reports the top-class
    probability. The returned column name is always 'probability'.
    """

    def test_predict_randomforest_classifier(self):
        expected = f"""\
-- client: molehill/{molehill.__version__}
with p as (
select
model_id
, model_weight
, model
from
model_tbl
DISTRIBUTE BY rand(1)
),
t1 as (
select
t.id
, p.model_weight
, tree_predict(p.model_id, p.model, t.features, "-classification") as predicted
from
p
left outer join target_tbl t
),
ensembled as (
select
id
, rf_ensemble(predicted.value, predicted.posteriori, model_weight) as predicted
from
t1
group by
id
)
-- DIGDAG_INSERT_LINE
select
id
, predicted.label
, predicted.probabilities[1] as probability
from
ensembled
;
"""
        sql, column = predict_randomforest_classifier(
            "target_tbl", "id", "model_tbl")
        assert sql == expected
        assert column == "probability"

    def test_predict_randomforest_classifier_hashing(self):
        # hashing=True wraps the feature column in feature_hashing().
        expected = f"""\
-- client: molehill/{molehill.__version__}
with p as (
select
model_id
, model_weight
, model
from
model_tbl
DISTRIBUTE BY rand(1)
),
t1 as (
select
t.id
, p.model_weight
, tree_predict(p.model_id, p.model, feature_hashing(t.features), "-classification") as predicted
from
p
left outer join target_tbl t
),
ensembled as (
select
id
, rf_ensemble(predicted.value, predicted.posteriori, model_weight) as predicted
from
t1
group by
id
)
-- DIGDAG_INSERT_LINE
select
id
, predicted.label
, predicted.probabilities[1] as probability
from
ensembled
;
"""
        sql, column = predict_randomforest_classifier(
            "target_tbl", "id", "model_tbl", hashing=True)
        assert sql == expected
        assert column == "probability"
class TestPredictRegressor:
    """SQL generation for RandomForest regression prediction.

    Same ensemble pipeline as classification but tree_predict is called
    without the "-classification" flag, and the returned column name is
    'target'.
    """

    def test_predict_randomforest_regressor(self):
        expected = f"""\
-- client: molehill/{molehill.__version__}
with p as (
select
model_id
, model_weight
, model
from
model_tbl
DISTRIBUTE BY rand(1)
),
t1 as (
select
t.id
, p.model_weight
, tree_predict(p.model_id, p.model, t.features) as predicted
from
p
left outer join target_tbl t
),
ensembled as (
select
id
, rf_ensemble(predicted.value, predicted.posteriori, model_weight) as predicted
from
t1
group by
id
)
-- DIGDAG_INSERT_LINE
select
id
, predicted.label
, predicted.probabilities[1] as probability
from
ensembled
;
"""
        sql, column = predict_randomforest_regressor(
            "target_tbl", "id", "model_tbl")
        assert sql == expected
        assert column == "target"

    def test_predict_regressor_hashing(self):
        # hashing=True wraps the feature column in feature_hashing().
        expected = f"""\
-- client: molehill/{molehill.__version__}
with p as (
select
model_id
, model_weight
, model
from
model_tbl
DISTRIBUTE BY rand(1)
),
t1 as (
select
t.id
, p.model_weight
, tree_predict(p.model_id, p.model, feature_hashing(t.features)) as predicted
from
p
left outer join target_tbl t
),
ensembled as (
select
id
, rf_ensemble(predicted.value, predicted.posteriori, model_weight) as predicted
from
t1
group by
id
)
-- DIGDAG_INSERT_LINE
select
id
, predicted.label
, predicted.probabilities[1] as probability
from
ensembled
;
"""
        sql, column = predict_randomforest_regressor(
            "target_tbl", "id", "model_tbl", hashing=True)
        assert sql == expected
        assert column == "target"
| 21.240803 | 107 | 0.687766 | 812 | 6,351 | 5.082512 | 0.124384 | 0.023261 | 0.013569 | 0.0252 | 0.8992 | 0.877635 | 0.81827 | 0.80567 | 0.781682 | 0.756966 | 0 | 0.00982 | 0.214297 | 6,351 | 298 | 108 | 21.312081 | 0.817234 | 0 | 0 | 0.766423 | 0 | 0.00365 | 0.609449 | 0.142992 | 0 | 0 | 0 | 0 | 0.054745 | 1 | 0.040146 | false | 0 | 0.032847 | 0.007299 | 0.094891 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ecaa5350e1e7e0523f7758e494938a6c13cb82e8 | 27,065 | py | Python | nova/tests/unit/api_samples_test_base/test_compare_result.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/api_samples_test_base/test_compare_result.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/api_samples_test_base/test_compare_result.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | 2 | 2017-07-20T17:31:34.000Z | 2020-07-24T02:42:19.000Z | begin_unit
comment|'# Copyright 2015 HPE, Inc.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'copy'
newline|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'import'
name|'testtools'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'functional'
name|'import'
name|'api_samples_test_base'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestCompareResult
name|'class'
name|'TestCompareResult'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Provide test coverage for result comparison logic in functional tests.\n\n _compare_result two types of comparisons, template data and sample\n data.\n\n Template data means the response is checked against a regex that is\n referenced by the template name. The template name is specified in\n the format %(name)\n\n Sample data is a normal value comparison.\n """'
newline|'\n'
nl|'\n'
DECL|member|getApiSampleTestBaseHelper
name|'def'
name|'getApiSampleTestBaseHelper'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Build an instance without running any unwanted test methods"""'
newline|'\n'
nl|'\n'
comment|'# NOTE(auggy): TestCase takes a "test" method name to run in __init__'
nl|'\n'
comment|'# calling this way prevents additional test methods from running'
nl|'\n'
name|'ast_instance'
op|'='
name|'api_samples_test_base'
op|'.'
name|'ApiSampleTestBase'
op|'('
string|"'setUp'"
op|')'
newline|'\n'
nl|'\n'
comment|'# required by ApiSampleTestBase'
nl|'\n'
name|'ast_instance'
op|'.'
name|'api_major_version'
op|'='
string|"'v2'"
newline|'\n'
name|'ast_instance'
op|'.'
name|'_project_id'
op|'='
string|"'True'"
newline|'\n'
nl|'\n'
comment|'# automagically create magic methods usually handled by test classes'
nl|'\n'
name|'ast_instance'
op|'.'
name|'compute'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'ast_instance'
op|'.'
name|'subs'
op|'='
name|'ast_instance'
op|'.'
name|'_get_regexes'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'return'
name|'ast_instance'
newline|'\n'
nl|'\n'
DECL|member|setUp
dedent|''
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'TestCompareResult'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'ast'
op|'='
name|'self'
op|'.'
name|'getApiSampleTestBaseHelper'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_bare_strings_match
dedent|''
name|'def'
name|'test_bare_strings_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""compare 2 bare strings that match"""'
newline|'\n'
name|'sample_data'
op|'='
string|"u'foo'"
newline|'\n'
name|'response_data'
op|'='
string|"u'foo'"
newline|'\n'
name|'result'
op|'='
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'sample_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
comment|'# NOTE(auggy): _compare_result will not return a matched value in the'
nl|'\n'
comment|"# case of bare strings. If they don't match it will throw an exception,"
nl|'\n'
comment|'# otherwise it returns "None".'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'None'
op|','
nl|'\n'
name|'observed'
op|'='
name|'result'
op|','
nl|'\n'
name|'message'
op|'='
string|"'Check _compare_result of 2 bare strings'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_bare_strings_no_match
dedent|''
name|'def'
name|'test_bare_strings_no_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check 2 bare strings that don\'t match"""'
newline|'\n'
name|'sample_data'
op|'='
string|"u'foo'"
newline|'\n'
name|'response_data'
op|'='
string|"u'bar'"
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'sample_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_template_strings_match
dedent|''
dedent|''
name|'def'
name|'test_template_strings_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""compare 2 template strings (contain %) that match"""'
newline|'\n'
name|'template_data'
op|'='
string|"u'%(id)s'"
newline|'\n'
name|'response_data'
op|'='
string|"u'858f295a-8543-45fa-804a-08f8356d616d'"
newline|'\n'
name|'result'
op|'='
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'observed'
op|'='
name|'result'
op|','
nl|'\n'
name|'message'
op|'='
string|"'Check _compare_result of 2 template strings'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_template_strings_no_match
dedent|''
name|'def'
name|'test_template_strings_no_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check 2 template strings (contain %) that don\'t match"""'
newline|'\n'
name|'template_data'
op|'='
string|"u'%(id)s'"
newline|'\n'
name|'response_data'
op|'='
string|"u'$58f295a-8543-45fa-804a-08f8356d616d'"
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
comment|'# TODO(auggy): _compare_result needs a consistent return value'
nl|'\n'
comment|'# In some cases it returns the value if it matched, in others it returns'
nl|'\n'
comment|"# None. In all cases, it throws an exception if there's no match."
nl|'\n'
DECL|member|test_bare_int_match
dedent|''
dedent|''
name|'def'
name|'test_bare_int_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check 2 bare ints that match"""'
newline|'\n'
name|'sample_data'
op|'='
number|'42'
newline|'\n'
name|'response_data'
op|'='
number|'42'
newline|'\n'
name|'result'
op|'='
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'sample_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'None'
op|','
nl|'\n'
name|'observed'
op|'='
name|'result'
op|','
nl|'\n'
name|'message'
op|'='
string|"'Check _compare_result of 2 bare ints'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_bare_int_no_match
dedent|''
name|'def'
name|'test_bare_int_no_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check 2 bare ints that don\'t match"""'
newline|'\n'
name|'sample_data'
op|'='
number|'42'
newline|'\n'
name|'response_data'
op|'='
number|'43'
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'sample_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
comment|'# TODO(auggy): _compare_result needs a consistent return value'
nl|'\n'
DECL|member|test_template_int_match
dedent|''
dedent|''
name|'def'
name|'test_template_int_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check template int against string containing digits"""'
newline|'\n'
name|'template_data'
op|'='
string|"u'%(int)s'"
newline|'\n'
name|'response_data'
op|'='
string|"u'42'"
newline|'\n'
nl|'\n'
name|'result'
op|'='
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'None'
op|','
nl|'\n'
name|'observed'
op|'='
name|'result'
op|','
nl|'\n'
name|'message'
op|'='
string|"'Check _compare_result of template ints'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_template_int_no_match
dedent|''
name|'def'
name|'test_template_int_no_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check template int against a string containing no digits"""'
newline|'\n'
name|'template_data'
op|'='
string|"u'%(int)s'"
newline|'\n'
name|'response_data'
op|'='
string|"u'foo'"
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_template_int_value
dedent|''
dedent|''
name|'def'
name|'test_template_int_value'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check an int value of a template int throws exception"""'
newline|'\n'
nl|'\n'
comment|"# template_data = u'%(int_test)'"
nl|'\n'
comment|'# response_data = 42'
nl|'\n'
nl|'\n'
comment|'# use an int instead of a string as the subs value'
nl|'\n'
name|'local_subs'
op|'='
name|'copy'
op|'.'
name|'deepcopy'
op|'('
name|'self'
op|'.'
name|'ast'
op|'.'
name|'subs'
op|')'
newline|'\n'
name|'local_subs'
op|'.'
name|'update'
op|'('
op|'{'
string|"'int_test'"
op|':'
number|'42'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'TypeError'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'subs'
op|'='
name|'local_subs'
newline|'\n'
nl|'\n'
comment|'# TODO(auggy): _compare_result needs a consistent return value'
nl|'\n'
DECL|member|test_dict_match
dedent|''
dedent|''
name|'def'
name|'test_dict_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check 2 matching dictionaries"""'
newline|'\n'
name|'template_data'
op|'='
op|'{'
nl|'\n'
string|"u'server'"
op|':'
op|'{'
nl|'\n'
string|"u'id'"
op|':'
string|"u'%(id)s'"
op|','
nl|'\n'
string|"u'adminPass'"
op|':'
string|"u'%(password)s'"
nl|'\n'
op|'}'
nl|'\n'
op|'}'
newline|'\n'
name|'response_data'
op|'='
op|'{'
nl|'\n'
string|"u'server'"
op|':'
op|'{'
nl|'\n'
string|"u'id'"
op|':'
string|"u'858f295a-8543-45fa-804a-08f8356d616d'"
op|','
nl|'\n'
string|"u'adminPass'"
op|':'
string|"u'4ZQ3bb6WYbC2'"
op|'}'
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'result'
op|'='
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'expected'
op|'='
string|"u'858f295a-8543-45fa-804a-08f8356d616d'"
op|','
nl|'\n'
name|'observed'
op|'='
name|'result'
op|','
nl|'\n'
name|'message'
op|'='
string|"'Check _compare_result of 2 dictionaries'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dict_no_match_value
dedent|''
name|'def'
name|'test_dict_no_match_value'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check 2 dictionaries where one has a different value"""'
newline|'\n'
name|'sample_data'
op|'='
op|'{'
nl|'\n'
string|"u'server'"
op|':'
op|'{'
nl|'\n'
string|"u'id'"
op|':'
string|"u'858f295a-8543-45fa-804a-08f8356d616d'"
op|','
nl|'\n'
string|"u'adminPass'"
op|':'
string|"u'foo'"
nl|'\n'
op|'}'
nl|'\n'
op|'}'
newline|'\n'
name|'response_data'
op|'='
op|'{'
nl|'\n'
string|"u'server'"
op|':'
op|'{'
nl|'\n'
string|"u'id'"
op|':'
string|"u'858f295a-8543-45fa-804a-08f8356d616d'"
op|','
nl|'\n'
string|"u'adminPass'"
op|':'
string|"u'4ZQ3bb6WYbC2'"
op|'}'
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'sample_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dict_no_match_extra_key
dedent|''
dedent|''
name|'def'
name|'test_dict_no_match_extra_key'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check 2 dictionaries where one has an extra key"""'
newline|'\n'
name|'template_data'
op|'='
op|'{'
nl|'\n'
string|"u'server'"
op|':'
op|'{'
nl|'\n'
string|"u'id'"
op|':'
string|"u'%(id)s'"
op|','
nl|'\n'
string|"u'adminPass'"
op|':'
string|"u'%(password)s'"
op|','
nl|'\n'
string|"u'foo'"
op|':'
string|"u'foo'"
nl|'\n'
op|'}'
nl|'\n'
op|'}'
newline|'\n'
name|'response_data'
op|'='
op|'{'
nl|'\n'
string|"u'server'"
op|':'
op|'{'
nl|'\n'
string|"u'id'"
op|':'
string|"u'858f295a-8543-45fa-804a-08f8356d616d'"
op|','
nl|'\n'
string|"u'adminPass'"
op|':'
string|"u'4ZQ3bb6WYbC2'"
op|'}'
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dict_result_type_mismatch
dedent|''
dedent|''
name|'def'
name|'test_dict_result_type_mismatch'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check expected is a dictionary and result is not a dictionary"""'
newline|'\n'
nl|'\n'
name|'template_data'
op|'='
op|'{'
nl|'\n'
string|"u'server'"
op|':'
op|'{'
nl|'\n'
string|"u'id'"
op|':'
string|"u'%(id)s'"
op|','
nl|'\n'
string|"u'adminPass'"
op|':'
string|"u'%(password)s'"
op|','
nl|'\n'
op|'}'
nl|'\n'
op|'}'
newline|'\n'
name|'response_data'
op|'='
string|"u'foo'"
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
comment|'# TODO(auggy): _compare_result needs a consistent return value'
nl|'\n'
DECL|member|test_list_match
dedent|''
dedent|''
name|'def'
name|'test_list_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check 2 matching lists"""'
newline|'\n'
name|'template_data'
op|'='
op|'{'
nl|'\n'
string|"u'links'"
op|':'
nl|'\n'
op|'['
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
string|"u'%(versioned_compute_endpoint)s/server/%(uuid)s'"
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'self'"
nl|'\n'
op|'}'
op|','
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
string|"u'%(compute_endpoint)s/servers/%(uuid)s'"
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'bookmark'"
nl|'\n'
op|'}'
nl|'\n'
op|']'
nl|'\n'
op|'}'
newline|'\n'
name|'response_data'
op|'='
op|'{'
nl|'\n'
string|"u'links'"
op|':'
nl|'\n'
op|'['
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
nl|'\n'
op|'('
string|"u'http://openstack.example.com/v2/%s/server/'"
nl|'\n'
string|"'858f295a-8543-45fa-804a-08f8356d616d'"
op|'%'
nl|'\n'
name|'api_samples_test_base'
op|'.'
name|'PROJECT_ID'
nl|'\n'
op|')'
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'self'"
nl|'\n'
op|'}'
op|','
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
nl|'\n'
op|'('
string|"u'http://openstack.example.com/%s/servers/'"
nl|'\n'
string|"'858f295a-8543-45fa-804a-08f8356d616d'"
op|'%'
nl|'\n'
name|'api_samples_test_base'
op|'.'
name|'PROJECT_ID'
nl|'\n'
op|')'
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'bookmark'"
nl|'\n'
op|'}'
nl|'\n'
op|']'
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'result'
op|'='
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'None'
op|','
nl|'\n'
name|'observed'
op|'='
name|'result'
op|','
nl|'\n'
name|'message'
op|'='
string|"'Check _compare_result of 2 lists'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_list_match_extra_item_result
dedent|''
name|'def'
name|'test_list_match_extra_item_result'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check extra list items in result """'
newline|'\n'
name|'template_data'
op|'='
op|'{'
nl|'\n'
string|"u'links'"
op|':'
nl|'\n'
op|'['
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
string|"u'%(versioned_compute_endpoint)s/server/%(uuid)s'"
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'self'"
nl|'\n'
op|'}'
op|','
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
string|"u'%(compute_endpoint)s/servers/%(uuid)s'"
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'bookmark'"
nl|'\n'
op|'}'
nl|'\n'
op|']'
nl|'\n'
op|'}'
newline|'\n'
name|'response_data'
op|'='
op|'{'
nl|'\n'
string|"u'links'"
op|':'
nl|'\n'
op|'['
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
nl|'\n'
op|'('
string|"u'http://openstack.example.com/v2/openstack/server/'"
nl|'\n'
string|"'858f295a-8543-45fa-804a-08f8356d616d'"
op|')'
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'self'"
nl|'\n'
op|'}'
op|','
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
nl|'\n'
op|'('
string|"u'http://openstack.example.com/openstack/servers/'"
nl|'\n'
string|"'858f295a-8543-45fa-804a-08f8356d616d'"
op|')'
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'bookmark'"
nl|'\n'
op|'}'
op|','
nl|'\n'
string|"u'foo'"
nl|'\n'
op|']'
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_list_match_extra_item_template
dedent|''
dedent|''
name|'def'
name|'test_list_match_extra_item_template'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check extra list items in template """'
newline|'\n'
name|'template_data'
op|'='
op|'{'
nl|'\n'
string|"u'links'"
op|':'
nl|'\n'
op|'['
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
string|"u'%(versioned_compute_endpoint)s/server/%(uuid)s'"
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'self'"
nl|'\n'
op|'}'
op|','
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
string|"u'%(compute_endpoint)s/servers/%(uuid)s'"
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'bookmark'"
nl|'\n'
op|'}'
op|','
nl|'\n'
string|"u'foo'"
comment|'# extra field'
nl|'\n'
op|']'
nl|'\n'
op|'}'
newline|'\n'
name|'response_data'
op|'='
op|'{'
nl|'\n'
string|"u'links'"
op|':'
nl|'\n'
op|'['
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
nl|'\n'
op|'('
string|"u'http://openstack.example.com/v2/openstack/server/'"
nl|'\n'
string|"'858f295a-8543-45fa-804a-08f8356d616d'"
op|')'
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'self'"
nl|'\n'
op|'}'
op|','
nl|'\n'
op|'{'
nl|'\n'
string|"u'href'"
op|':'
nl|'\n'
op|'('
string|"u'http://openstack.example.com/openstack/servers/'"
nl|'\n'
string|"'858f295a-8543-45fa-804a-08f8356d616d'"
op|')'
op|','
nl|'\n'
string|"u'rel'"
op|':'
string|"u'bookmark'"
nl|'\n'
op|'}'
nl|'\n'
op|']'
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_list_no_match
dedent|''
dedent|''
name|'def'
name|'test_list_no_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check 2 matching lists"""'
newline|'\n'
name|'template_data'
op|'='
op|'{'
nl|'\n'
string|"u'things'"
op|':'
nl|'\n'
op|'['
nl|'\n'
op|'{'
nl|'\n'
string|"u'foo'"
op|':'
string|"u'bar'"
op|','
nl|'\n'
string|"u'baz'"
op|':'
number|'0'
nl|'\n'
op|'}'
op|','
nl|'\n'
op|'{'
nl|'\n'
string|"u'foo'"
op|':'
string|"u'zod'"
op|','
nl|'\n'
string|"u'baz'"
op|':'
number|'1'
nl|'\n'
op|'}'
nl|'\n'
op|']'
nl|'\n'
op|'}'
newline|'\n'
name|'response_data'
op|'='
op|'{'
nl|'\n'
string|"u'things'"
op|':'
nl|'\n'
op|'['
nl|'\n'
op|'{'
nl|'\n'
string|"u'foo'"
op|':'
string|"u'bar'"
op|','
nl|'\n'
string|"u'baz'"
op|':'
string|"u'0'"
nl|'\n'
op|'}'
op|','
nl|'\n'
op|'{'
nl|'\n'
string|"u'foo'"
op|':'
string|"u'zod'"
op|','
nl|'\n'
string|"u'baz'"
op|':'
number|'1'
nl|'\n'
op|'}'
nl|'\n'
op|']'
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
comment|'# TODO(auggy): This error returns "extra list items"'
nl|'\n'
comment|"# it should show the item/s in the list that didn't match"
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_none_match
dedent|''
dedent|''
name|'def'
name|'test_none_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check that None matches"""'
newline|'\n'
name|'sample_data'
op|'='
name|'None'
newline|'\n'
name|'response_data'
op|'='
name|'None'
newline|'\n'
name|'result'
op|'='
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'sample_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
comment|'# NOTE(auggy): _compare_result will not return a matched value in the'
nl|'\n'
comment|"# case of bare strings. If they don't match it will throw an exception,"
nl|'\n'
comment|'# otherwise it returns "None".'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'None'
op|','
nl|'\n'
name|'observed'
op|'='
name|'result'
op|','
nl|'\n'
name|'message'
op|'='
string|"'Check _compare_result of None'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_none_no_match
dedent|''
name|'def'
name|'test_none_no_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check expected none and non-None response don\'t match"""'
newline|'\n'
name|'sample_data'
op|'='
name|'None'
newline|'\n'
name|'response_data'
op|'='
string|"u'bar'"
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'sample_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_none_result_no_match
dedent|''
dedent|''
name|'def'
name|'test_none_result_no_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check result none and expected non-None response don\'t match"""'
newline|'\n'
name|'sample_data'
op|'='
string|"u'foo'"
newline|'\n'
name|'response_data'
op|'='
name|'None'
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'api_samples_test_base'
op|'.'
name|'NoMatch'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'sample_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_template_no_subs_key
dedent|''
dedent|''
name|'def'
name|'test_template_no_subs_key'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""check an int value of a template int throws exception"""'
newline|'\n'
name|'template_data'
op|'='
string|"u'%(foo)'"
newline|'\n'
name|'response_data'
op|'='
string|"'bar'"
newline|'\n'
nl|'\n'
name|'with'
name|'testtools'
op|'.'
name|'ExpectedException'
op|'('
name|'KeyError'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ast'
op|'.'
name|'_compare_result'
op|'('
nl|'\n'
name|'expected'
op|'='
name|'template_data'
op|','
nl|'\n'
name|'result'
op|'='
name|'response_data'
op|','
nl|'\n'
name|'result_str'
op|'='
string|'"Test"'
op|')'
newline|'\n'
dedent|''
dedent|''
dedent|''
endmarker|''
end_unit
| 14.030586 | 420 | 0.610419 | 4,213 | 27,065 | 3.826015 | 0.060527 | 0.059557 | 0.061728 | 0.046343 | 0.862585 | 0.84298 | 0.824245 | 0.803772 | 0.758484 | 0.743532 | 0 | 0.015238 | 0.105265 | 27,065 | 1,928 | 421 | 14.037863 | 0.650396 | 0 | 0 | 0.935166 | 0 | 0.000519 | 0.823351 | 0.12773 | 0 | 0 | 0 | 0 | 0.003631 | 0 | null | null | 0.005187 | 0.002593 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
ecb19fa40ea772a96ed4b8b57e5259a6b08c2a42 | 23,993 | py | Python | tests/test_hbs.py | nkarjala/tf-vrouter-1 | dd8606fcc6b91e041130276aead42433978e4ced | [
"BSD-2-Clause"
] | 1 | 2022-01-20T03:23:49.000Z | 2022-01-20T03:23:49.000Z | tests/test_hbs.py | nkarjala/tf-vrouter-1 | dd8606fcc6b91e041130276aead42433978e4ced | [
"BSD-2-Clause"
] | null | null | null | tests/test_hbs.py | nkarjala/tf-vrouter-1 | dd8606fcc6b91e041130276aead42433978e4ced | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/python
from topo_base.fabric_to_vm_inter_vn import FabricToVmInterVn
from topo_base.fabric_to_vm_intra_vn import FabricToVmIntraVn
from topo_base.vm_to_fabric_inter_vn import VmToFabricInterVn
from topo_base.vm_to_fabric_intra_vn import VmToFabricIntraVn
from topo_base.vm_to_vm_inter_vn import VmToVmInterVn
from topo_base.vm_to_vm_intra_vn import VmToVmIntraVn
import os
import sys
sys.path.append(os.getcwd())
sys.path.append(os.getcwd() + '/lib/')
from imports import * # noqa
class TestHbsFabricToVmInterVn(FabricToVmInterVn):
    """Host-based-service (HBS) test for fabric-to-VM traffic across VNs.

    Sets up a left/right HBS vif pair, marks the flow pair for HBS
    steering, then verifies a fabric MPLSoUDP packet is diverted to the
    hbs-r vif and that a packet re-injected on hbs-l reaches the tenant
    vif.
    """

    def test_hbs_fabric_to_vm_inter_vn(self):
        """Fabric -> hbs-r, then hbs-l -> tenant vif; check vif counters."""
        # Add hbs-l vif
        hbs_l_vif = VirtualVif(
            name="tap1589a2b3-22",
            ipv4_str="100.100.100.4",
            mac_str="00:00:5e:00:01:00",
            idx=3,
            vrf=3,
            flags=constants.VIF_FLAG_HBS_LEFT)
        hbs_l_vif.sync()
        # Add hbs-r vif
        hbs_r_vif = VirtualVif(
            name="tap8b05a86b-36",
            ipv4_str="200.200.200.4",
            mac_str="00:00:5e:00:01:00",
            idx=4,
            vrf=4,
            flags=constants.VIF_FLAG_HBS_RIGHT)
        hbs_r_vif.sync()
        # Add hbs-l and hbs-r in the vrf table
        vrf = Vrf(
            vrf_rid=0,
            vrf_idx=5,
            vrf_flags=constants.VRF_FLAG_VALID |
            constants.VRF_FLAG_HBS_L_VALID |
            constants.VRF_FLAG_HBS_R_VALID,
            vrf_hbfl_vif_idx=3,
            vrf_hbfr_vif_idx=4)
        vrf.sync()
        # Mark both directions of the flow (created by the base topology)
        # for HBS left/right steering.
        self.f_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_LEFT
        self.r_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_RIGHT
        self.f_flow.sync()
        self.r_flow.sync()
        # send mplsudp packet from fabric
        icmp_inner = IcmpPacket(
            sip='2.2.2.3',
            dip='1.1.1.3',
            icmp_type=constants.ECHO_REPLY,
            id=4145)
        pkt = icmp_inner.get_packet()
        pkt.show()
        self.assertIsNotNone(pkt)
        mpls = MplsoUdpPacket(
            label=42,
            sip='8.0.0.3',
            dip='8.0.0.2',
            smac='00:1b:21:bb:f9:46',
            dmac='00:1b:21:bb:f9:48',
            sport=53363,
            dport=6635,
            id=10,
            inner_pkt=pkt)
        pkt = mpls.get_packet()
        pkt.show()
        self.assertIsNotNone(pkt)
        # Make sure the packet comes goes to hbs-r (tap8b05a86b-36)
        rcv_pkt = self.fabric_interface.send_and_receive_packet(
            pkt, hbs_r_vif)
        # TODO: Send the rcv_pkt to the next call instead of
        # forming a new packet
        # Inject the packet from hbs-l to vrouter
        # Encode the flow id in the dst mac of the packet
        icmp = IcmpPacket(
            sip='1.0.0.5',
            dip='1.0.0.3',
            smac='00:00:5e:00:01:00',
            dmac='c0:d2:00:06:08:f0',
            icmp_type=0,
            id=4145)
        pkt = icmp.get_packet()
        pkt.show()
        self.assertIsNotNone(pkt)
        # Send it to hbs-l
        rcv_pkt = hbs_l_vif.send_and_receive_packet(pkt, self.tenant_vif)
        # Check if the packet was sent to vrouter (by vtest) on fabric
        # and received at tenant_vif (by vtest)
        self.assertEqual(1, self.fabric_interface.get_vif_ipackets())
        self.assertEqual(1, self.tenant_vif.get_vif_opackets())
        # Check if the packet was sent to hbs-r (by vrouter)
        # and received at hbs-l (by vtest)
        self.assertEqual(1, hbs_r_vif.get_vif_opackets())
        self.assertEqual(1, hbs_l_vif.get_vif_ipackets())
class TestHbsFabricToVmIntraVn(FabricToVmIntraVn):
    """HBS tests for fabric-to-VM traffic within a single virtual network."""

    def test_hbs_fabric_to_vm_intra_vn(self):
        """Fabric MPLSoUDP -> hbs-r, re-inject on hbs-l -> tenant vif.

        Verifies the decapsulated inner packet (addresses and MACs) and
        the per-vif packet counters.
        """
        # Add hbs-l vif
        hbs_l_vif = VirtualVif(
            name="tap1589a2b3-22",
            ipv4_str="100.100.100.4",
            mac_str="00:00:5e:00:01:00",
            idx=3,
            vrf=3,
            flags=constants.VIF_FLAG_HBS_LEFT)
        hbs_l_vif.sync()
        # Add hbs-r vif
        hbs_r_vif = VirtualVif(
            name="tap8b05a86b-36",
            ipv4_str="200.200.200.4",
            mac_str="00:00:5e:00:01:00",
            idx=4,
            vrf=4,
            flags=constants.VIF_FLAG_HBS_RIGHT)
        hbs_r_vif.sync()
        # Add Bridge Route
        bridge_route = BridgeRoute(
            vrf=5,
            mac_str="02:c2:23:4c:d0:55",
            nh_idx=44)
        bridge_route.sync()
        # Add hbs-l and hbs-r in the vrf table
        vrf = Vrf(
            vrf_rid=0,
            vrf_idx=5,
            vrf_flags=constants.VRF_FLAG_VALID |
            constants.VRF_FLAG_HBS_L_VALID |
            constants.VRF_FLAG_HBS_R_VALID,
            vrf_hbfl_vif_idx=3,
            vrf_hbfr_vif_idx=4)
        vrf.sync()
        self.f_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_LEFT
        self.r_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_RIGHT
        self.f_flow.sync()
        self.r_flow.sync()
        # send mplsudp packet from fabric
        icmp_inner = IcmpPacket(
            sip='1.1.1.5',
            dip='1.1.1.3',
            smac='02:e7:03:ea:67:f1',
            dmac='02:c2:23:4c:d0:55',
            icmp_type=constants.ECHO_REPLY,
            id=4145)
        pkt = icmp_inner.get_packet()
        pkt.show()
        self.assertIsNotNone(pkt)
        mpls = MplsoUdpPacket(
            label=42,
            sip='8.0.0.3',
            dip='8.0.0.2',
            smac='00:1b:21:bb:f9:46',
            dmac='00:1b:21:bb:f9:48',
            sport=53363,
            dport=6635,
            id=10,
            inner_pkt=pkt)
        pkt = mpls.get_packet()
        pkt.show()
        self.assertIsNotNone(pkt)
        # Make sure the packet comes goes to hbs-r (tap8b05a86b-36)
        hbsr_pkt = self.fabric_interface.send_and_receive_packet(
            pkt, hbs_r_vif)
        # Send it to hbs-l
        tenant_pkt = hbs_l_vif.send_and_receive_packet(
            hbsr_pkt, self.tenant_vif)
        self.assertIsNotNone(tenant_pkt)
        self.assertTrue(ICMP in tenant_pkt)
        self.assertEqual("1.1.1.5", tenant_pkt[IP].src)
        self.assertEqual("1.1.1.3", tenant_pkt[IP].dst)
        self.assertEqual("02:c2:23:4c:d0:55", tenant_pkt[Ether].dst)
        self.assertEqual("02:e7:03:ea:67:f1", tenant_pkt[Ether].src)
        # Check if the packet was sent to vrouter (by vtest) on fabric
        # and received at tenant_vif (by vtest)
        self.assertEqual(1, self.fabric_interface.get_vif_ipackets())
        self.assertEqual(1, self.tenant_vif.get_vif_opackets())
        # Check if the packet was sent to hbs-r (by vrouter)
        # and received at hbs-l (by vtest)
        self.assertEqual(1, hbs_r_vif.get_vif_opackets())
        self.assertEqual(1, hbs_l_vif.get_vif_ipackets())

    def test_hbs_cem_11144(self):
        """Regression test for CEM-11144: a packet held on a hold-state
        flow must be flushed to hbs-r once the reverse flow is activated
        with the HBS_RIGHT flag.
        """
        # Add hbs-l vif
        hbs_l_vif = VirtualVif(
            name="tap1589a2b3-22",
            ipv4_str="100.100.100.4",
            mac_str="00:00:5e:00:01:00",
            idx=3,
            vrf=3,
            flags=constants.VIF_FLAG_HBS_LEFT)
        hbs_l_vif.sync()
        # Add hbs-r vif
        hbs_r_vif = VirtualVif(
            name="tap8b05a86b-36",
            ipv4_str="200.200.200.4",
            mac_str="00:00:5e:00:01:00",
            idx=4,
            vrf=4,
            flags=constants.VIF_FLAG_HBS_RIGHT)
        hbs_r_vif.sync()
        # Add Bridge Route
        bridge_route = BridgeRoute(
            vrf=5,
            mac_str="02:c2:23:4c:d0:55",
            nh_idx=44)
        bridge_route.sync()
        # Add hbs-l and hbs-r in the vrf table
        vrf = Vrf(
            vrf_rid=0,
            vrf_idx=5,
            vrf_flags=constants.VRF_FLAG_VALID |
            constants.VRF_FLAG_HBS_L_VALID |
            constants.VRF_FLAG_HBS_R_VALID,
            vrf_hbfl_vif_idx=3,
            vrf_hbfr_vif_idx=4)
        vrf.sync()
        # Remove the pre-created flow pair so the next packet creates a
        # flow in hold state.
        self.f_flow.delete()
        self.r_flow.delete()
        # send mplsudp packet from fabric
        # This creates a flow in hold state
        icmp_inner = IcmpPacket(
            sip='1.1.1.5',
            dip='1.1.1.3',
            smac='02:e7:03:ea:67:f1',
            dmac='02:c2:23:4c:d0:55',
            icmp_type=constants.ECHO_REPLY,
            id=4145)
        pkt = icmp_inner.get_packet()
        pkt.show()
        self.assertIsNotNone(pkt)
        mpls = MplsoUdpPacket(
            label=42,
            sip='8.0.0.3',
            dip='8.0.0.2',
            smac='00:1b:21:bb:f9:46',
            dmac='00:1b:21:bb:f9:48',
            sport=53363,
            dport=6635,
            id=10,
            inner_pkt=pkt)
        pkt = mpls.get_packet()
        pkt.show()
        self.assertIsNotNone(pkt)
        # Send the packet from fabric
        rcv_pkt = self.fabric_interface.send_packet(
            pkt)
        # Flow is created but in Hold state
        # Set forwarding action for rflow now
        self.r_flow.fr_gen_id = self.r_flow.fr_gen_id + 1
        self.r_flow.fr_flags = constants.VR_FLOW_FLAG_ACTIVE
        self.r_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_RIGHT
        self.r_flow.sync(resp_required=True)
        # Wait for some time for the held packet to be flushed by vrouter
        time.sleep(2)
        # Check if the flushed packet was sent by vrouter on hbs-r
        self.assertEqual(1, hbs_r_vif.get_vif_opackets())
class TestHbsVmToFabricInterVn(VmToFabricInterVn):
    """HBS test for VM-to-fabric traffic across virtual networks."""

    def test_hbs_vm_to_fabric_inter_vn(self):
        """Tenant vif -> hbs-l, re-inject on hbs-r -> fabric (MPLSoUDP)."""
        # Add hbs-l vif
        hbs_l_vif = VirtualVif(
            name="tap1589a2b3-22",
            ipv4_str="100.100.100.4",
            mac_str="00:00:5e:00:01:00",
            idx=4,
            vrf=3,
            flags=constants.VIF_FLAG_HBS_LEFT)
        hbs_l_vif.sync()
        # Add hbs-r vif
        hbs_r_vif = VirtualVif(
            name="tap8b05a86b-36",
            ipv4_str="200.200.200.4",
            mac_str="00:00:5e:00:01:00",
            idx=5,
            vrf=4,
            flags=constants.VIF_FLAG_HBS_RIGHT)
        hbs_r_vif.sync()
        # Add hbs-l and hbs-r in vrf table
        vrf = Vrf(
            vrf_rid=0,
            vrf_idx=2,
            vrf_flags=constants.VRF_FLAG_VALID |
            constants.VRF_FLAG_HBS_L_VALID |
            constants.VRF_FLAG_HBS_R_VALID,
            vrf_hbfl_vif_idx=4,
            vrf_hbfr_vif_idx=5)
        vrf.sync()
        self.f_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_LEFT
        self.r_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_RIGHT
        self.f_flow.sync()
        self.r_flow.sync()
        icmp_pkt = IcmpPacket(
            sip='1.1.1.3',
            dip='2.2.2.3',
            smac='02:03:eb:4b:e8:d8',
            dmac='00:00:5e:00:01:00',
            id=1418)
        pkt = icmp_pkt.get_packet()
        pkt.show()
        self.assertIsNotNone(pkt)
        # send packet
        hbfl_pkt = self.tenant_vif.send_and_receive_packet(pkt, hbs_l_vif)
        self.assertIsNotNone(hbfl_pkt)
        hbfl_pkt.show()
        # Send it to hbs-r and expect response on fabric
        fab_pkt = hbs_r_vif.send_and_receive_packet(
            hbfl_pkt, self.fabric_interface)
        self.assertIsNotNone(fab_pkt)
        fab_pkt.show()
        # Check if fabric got a MPLSoUDP packet
        self.assertTrue((UDP in fab_pkt) and (fab_pkt[UDP].dport == 6635))
        # Check if the packet was sent to vrouter (by vtest) on tenant_vif
        # and received at fabric (by vtest)
        self.assertEqual(1, self.tenant_vif.get_vif_ipackets())
        self.assertEqual(1, self.fabric_interface.get_vif_opackets())
        # Check if the packet was sent to hbs-l (by vrouter)
        # and received at hbs-r (by vtest)
        self.assertEqual(1, hbs_l_vif.get_vif_opackets())
        self.assertEqual(1, hbs_r_vif.get_vif_ipackets())
class TestHbsVmToFabricIntraVn(VmToFabricIntraVn):
    """HBS test for VM-to-fabric traffic within a single virtual network."""

    def test_hbs_vm_to_fabric_intra_vn(self):
        """Tenant vif -> hbs-l, re-inject on hbs-r -> fabric.

        The packet received on hbs-l is re-injected unchanged through
        hbs-r and must come out on the fabric vif as an MPLSoUDP packet
        (outer addresses 8.0.0.2 -> 8.0.0.3, dport 6635).
        """
        # Add hbs-l vif
        hbs_l_vif = VirtualVif(
            name="tap1589a2b3-22",
            ipv4_str="100.100.100.4",
            mac_str="00:00:5e:00:01:00",
            idx=3,
            vrf=3,
            flags=constants.VIF_FLAG_HBS_LEFT)
        hbs_l_vif.sync()
        # Add hbs-r vif
        hbs_r_vif = VirtualVif(
            name="tap8b05a86b-36",
            ipv4_str="200.200.200.4",
            mac_str="00:00:5e:00:01:00",
            idx=4,
            vrf=4,
            flags=constants.VIF_FLAG_HBS_RIGHT)
        hbs_r_vif.sync()
        # Add hbs-l and hbs-r in the vrf table
        vrf = Vrf(
            vrf_rid=0,
            vrf_idx=5,
            vrf_flags=constants.VRF_FLAG_VALID |
            constants.VRF_FLAG_HBS_L_VALID |
            constants.VRF_FLAG_HBS_R_VALID,
            vrf_hbfl_vif_idx=3,
            vrf_hbfr_vif_idx=4)
        vrf.sync()
        self.f_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_LEFT
        self.r_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_RIGHT
        self.f_flow.sync()
        self.r_flow.sync()
        # send ping request from tenant_vif
        icmp = IcmpPacket(
            sip='1.0.0.3',
            dip='1.0.0.5',
            smac='02:c2:23:4c:d0:55',
            dmac='02:e7:03:ea:67:f1',
            id=4145)
        pkt = icmp.get_packet()
        pkt.show()
        # send packet and receive on hbs-l
        hbsl_pkt = self.tenant_vif.send_and_receive_packet(pkt, hbs_l_vif)
        # Re-inject the hbs-l copy through hbs-r (vrouter decodes the flow
        # id from the packet) and expect the response on the fabric vif.
        fabric_pkt = hbs_r_vif.send_and_receive_packet(
            hbsl_pkt, self.fabric_vif)
        self.assertIsNotNone(fabric_pkt)
        self.assertTrue(UDP in fabric_pkt)
        self.assertEqual(6635, fabric_pkt[UDP].dport)
        self.assertEqual("8.0.0.2", fabric_pkt[IP].src)
        self.assertEqual("8.0.0.3", fabric_pkt[IP].dst)
        # Check if the packet was sent to vrouter (by vtest) on tenant_vif
        # and received at fabric (by test)
        self.assertEqual(1, self.tenant_vif.get_vif_ipackets())
        self.assertEqual(1, self.fabric_vif.get_vif_opackets())
        # Check if the packet was sent to hbs-l (by vrouter)
        # and received at hbs-r (by vtest)
        self.assertEqual(1, hbs_l_vif.get_vif_opackets())
        self.assertEqual(1, hbs_r_vif.get_vif_ipackets())
class TestHbsVmToVmInterVn(VmToVmInterVn):
    """HBS test for VM-to-VM traffic across virtual networks."""

    def test_hbs_vm_to_vm_inter_vn(self):
        """Ping request vif3 -> hbs-l -> hbs-r -> vif4, then the reply
        vif4 -> hbs-r -> hbs-l -> vif3; verify packet contents and the
        per-vif counters in both directions.
        """
        # Add hbs-l vif
        hbs_l_vif = VirtualVif(
            name="tap3",
            ipv4_str="100.100.100.4",
            mac_str="00:00:5e:00:01:00",
            idx=5,
            vrf=3,
            flags=constants.VIF_FLAG_HBS_LEFT)
        hbs_l_vif.sync()
        # Add hbs-r vif
        hbs_r_vif = VirtualVif(
            name="tap4",
            ipv4_str="200.200.200.4",
            mac_str="00:00:5e:00:01:00",
            idx=6,
            vrf=4,
            flags=constants.VIF_FLAG_HBS_RIGHT)
        hbs_r_vif.sync()
        # Add vif3 Nexthop (bridge)
        # pkt from hbs-r to vif 3 will need a lookup of dst-mac
        # in the bridge table
        # this is because dmac would have been encoded with flow id
        vif3_nhb = EncapNextHop(
            encap_oif_id=self.vif3.idx(),
            encap="02 88 67 0c 2e 11 00 00 5e 00 01 00 08 00",
            nh_idx=27,
            nh_family=constants.AF_BRIDGE,
            nh_vrf=3,
            nh_flags=constants.NH_FLAG_POLICY_ENABLED |
            constants.NH_FLAG_ETREE_ROOT)
        vif3_nhb.sync()
        # Add bridge Route
        bridge_route = BridgeRoute(
            vrf=3,
            mac_str="02:88:67:0c:2e:11",
            nh_idx=27)
        bridge_route.sync()
        # Add hbs-l and hbs-r in the vrf table
        vrf = Vrf(
            vrf_rid=0,
            vrf_idx=3,
            vrf_flags=constants.VRF_FLAG_VALID |
            constants.VRF_FLAG_HBS_L_VALID |
            constants.VRF_FLAG_HBS_R_VALID,
            vrf_hbfl_vif_idx=5,
            vrf_hbfr_vif_idx=6)
        vrf.sync()
        # Add hbs-l and hbs-r in the vrf table
        vrf = Vrf(
            vrf_rid=0,
            vrf_idx=4,
            vrf_flags=constants.VRF_FLAG_VALID |
            constants.VRF_FLAG_HBS_L_VALID |
            constants.VRF_FLAG_HBS_R_VALID,
            vrf_hbfl_vif_idx=5,
            vrf_hbfr_vif_idx=6)
        vrf.sync()
        self.f_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_LEFT
        self.r_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_RIGHT
        self.f_flow.sync()
        self.r_flow.sync()
        # send ping request from vif3 and receive in hbs-l
        icmp = IcmpPacket(
            sip='1.1.1.4',
            dip='2.2.2.4',
            smac='02:88:67:0c:2e:11',
            dmac='00:00:5e:00:01:00',
            id=1136)
        pkt = icmp.get_packet()
        pkt.show()
        # send packet
        hbsl_pkt = self.vif3.send_and_receive_packet(pkt, hbs_l_vif)
        # send hbsl packet to hbs-r
        vif4_pkt = hbs_r_vif.send_and_receive_packet(hbsl_pkt, self.vif4)
        # check if we got ICMP packet
        self.assertTrue(ICMP in vif4_pkt)
        self.assertEqual('1.1.1.4', vif4_pkt[IP].src)
        self.assertEqual('2.2.2.4', vif4_pkt[IP].dst)
        # send ping response from tenant_vif4 and receive in hbs-r
        icmp = IcmpPacket(
            sip='2.2.2.4',
            dip='1.1.1.4',
            smac='02:e7:03:ea:67:f1',
            dmac='00:00:5e:00:01:00',
            icmp_type=constants.ECHO_REPLY,
            id=1136)
        pkt = icmp.get_packet()
        pkt.show()
        self.assertIsNotNone(pkt)
        # send packet
        hbsr_pkt = self.vif4.send_and_receive_packet(pkt, hbs_r_vif)
        hbsr_pkt.show()
        # TODO: Use hbsr_pkt instead of this
        #
        # send ping response from hbs-r and receive in tenant_vif3
        icmp = IcmpPacket(
            sip='2.2.2.4',
            dip='1.1.1.4',
            smac='00:00:5e:00:01:00',
            dmac='c0:d1:00:04:05:8c',
            icmp_type=constants.ECHO_REPLY,
            id=1136)
        pkt = icmp.get_packet()
        pkt.show()
        self.assertIsNotNone(pkt)
        # send packet
        vif3_pkt = hbs_l_vif.send_and_receive_packet(pkt, self.vif3)
        # check if we got ICMP packet
        # BUGFIX: this used to re-check vif4_pkt; the packet received on
        # vif3 is the one that must be validated here.
        self.assertTrue(ICMP in vif3_pkt)
        self.assertEqual('2.2.2.4', vif3_pkt[IP].src)
        self.assertEqual('1.1.1.4', vif3_pkt[IP].dst)
        # Check if the packet was sent on tenant_vif3 and received at hbs-l
        self.assertEqual(1, self.vif3.get_vif_ipackets())
        self.assertEqual(1, hbs_l_vif.get_vif_opackets())
        # Check if the packet was sent to hbs-r and received from tenant_vif4
        self.assertEqual(1, hbs_r_vif.get_vif_opackets())
        self.assertEqual(1, self.vif4.get_vif_ipackets())
        # Check if the packet was sent on tenant_vif4 and received at hbs-r
        self.assertEqual(1, self.vif4.get_vif_opackets())
        self.assertEqual(1, hbs_r_vif.get_vif_ipackets())
        # Check if the packet was sent to hbs-l and received from tenant_vif3
        self.assertEqual(1, self.vif3.get_vif_opackets())
        self.assertEqual(1, hbs_l_vif.get_vif_ipackets())
class TestHbsVmToVmIntraVn(VmToVmIntraVn):
    """HBS tests for VM-to-VM traffic within a single virtual network."""

    def test_hbs_left_vm_to_right_vm_intra_vm(self):
        """Ping request vif3 -> hbs-l, re-inject on hbs-r -> vif4."""
        # Add hbs-l vif
        hbs_l_vif = VirtualVif(
            name="tap1589a2b3-22",
            ipv4_str="100.100.100.4",
            mac_str="00:00:5e:00:01:00",
            idx=5,
            vrf=3,
            flags=constants.VIF_FLAG_HBS_LEFT)
        hbs_l_vif.sync()
        # Add hbs-r vif
        hbs_r_vif = VirtualVif(
            name="tap8b05a86b-36",
            ipv4_str="200.200.200.4",
            mac_str="00:00:5e:00:01:00",
            idx=6,
            vrf=4,
            flags=constants.VIF_FLAG_HBS_RIGHT)
        hbs_r_vif.sync()
        # Add hbs-l and hbs-r in the vrf table
        vrf = Vrf(
            vrf_rid=0,
            vrf_idx=2,
            vrf_flags=constants.VRF_FLAG_VALID |
            constants.VRF_FLAG_HBS_L_VALID |
            constants.VRF_FLAG_HBS_R_VALID,
            vrf_hbfl_vif_idx=5,
            vrf_hbfr_vif_idx=6)
        vrf.sync()
        self.f_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_LEFT
        self.r_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_RIGHT
        self.f_flow.sync()
        self.r_flow.sync()
        # send ping request from tenant_vif3
        icmp = IcmpPacket(
            sip='1.1.1.4',
            dip='1.1.1.5',
            smac='02:88:67:0c:2e:11',
            dmac='02:e7:03:ea:67:f1',
            id=1136)
        pkt = icmp.get_packet()
        pkt.show()
        # send packet and receive on hbs-l
        hbsl_pkt = self.vif3.send_and_receive_packet(pkt, hbs_l_vif)
        # send the packet on hbs-r and receive in vif4
        vif4_pkt = hbs_r_vif.send_and_receive_packet(hbsl_pkt, self.vif4)
        self.assertIsNotNone(vif4_pkt)
        self.assertTrue(ICMP in vif4_pkt)
        self.assertEqual("1.1.1.4", vif4_pkt[IP].src)
        self.assertEqual("1.1.1.5", vif4_pkt[IP].dst)
        # Check if the packet was sent on tenant_vif3 and received at
        # tenant_vif4
        self.assertEqual(1, self.vif3.get_vif_ipackets())
        self.assertEqual(1, self.vif4.get_vif_opackets())
        # Check if the packet was sent to hbs-l and received from hbs-r
        self.assertEqual(1, hbs_l_vif.get_vif_opackets())
        self.assertEqual(1, hbs_r_vif.get_vif_ipackets())

    def test_hbs_right_vm_to_left_vm_intra_vn(self):
        """Ping reply vif4 -> hbs-r, re-inject on hbs-l -> vif3."""
        # Add hbs-l vif
        hbs_l_vif = VirtualVif(
            name="tap1589a2b3-22",
            ipv4_str="100.100.100.4",
            mac_str="00:00:5e:00:01:00",
            idx=5,
            vrf=3,
            flags=constants.VIF_FLAG_HBS_LEFT)
        hbs_l_vif.sync()
        # Add hbs-r vif
        hbs_r_vif = VirtualVif(
            name="tap8b05a86b-36",
            ipv4_str="200.200.200.4",
            mac_str="00:00:5e:00:01:00",
            idx=6,
            vrf=4,
            flags=constants.VIF_FLAG_HBS_RIGHT)
        hbs_r_vif.sync()
        # Add hbs-l and hbs-r in the vrf table
        vrf = Vrf(
            vrf_rid=0,
            vrf_idx=2,
            vrf_flags=constants.VRF_FLAG_VALID |
            constants.VRF_FLAG_HBS_L_VALID |
            constants.VRF_FLAG_HBS_R_VALID,
            vrf_hbfl_vif_idx=5,
            vrf_hbfr_vif_idx=6)
        vrf.sync()
        self.f_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_LEFT
        self.r_flow.fr_flags1 = constants.VR_FLOW_FLAG1_HBS_RIGHT
        self.f_flow.sync()
        self.r_flow.sync()
        # send ping request from vif4
        icmp = IcmpPacket(
            sip='1.1.1.5',
            dip='1.1.1.4',
            smac='02:e7:03:ea:67:f1',
            dmac='02:88:67:0c:2e:11',
            icmp_type=constants.ECHO_REPLY,
            id=1136)
        pkt = icmp.get_packet()
        pkt.show()
        # send packet and receive on hbs-r
        hbsr_pkt = self.vif4.send_and_receive_packet(pkt, hbs_r_vif)
        # send packet in hbsl and receive on vif3
        vif3_pkt = hbs_l_vif.send_and_receive_packet(hbsr_pkt, self.vif3)
        self.assertIsNotNone(vif3_pkt)
        self.assertTrue(ICMP in vif3_pkt)
        self.assertEqual("1.1.1.5", vif3_pkt[IP].src)
        self.assertEqual("1.1.1.4", vif3_pkt[IP].dst)
        # Check if the packet was sent on vif4 and received at
        # vif3
        self.assertEqual(1, self.vif4.get_vif_ipackets())
        self.assertEqual(1, self.vif3.get_vif_opackets())
        # Check if the packet was sent to hbs-r and received from hbs-l
        self.assertEqual(1, hbs_r_vif.get_vif_opackets())
        self.assertEqual(1, hbs_l_vif.get_vif_ipackets())
| 32.379217 | 77 | 0.574668 | 3,526 | 23,993 | 3.669881 | 0.073738 | 0.02442 | 0.022179 | 0.013601 | 0.847063 | 0.814529 | 0.785935 | 0.780062 | 0.746291 | 0.731453 | 0 | 0.077287 | 0.321052 | 23,993 | 740 | 78 | 32.422973 | 0.717066 | 0.147293 | 0 | 0.791667 | 0 | 0 | 0.07786 | 0 | 0 | 0 | 0 | 0.001351 | 0.136364 | 1 | 0.015152 | false | 0 | 0.017045 | 0 | 0.043561 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ecde9e6a5950de81a4310cdf0f1672efe48b115e | 146 | py | Python | src/sentry_plugins/jira/__init__.py | dropkitchen/sentry-plugins | 9b9ea3d6e5bc747857fc8158f393a7b3c14dbe8f | [
"Apache-2.0"
] | null | null | null | src/sentry_plugins/jira/__init__.py | dropkitchen/sentry-plugins | 9b9ea3d6e5bc747857fc8158f393a7b3c14dbe8f | [
"Apache-2.0"
] | null | null | null | src/sentry_plugins/jira/__init__.py | dropkitchen/sentry-plugins | 9b9ea3d6e5bc747857fc8158f393a7b3c14dbe8f | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from sentry_plugins.base import assert_package_not_installed
assert_package_not_installed("sentry-jira")
| 24.333333 | 60 | 0.883562 | 20 | 146 | 5.85 | 0.6 | 0.222222 | 0.273504 | 0.42735 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075342 | 146 | 5 | 61 | 29.2 | 0.866667 | 0 | 0 | 0 | 0 | 0 | 0.075342 | 0 | 0 | 0 | 0 | 0 | 0.666667 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 8 |
01d28769495cb7e1df6e26e8c855a19d15955c04 | 575 | py | Python | train_ricord1a_timm-regnetx_002_coarse_dropout.py | BrunoKrinski/segtool | cb604b5f38104c43a76450136e37c3d1c4b6d275 | [
"MIT"
] | null | null | null | train_ricord1a_timm-regnetx_002_coarse_dropout.py | BrunoKrinski/segtool | cb604b5f38104c43a76450136e37c3d1c4b6d275 | [
"MIT"
] | null | null | null | train_ricord1a_timm-regnetx_002_coarse_dropout.py | BrunoKrinski/segtool | cb604b5f38104c43a76450136e37c3d1c4b6d275 | [
"MIT"
] | null | null | null | import os
ls=["python main.py --configs configs/train_ricord1a_unetplusplus_timm-regnetx_002_fold0_coarse_dropout.yml",
"python main.py --configs configs/train_ricord1a_unetplusplus_timm-regnetx_002_fold1_coarse_dropout.yml",
"python main.py --configs configs/train_ricord1a_unetplusplus_timm-regnetx_002_fold2_coarse_dropout.yml",
"python main.py --configs configs/train_ricord1a_unetplusplus_timm-regnetx_002_fold3_coarse_dropout.yml",
"python main.py --configs configs/train_ricord1a_unetplusplus_timm-regnetx_002_fold4_coarse_dropout.yml",
]
for l in ls:
os.system(l) | 52.272727 | 109 | 0.855652 | 85 | 575 | 5.376471 | 0.294118 | 0.109409 | 0.131291 | 0.207877 | 0.851204 | 0.851204 | 0.851204 | 0.851204 | 0.851204 | 0.851204 | 0 | 0.045956 | 0.053913 | 575 | 11 | 110 | 52.272727 | 0.794118 | 0 | 0 | 0 | 0 | 0 | 0.885417 | 0.668403 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
17256b95c833c1cb18f6b867961079acce5a7340 | 25,155 | py | Python | utils/utils_Ent_sgd_new.py | shiquanyang/GLMP | 7f085bdd66aa414c8a4efd301810ad81160ac563 | [
"MIT"
] | null | null | null | utils/utils_Ent_sgd_new.py | shiquanyang/GLMP | 7f085bdd66aa414c8a4efd301810ad81160ac563 | [
"MIT"
] | 3 | 2019-09-17T10:56:04.000Z | 2019-10-16T00:13:28.000Z | utils/utils_Ent_sgd_new.py | shiquanyang/GLMP | 7f085bdd66aa414c8a4efd301810ad81160ac563 | [
"MIT"
] | null | null | null | import json
import ast
from utils.utils_general import *
def read_langs_multiwoz(file_name, lang, task, max_line=None):
    """Parse a MultiWOZ-style dialogue file into GLMP training samples.

    Args:
        file_name: path to the dialogue text file.  Each dialogue starts
            with a '#domain' line, KB triples come as numbered lines
            without tabs, and user/system turns are tab-separated
            'user \t response \t gold_entities' lines; a blank line ends
            a dialogue.
        lang: vocabulary object exposing annotator2index, intent2index
            and state2index mappings.
        task: split name ('train'/'dev'/'test'), used to build the
            dialogue-id and intents/states file paths.
        max_line: optional cap on the number of dialogues read.

    Returns:
        (data, max_resp_len): list of per-turn sample dicts and the
        longest system response length (in words) seen.

    NOTE(review): ``task_type`` and ``ent_labels`` are only assigned on
    certain branches; a file without a leading '#' line, or a gold
    entity absent from kb_arr_plain, would raise NameError / reuse a
    stale value — presumably the data guarantees this never happens.
    """
    print(("Reading lines from {}".format(file_name)))
    data, context_arr, conv_arr, kb_arr, conv_arr_plain, kb_arr_plain = [], [], [], [], [], []
    max_resp_len = 0

    with open('data/multiwoz/multiwoz_entities.json') as f:
        global_entity = json.load(f)

    # Alternate (local-machine) paths kept for reference:
    # dialogue_id_path = '/Users/shiquan/PycharmProjects/deBiasing-Dialogue/Dialogue_Annotator/datasets/MultiWOZ_2.2/{}/{}_dialogue_ids.txt'.format(task, task)
    # intents_states_path = '/Users/shiquan/PycharmProjects/deBiasing-Dialogue/Dialogue_Annotator/datasets/MultiWOZ_2.2/{}/{}_intents_states.json'.format(task, task)
    dialogue_id_path = '/home/yimeng/shiquan/debiasing-glmp/GLMP/data/MultiWOZ_2.2/{}/{}_dialogue_ids.txt'.format(task, task)
    intents_states_path = '/home/yimeng/shiquan/debiasing-glmp/GLMP/data/MultiWOZ_2.2/{}/{}_intents_states.json'.format(task, task)
    # dialogue_id_path = '/Users/shiquan/PycharmProjects/GLMP/data/multiwoz/{}_dialogue_ids.txt'.format(task, task)
    # intents_states_path = '/Users/shiquan/PycharmProjects/GLMP/data/multiwoz/{}_intents_states.json'.format(task, task)

    # Map the dialogue's ordinal position in the file to its dialogue id.
    dialogue_ids = {}
    with open(dialogue_id_path, 'r') as f:
        line_cnt = 0
        for line in f:
            dialogue_ids[line_cnt] = line.strip()
            line_cnt += 1
    with open(intents_states_path, 'r') as f:
        intents_and_states = json.load(f)

    with open(file_name) as fin:
        # cnt_lin: dialogue index; sample_counter: global sample id;
        # turn_cnt: 1-based turn index within the current dialogue.
        cnt_lin, sample_counter, turn_cnt = 0, 1, 1
        for line in fin:
            line = line.strip()
            if line:
                if line.startswith("#"):
                    # '#domain' header line marks the dialogue's domain.
                    line = line.replace("#", "")
                    task_type = line
                    continue
                nid, line = line.split(' ', 1)
                if '\t' in line:
                    # deal with dialogue history
                    u, r, gold_ent = line.split('\t')
                    gen_u = generate_memory_multiwoz(u, "$u", str(nid))
                    context_arr += gen_u
                    conv_arr += gen_u
                    conv_arr_plain.append(u)

                    # The annotator id is appended as the last token of
                    # the user utterance; unknown ids map to 'NULL'.
                    annotator_id = u.rsplit(' ', 1)[-1]
                    if annotator_id not in lang.annotator2index.keys():
                        annotator_id_labels = [lang.annotator2index['NULL']] * (len(r.split())+1)
                    else:
                        annotator_id_labels = [lang.annotator2index[annotator_id]] * (len(r.split())+1)
                    dialogue_id = dialogue_ids[cnt_lin]
                    intents = intents_and_states[dialogue_id][str(turn_cnt)]['user_intents']
                    states = intents_and_states[dialogue_id][str(turn_cnt)]['dialogue_states']
                    # Skip turns whose state vector is not the expected
                    # 35 slots (incomplete annotation).
                    if len(states) != 35:
                        continue
                    user_intent_labels = [1 if key in intents else 0 for key in lang.intent2index]
                    dialogue_state_labels = [[lang.state2index[key][states[key]]] for key in states]

                    # Get gold entity for each domain
                    gold_ent = ast.literal_eval(gold_ent)
                    ent_idx_cal, ent_idx_nav, ent_idx_wet = [], [], []
                    ent_idx_restaurant, ent_idx_hotel, ent_idx_attraction, ent_idx_train, ent_idx_hospital = [], [], [], [], []
                    if task_type == "restaurant":
                        ent_idx_restaurant = gold_ent
                    elif task_type == "hotel":
                        ent_idx_hotel = gold_ent
                    elif task_type == "attraction":
                        ent_idx_attraction = gold_ent
                    elif task_type == "train":
                        ent_idx_train = gold_ent
                    elif task_type == "hospital":
                        ent_idx_hospital = gold_ent
                    ent_index = list(
                        set(ent_idx_restaurant + ent_idx_hotel + ent_idx_attraction + ent_idx_train + ent_idx_hospital))

                    # Get local pointer position for each word in system response
                    ptr_index = []
                    for key in r.split():
                        index = [loc for loc, val in enumerate(kb_arr_plain) if (val == key and key in ent_index)]
                        if (index):
                            index = max(index)
                        else:
                            # Non-entity words point at the NULL slot.
                            index = len(kb_arr_plain)
                        ptr_index.append(index)

                    # Get global pointer labels for words in system response, the 1 in the end is for the NULL token
                    selector_index = [1 if (word_arr in ent_index or word_arr in r.split()) else 0 for word_arr in
                                      kb_arr_plain] + [1]

                    sketch_response = generate_template_multiwoz(global_entity, r, gold_ent, kb_arr, task_type)

                    # obtain gt entity labels
                    if len(gold_ent) == 0:
                        ent_labels = len(kb_arr_plain)
                    elif len(gold_ent) >= 1:
                        # Label is the index of the first KB entry that
                        # appears among the gold entities.
                        for idx, ent in enumerate(kb_arr_plain):
                            if ent in gold_ent:
                                ent_labels = idx
                                break

                    data_detail = {
                        'context_arr': list(context_arr + [['$$$$'] * MEM_TOKEN_SIZE]),  # $$$$ is NULL token
                        'response': r,
                        'sketch_response': sketch_response,
                        # 'ptr_index': ptr_index + [len(context_arr)],
                        'ptr_index': ptr_index + [len(kb_arr_plain)],
                        # 'ptr_index': ptr_index,
                        'selector_index': selector_index,
                        'ent_index': ent_index,
                        'ent_idx_cal': list(set(ent_idx_cal)),
                        'ent_idx_nav': list(set(ent_idx_nav)),
                        'ent_idx_wet': list(set(ent_idx_wet)),
                        'conv_arr': list(conv_arr),
                        'conv_arr_plain': list(conv_arr_plain),
                        'kb_arr': list(kb_arr + [['$$$$'] * MEM_TOKEN_SIZE]),
                        'id': int(sample_counter),
                        'ID': int(cnt_lin),
                        'domain': task_type,
                        'ent_idx_restaurant': list(set(ent_idx_restaurant)),
                        'ent_idx_hotel': list(set(ent_idx_hotel)),
                        'ent_idx_attraction': list(set(ent_idx_attraction)),
                        'ent_idx_train': list(set(ent_idx_train)),
                        'ent_idx_hospital': list(set(ent_idx_hospital)),
                        'kb_arr_plain': list(kb_arr_plain + ["[NULL]"]),
                        'ent_labels': ent_labels,
                        'annotator_id_labels': annotator_id_labels,
                        'user_intent_labels': list(user_intent_labels),
                        'dialogue_state_labels': dialogue_state_labels,
                        'kb_arr_new': list(kb_arr_plain + ["[NULL]"]),
                    }
                    data.append(data_detail)

                    gen_r = generate_memory_multiwoz(r, "$s", str(nid))
                    context_arr += gen_r
                    conv_arr += gen_r
                    conv_arr_plain.append(r)
                    if max_resp_len < len(r.split()):
                        max_resp_len = len(r.split())
                    sample_counter += 1
                    turn_cnt += 1
                else:
                    # deal with knowledge graph
                    r = line
                    # KB lines are 'subject relation object' triples;
                    # subject and object are tracked in kb_arr_plain.
                    line_list = line.split(" ")
                    if line_list[0] not in kb_arr_plain:
                        kb_arr_plain.append(line_list[0])
                    if line_list[2] not in kb_arr_plain:
                        kb_arr_plain.append(line_list[2])
                    kb_info = generate_memory_multiwoz(r, "", str(nid))
                    if len(kb_info[0]) > 4:
                        print(kb_info)
                        print(r)
                    context_arr = kb_info + context_arr
                    kb_arr += kb_info
            else:
                # Blank line: current dialogue ended, reset per-dialogue
                # accumulators.
                cnt_lin += 1
                turn_cnt = 1
                context_arr, conv_arr, kb_arr, conv_arr_plain, kb_arr_plain = [], [], [], [], []
                if (max_line and cnt_lin >= max_line):
                    break
    return data, max_resp_len
def generate_template_multiwoz(global_entity, sentence, sent_ent, kb_arr, domain):
    """Build the sketch response for a MultiWOZ system utterance.

    Every word of *sentence* that occurs in *sent_ent* is replaced by an
    '@<entity_type>' placeholder, where the type is the first key of
    *global_entity* whose (lowercased) value list contains the word, with
    underscores optionally read as spaces.  All other words pass through
    unchanged.  ``kb_arr`` and ``domain`` are accepted for signature
    compatibility but unused.  Side effect: the value lists of
    *global_entity* are lowercased in place as they are scanned.
    """
    if sent_ent == []:
        return " ".join(sentence.split())
    tokens = []
    for tok in sentence.split():
        if tok not in sent_ent:
            tokens.append(tok)
            continue
        matched_type = None
        for entity_type in global_entity.keys():
            lowered = [v.lower() for v in global_entity[entity_type]]
            global_entity[entity_type] = lowered
            if tok in lowered or tok.replace('_', ' ') in lowered:
                matched_type = entity_type
                break
        tokens.append('@' + matched_type)
    return " ".join(tokens)
def generate_memory_multiwoz(sent, speaker, time):
    """Convert a sentence or KB triple into fixed-width memory cells.

    For dialogue turns (speaker '$u' or '$s') each word becomes one cell
    [word, speaker, turn tag, word-position tag] padded to
    MEM_TOKEN_SIZE.  Otherwise the whole (reversed) token list is padded
    into a single KB-triple cell.
    """
    tokens = sent.split(' ')
    cells = []
    if speaker in ("$u", "$s"):
        for pos, word in enumerate(tokens):
            cell = [word, speaker, 'turn' + str(time), 'word' + str(pos)]
            cell += ["PAD"] * (MEM_TOKEN_SIZE - 4)
            cells.append(cell)
    else:
        padded = tokens[::-1] + ["PAD"] * (MEM_TOKEN_SIZE - len(tokens))
        cells.append(padded)
    return cells
def read_langs(file_name, lang, task, max_line=None):
    """Parse an SGD-style dialogue file into GLMP training samples.

    Mirrors ``read_langs_multiwoz`` but reads SGD entity/intent/state
    files and labels the SGD domains (Travel/Hotel/Events/Weather plus a
    catch-all 'others').

    Args:
        file_name: dialogue text file ('#domain' header, KB triple
            lines, tab-separated 'user \t response \t gold_entities'
            turns, blank line between dialogues).
        lang: vocabulary object with annotator2index, intent2index and
            state2index mappings.
        task: split name used to build the id and intents/states paths.
        max_line: optional cap on the number of dialogues read.

    Returns:
        (data, max_resp_len).

    NOTE(review): like the MultiWOZ reader, ``task_type`` and
    ``ent_labels`` are only assigned on certain branches — the input
    format is presumably guaranteed to exercise them first.
    """
    print(("Reading lines from {}".format(file_name)))
    data, context_arr, conv_arr, kb_arr, conv_arr_plain, kb_arr_plain = [], [], [], [], [], []
    max_resp_len = 0

    with open('data/sgd/sgd_entities.json') as f:
        global_entity = json.load(f)

    # Alternate (local-machine) paths kept for reference:
    # dialogue_id_path = '/Users/shiquan/PycharmProjects/deBiasing-Dialogue/Dialogue_Annotator/datasets/MultiWOZ_2.2/{}/{}_dialogue_ids.txt'.format(task, task)
    # intents_states_path = '/Users/shiquan/PycharmProjects/deBiasing-Dialogue/Dialogue_Annotator/datasets/MultiWOZ_2.2/{}/{}_intents_states.json'.format(task, task)
    dialogue_id_path = '/home/yimeng/shiquan/GLMP/data/sgd/{}/{}_dialogue_ids.txt'.format(task, task)
    intents_states_path = '/home/yimeng/shiquan/GLMP/data/sgd/{}/{}_intents_states.json'.format(task, task)
    # dialogue_id_path = '/Users/shiquan/PycharmProjects/GLMP/data/sgd/{}_dialogue_ids.txt'.format(task, task)
    # intents_states_path = '/Users/shiquan/PycharmProjects/GLMP/data/sgd/{}_intents_states.json'.format(task, task)

    # Map the dialogue's ordinal position in the file to its dialogue id.
    dialogue_ids = {}
    with open(dialogue_id_path, 'r') as f:
        line_cnt = 0
        for line in f:
            dialogue_ids[line_cnt] = line.strip()
            line_cnt += 1
    with open(intents_states_path, 'r') as f:
        intents_and_states = json.load(f)

    with open(file_name) as fin:
        # cnt_lin: dialogue index; sample_counter: global sample id;
        # turn_cnt: 1-based turn index within the current dialogue.
        cnt_lin, sample_counter, turn_cnt = 0, 1, 1
        for line in fin:
            line = line.strip()
            if line:
                if line.startswith("#"):
                    # '#domain' header line marks the dialogue's domain.
                    line = line.replace("#", "")
                    task_type = line
                    continue
                nid, line = line.split(' ', 1)
                if '\t' in line:
                    # deal with dialogue history
                    u, r, gold_ent = line.split('\t')
                    gen_u = generate_memory(u, "$u", str(nid))
                    context_arr += gen_u
                    conv_arr += gen_u
                    conv_arr_plain.append(u)

                    # The annotator id is appended as the last token of
                    # the user utterance; unknown ids map to 'NULL'.
                    annotator_id = u.rsplit(' ', 1)[-1]
                    if annotator_id not in lang.annotator2index.keys():
                        annotator_id_labels = [lang.annotator2index['NULL']] * (len(r.split())+1)
                    else:
                        annotator_id_labels = [lang.annotator2index[annotator_id]] * (len(r.split())+1)
                    dialogue_id = dialogue_ids[cnt_lin]
                    intents = intents_and_states[dialogue_id][str(turn_cnt)]['user_intents']
                    states = intents_and_states[dialogue_id][str(turn_cnt)]['dialogue_states']
                    # Unlike the MultiWOZ reader, no fixed state-vector
                    # length is enforced here:
                    # if len(states) != 35:
                    #     continue
                    user_intent_labels = [1 if key in intents else 0 for key in lang.intent2index]
                    dialogue_state_labels = [[lang.state2index[key][states[key]]] for key in states]

                    # Get gold entity for each domain
                    gold_ent = ast.literal_eval(gold_ent)
                    ent_idx_travel, ent_idx_hotel, ent_idx_events, ent_idx_weather, ent_idx_others = [], [], [], [], []
                    if task_type == "Travel":
                        ent_idx_travel = gold_ent
                    elif task_type == "Hotel":
                        ent_idx_hotel = gold_ent
                    elif task_type == "Events":
                        ent_idx_events = gold_ent
                    elif task_type == "Weather":
                        ent_idx_weather = gold_ent
                    else:
                        ent_idx_others = gold_ent
                    ent_index = list(
                        set(ent_idx_travel + ent_idx_hotel + ent_idx_events + ent_idx_weather + ent_idx_others))

                    # Get local pointer position for each word in system response
                    ptr_index = []
                    for key in r.split():
                        index = [loc for loc, val in enumerate(kb_arr_plain) if (val == key and key in ent_index)]
                        if (index):
                            index = max(index)
                        else:
                            # Non-entity words point at the NULL slot.
                            index = len(kb_arr_plain)
                        ptr_index.append(index)

                    # Get global pointer labels for words in system response, the 1 in the end is for the NULL token
                    selector_index = [1 if (word_arr in ent_index or word_arr in r.split()) else 0 for word_arr in
                                      kb_arr_plain] + [1]

                    sketch_response = generate_template(global_entity, r, gold_ent, kb_arr, task_type)

                    # obtain gt entity labels
                    if len(gold_ent) == 0:
                        ent_labels = len(kb_arr_plain)
                    elif len(gold_ent) >= 1:
                        # Label is the index of the first KB entry that
                        # appears among the gold entities.
                        for idx, ent in enumerate(kb_arr_plain):
                            if ent in gold_ent:
                                ent_labels = idx
                                break

                    data_detail = {
                        'context_arr': list(context_arr + [['$$$$'] * MEM_TOKEN_SIZE]),  # $$$$ is NULL token
                        'response': r,
                        'sketch_response': sketch_response,
                        # 'ptr_index': ptr_index + [len(context_arr)],
                        'ptr_index': ptr_index + [len(kb_arr_plain)],
                        # 'ptr_index': ptr_index,
                        'selector_index': selector_index,
                        'ent_index': ent_index,
                        'ent_idx_travel': list(set(ent_idx_travel)),
                        'ent_idx_hotel': list(set(ent_idx_hotel)),
                        'ent_idx_events': list(set(ent_idx_events)),
                        'ent_idx_weather': list(set(ent_idx_weather)),
                        'ent_idx_others': list(set(ent_idx_others)),
                        'conv_arr': list(conv_arr),
                        'conv_arr_plain': list(conv_arr_plain),
                        'kb_arr': list(kb_arr + [['$$$$'] * MEM_TOKEN_SIZE]),
                        'id': int(sample_counter),
                        'ID': int(cnt_lin),
                        'domain': task_type,
                        'kb_arr_plain': list(kb_arr_plain + ["[NULL]"]),
                        'ent_labels': ent_labels,
                        'annotator_id_labels': annotator_id_labels,
                        'user_intent_labels': list(user_intent_labels),
                        'dialogue_state_labels': dialogue_state_labels,
                        'kb_arr_new': list(kb_arr_plain + ["[NULL]"]),
                    }
                    data.append(data_detail)

                    gen_r = generate_memory(r, "$s", str(nid))
                    context_arr += gen_r
                    conv_arr += gen_r
                    conv_arr_plain.append(r)
                    if max_resp_len < len(r.split()):
                        max_resp_len = len(r.split())
                    sample_counter += 1
                    turn_cnt += 1
                else:
                    # deal with knowledge graph
                    r = line
                    # KB lines are 'subject relation object' triples;
                    # subject and object are tracked in kb_arr_plain.
                    line_list = line.split(" ")
                    if line_list[0] not in kb_arr_plain:
                        kb_arr_plain.append(line_list[0])
                    if line_list[2] not in kb_arr_plain:
                        kb_arr_plain.append(line_list[2])
                    kb_info = generate_memory(r, "", str(nid))
                    if len(kb_info[0]) > 4:
                        print(kb_info)
                        print(r)
                    context_arr = kb_info + context_arr
                    kb_arr += kb_info
            else:
                # Blank line: current dialogue ended, reset per-dialogue
                # accumulators.
                cnt_lin += 1
                turn_cnt = 1
                context_arr, conv_arr, kb_arr, conv_arr_plain, kb_arr_plain = [], [], [], [], []
                if (max_line and cnt_lin >= max_line):
                    break
    return data, max_resp_len
def generate_template(global_entity, sentence, sent_ent, kb_arr, domain):
    """Build the sketch response for a system utterance.

    Every token of ``sentence`` that appears in ``sent_ent`` (the gold
    entities) is replaced by a typed placeholder ``@<entity_type>``; the type
    is looked up case-insensitively in ``global_entity``.  All other tokens
    are kept verbatim.

    Args:
        global_entity: mapping from entity type to a list of entity surface
            forms.  Left unmodified (the original mutated it in place and
            re-lowercased every list once per word).
        sentence: the system response, whitespace-tokenised.
        sent_ent: gold entities occurring in the sentence.
        kb_arr: unused in this body; kept for interface compatibility.
        domain: unused in this body; kept for interface compatibility.

    Returns:
        The sketch response as a single space-joined string.

    Raises:
        ValueError: if a gold entity cannot be matched to any entity type
            (the original crashed here with ``TypeError: '@' + None``).
    """
    if not sent_ent:
        return " ".join(sentence.split())
    # Lower-case the entity table once per call, without mutating the
    # caller's dictionary; sets give O(1) membership tests.
    lowered = {key: {x.lower() for x in vals} for key, vals in global_entity.items()}
    sketch_response = []
    for word in sentence.split():
        if word not in sent_ent:
            sketch_response.append(word)
            continue
        ent_type = None
        for key, entities in lowered.items():
            if word in entities or word.replace('_', ' ') in entities:
                ent_type = key
                break
        if ent_type is None:
            raise ValueError('entity %r not found in the global entity table' % word)
        sketch_response.append('@' + ent_type)
    return " ".join(sketch_response)
def generate_memory(sent, speaker, time):
    """Convert a sentence or KB line into memory tuples of MEM_TOKEN_SIZE.

    For a dialogue turn (speaker "$u" or "$s") each word becomes one padded
    quadruple [word, speaker, turn-tag, word-position-tag]; otherwise the
    whole (KB) line becomes a single reversed, padded token list.
    """
    tokens = sent.split(' ')
    if speaker in ("$u", "$s"):
        pad = ["PAD"] * (MEM_TOKEN_SIZE - 4)
        return [[tok, speaker, 'turn' + str(time), 'word' + str(pos)] + pad
                for pos, tok in enumerate(tokens)]
    # KB triple: reverse the tokens and pad the single entry to full width.
    padded = tokens[::-1] + ["PAD"] * (MEM_TOKEN_SIZE - len(tokens))
    return [padded]
def initialize_lang_multiwoz(lang, task):
    """Index MultiWOZ 2.2 intents, dialogue-state values and (for the train
    split only) annotator ids into *lang*."""
    path = ('/home/yimeng/shiquan/debiasing-glmp/GLMP/data/MultiWOZ_2.2/'
            '{}/{}_intents_states.json'.format(task, task))
    with open(path, 'r') as f:
        data = json.load(f)
    for dialogue in data.values():
        for turn_info in dialogue.values():
            for intent in turn_info['user_intents']:
                lang.index_intent(intent)
            for slot, value in turn_info['dialogue_states'].items():
                lang.index_state_values(slot, value)
    if task != 'train':
        return
    # Annotator ("bias") ids are only available for the training split.
    annotator_id_info_path = ('/home/yimeng/shiquan/debiasing-glmp/GLMP/data/'
                              'MultiWOZ_2.2/MultiWOZ_2.2_Bias_ID.json')
    with open(annotator_id_info_path, 'r') as f:
        data = json.load(f)
    for key in data:
        lang.index_annotator(data[key])
    # 'NULL' presumably acts as the fallback annotator id — confirm placement
    # (indentation in the source is ambiguous; kept inside the train branch).
    lang.index_annotator('NULL')
def initialize_lang(lang, task):
    """Index SGD intents, dialogue-state values and (for the train split
    only) annotator ids into *lang*."""
    path = ('/home/yimeng/shiquan/GLMP/data/sgd/'
            '{}/{}_intents_states.json'.format(task, task))
    with open(path, 'r') as f:
        data = json.load(f)
    for dialogue in data.values():
        for turn_info in dialogue.values():
            for intent in turn_info['user_intents']:
                lang.index_intent(intent)
            for slot, value in turn_info['dialogue_states'].items():
                lang.index_state_values(slot, value)
    if task != 'train':
        return
    # NOTE(review): this loads the *MultiWOZ* bias-id file even though this
    # is the SGD initialiser — looks copy-pasted from
    # initialize_lang_multiwoz; kept as-is to preserve behaviour, confirm.
    annotator_id_info_path = ('/home/yimeng/shiquan/debiasing-glmp/GLMP/data/'
                              'MultiWOZ_2.2/MultiWOZ_2.2_Bias_ID.json')
    with open(annotator_id_info_path, 'r') as f:
        data = json.load(f)
    for key in data:
        lang.index_annotator(data[key])
    # 'NULL' presumably acts as the fallback annotator id — confirm placement
    # (indentation in the source is ambiguous; kept inside the train branch).
    lang.index_annotator('NULL')
def prepare_data_seq(task, batch_size=100):
    """Build batched train/dev/test datasets for the SGD corpus, using the
    MultiWOZ 2.2 corpus to pre-populate the shared Lang vocabulary first.

    Args:
        task: unused in this body as written — presumably a leftover of an
            older interface; TODO confirm with callers.
        batch_size: number of samples per batch handed to get_seq.

    Returns:
        (train, dev, test, [], lang, max_resp_len) where max_resp_len is one
        more than the longest system response over the three SGD splits.
    """
    file_train = '/home/yimeng/shiquan/GLMP/data/MultiWOZ_2.2/train/train_utterances_w_kb_w_gold.txt'
    # NOTE(review): file_dev/file_test below are overwritten before use in
    # the SGD section — dead assignments kept to preserve the code verbatim.
    file_dev = '/home/yimeng/shiquan/GLMP/data/MultiWOZ_2.2/dev/dev_utterances_w_kb_w_gold.txt'
    file_test = '/home/yimeng/shiquan/GLMP/data/MultiWOZ_2.2/test/test_utterances_w_kb_w_gold.txt'
    lang = Lang()
    # Register MultiWOZ intents / states / annotator ids on the fresh Lang.
    for dataset in ('train', 'dev', 'test'):
        initialize_lang_multiwoz(lang, dataset)
    # NOTE(review): pair_train/train computed here are overwritten below;
    # this pass presumably matters only for its side effects on `lang`
    # (vocabulary indexing inside read_langs_multiwoz / get_seq) before
    # re_initialize() is called — confirm before simplifying.
    pair_train, train_max_len = read_langs_multiwoz(file_train, lang, 'train', max_line=None)
    train = get_seq(pair_train, lang, batch_size, True)
    lang.re_initialize()
    # From here on, everything is rebuilt from the SGD corpus.
    file_train = '/home/yimeng/shiquan/GLMP/data/sgd/train_utterances_w_kb_w_gold.txt'
    file_dev = '/home/yimeng/shiquan/GLMP/data/sgd/dev_utterances_w_kb_w_gold.txt'
    file_test = '/home/yimeng/shiquan/GLMP/data/sgd/test_utterances_w_kb_w_gold.txt'
    for dataset in ('train', 'dev', 'test'):
        initialize_lang(lang, dataset)
    pair_train, train_max_len = read_langs(file_train, lang, 'train', max_line=None)
    pair_dev, dev_max_len = read_langs(file_dev, lang, 'dev', max_line=None)
    pair_test, test_max_len = read_langs(file_test, lang, 'test', max_line=None)
    max_resp_len = max(train_max_len, dev_max_len, test_max_len) + 1
    train = get_seq(pair_train, lang, batch_size, False)
    dev = get_seq(pair_dev, lang, batch_size, False)
    test = get_seq(pair_test, lang, batch_size, False)
    print("Read %s sentence pairs train" % len(pair_train))
    print("Read %s sentence pairs dev" % len(pair_dev))
    print("Read %s sentence pairs test" % len(pair_test))
    print("Vocab_size: %s " % lang.n_words)
    print("Max. length of system response: %s " % max_resp_len)
    print("USE_CUDA={}".format(USE_CUDA))
    return train, dev, test, [], lang, max_resp_len
def get_data_seq(file_name, lang, max_len, batch_size=1):
    """Read one dialogue file and batch it with an existing vocabulary.

    Args:
        file_name: path to an utterances file in the same format read_langs
            expects elsewhere in this module.
        lang: already-initialised Lang vocabulary.
        max_len: unused in this body; kept for interface compatibility.
        batch_size: samples per batch handed to get_seq.

    Returns:
        The batched dataset produced by get_seq.
    """
    # Bug fix: read_langs takes (file, lang, task, max_line=...) — see the
    # calls in prepare_data_seq.  The original `read_langs(file_name,
    # max_line=None)` omitted `lang` and the task label and always raised a
    # TypeError.  'test' is used since this helper serves evaluation-time
    # loading — confirm the intended label.
    pair, _ = read_langs(file_name, lang, 'test', max_line=None)
    d = get_seq(pair, lang, batch_size, False)
    return d
bd9a80f3bee368b20efb09243582617b8db2d27e | 70 | py | Python | Python learn scripts/Basic_functions.py | Akshaychdev/Python-Scripts-Notebooks | f4246c0d7310abaf0b88c4e0f45807d5ab004d88 | [
"MIT"
] | null | null | null | Python learn scripts/Basic_functions.py | Akshaychdev/Python-Scripts-Notebooks | f4246c0d7310abaf0b88c4e0f45807d5ab004d88 | [
"MIT"
] | null | null | null | Python learn scripts/Basic_functions.py | Akshaychdev/Python-Scripts-Notebooks | f4246c0d7310abaf0b88c4e0f45807d5ab004d88 | [
"MIT"
def sum(a, b):
    """Return the sum of *a* and *b*.  (Note: shadows the builtin ``sum``;
    the name is kept because callers use it.)"""
    result = a + b
    return result
def difference(a, b):
    """Return *a* minus *b*."""
    result = a - b
    return result
| 11.666667 | 20 | 0.557143 | 14 | 70 | 2.785714 | 0.428571 | 0.205128 | 0.410256 | 0.461538 | 0.512821 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.242857 | 70 | 5 | 21 | 14 | 0.735849 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
e50e4d4754d106b6a27adb41db942bab124348da | 2,790 | py | Python | python/problem8.py | chjdev/euler | 48db9e035583c2c3fcce8eed0c911a0e67cff2c6 | [
"BSD-2-Clause"
] | null | null | null | python/problem8.py | chjdev/euler | 48db9e035583c2c3fcce8eed0c911a0e67cff2c6 | [
"BSD-2-Clause"
] | null | null | null | python/problem8.py | chjdev/euler | 48db9e035583c2c3fcce8eed0c911a0e67cff2c6 | [
"BSD-2-Clause"
] | null | null | null | # Largest product in a series
# Problem 8
# The four adjacent digits in the 1000-digit number that have the greatest product are 9 × 9 × 8 × 9 = 5832.
#
# 73167176531330624919225119674426574742355349194934
# 96983520312774506326239578318016984801869478851843
# 85861560789112949495459501737958331952853208805511
# 12540698747158523863050715693290963295227443043557
# 66896648950445244523161731856403098711121722383113
# 62229893423380308135336276614282806444486645238749
# 30358907296290491560440772390713810515859307960866
# 70172427121883998797908792274921901699720888093776
# 65727333001053367881220235421809751254540594752243
# 52584907711670556013604839586446706324415722155397
# 53697817977846174064955149290862569321978468622482
# 83972241375657056057490261407972968652414535100474
# 82166370484403199890008895243450658541227588666881
# 16427171479924442928230863465674813919123162824586
# 17866458359124566529476545682848912883142607690042
# 24219022671055626321111109370544217506941658960408
# 07198403850962455444362981230987879927244284909188
# 84580156166097919133875499200524063689912560717606
# 05886116467109405077541002256983155200055935729725
# 71636269561882670428252483600823257530420752963450
# Find the thirteen adjacent digits in the 1000-digit number that have the greatest product. What is the value of this
# product?
from functools import reduce
# Project Euler 8: greatest product of N adjacent digits in the 1000-digit
# number below.
N = 13
INPUT = """73167176531330624919225119674426574742355349194934
96983520312774506326239578318016984801869478851843
85861560789112949495459501737958331952853208805511
12540698747158523863050715693290963295227443043557
66896648950445244523161731856403098711121722383113
62229893423380308135336276614282806444486645238749
30358907296290491560440772390713810515859307960866
70172427121883998797908792274921901699720888093776
65727333001053367881220235421809751254540594752243
52584907711670556013604839586446706324415722155397
53697817977846174064955149290862569321978468622482
83972241375657056057490261407972968652414535100474
82166370484403199890008895243450658541227588666881
16427171479924442928230863465674813919123162824586
17866458359124566529476545682848912883142607690042
24219022671055626321111109370544217506941658960408
07198403850962455444362981230987879927244284909188
84580156166097919133875499200524063689912560717606
05886116467109405077541002256983155200055935729725
71636269561882670428252483600823257530420752963450""".replace('\n', '')


def _greatest_window_product(digits, width):
    """Return the largest product of `width` adjacent digits in `digits`."""
    best = 0
    # Any window containing a '0' has product 0, so only zero-free runs
    # need to be examined.
    for run in digits.split('0'):
        for start in range(len(run) - width + 1):
            product = 1
            for ch in run[start:start + width]:
                product *= int(ch)
            if product > best:
                best = product
    return best


print(_greatest_window_product(INPUT, N))
| 41.641791 | 118 | 0.890681 | 159 | 2,790 | 15.63522 | 0.490566 | 0.011263 | 0.012872 | 0.015286 | 0.852776 | 0.852776 | 0.852776 | 0.852776 | 0.852776 | 0.852776 | 0 | 0.778891 | 0.069534 | 2,790 | 66 | 119 | 42.272727 | 0.177581 | 0.462366 | 0 | 0 | 0 | 0 | 0.693822 | 0.678887 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.032258 | 0 | 0.032258 | 0.032258 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
e535a31fc01d32c92cce78e7e1c924ce024200e3 | 5,268 | py | Python | configs.py | jinxu06/pixel-cnn | 9cad98f3f801bd772815dbb403fb6649ff704dfa | [
"MIT"
] | 1 | 2020-04-19T22:48:42.000Z | 2020-04-19T22:48:42.000Z | configs.py | jinxu06/pixel-cnn | 9cad98f3f801bd772815dbb403fb6649ff704dfa | [
"MIT"
] | null | null | null | configs.py | jinxu06/pixel-cnn | 9cad98f3f801bd772815dbb403fb6649ff704dfa | [
"MIT"
] | null | null | null |
# Experiment configurations for the PixelCNN runs, keyed by run name.  The
# dict(...) keyword form preserves the key order of the original literals.
configs = {}

# Shared dataset locations.
_SVHN_DATA = "/data/ziz/not-backed-up/jxu/SVHN"
_CELEBA_DATA = "/data/ziz/not-backed-up/jxu/CelebA"

# ---- SVHN (5 resnet blocks, 100 filters) ----
configs["svhn-forward"] = dict(
    data_dir=_SVHN_DATA,
    save_dir="/data/ziz/jxu/save-svhn-forward",
    nr_filters=100,
    nr_resnet=5,
    data_set="svhn",
    batch_size=8,
    init_batch_size=8,
    nr_gpu=8,
)
configs["svhn-backward"] = dict(
    data_dir=_SVHN_DATA,
    save_dir="/data/ziz/jxu/save-svhn-backward",
    nr_filters=100,
    nr_resnet=5,
    data_set="svhn",
    batch_size=8,
    init_batch_size=8,
    masked=True,
    rot180=True,
    nr_gpu=8,
)
configs["svhn-backward-rename"] = dict(
    data_dir=_SVHN_DATA,
    save_dir="/data/ziz/jxu/save-svhn-backward-rename",
    nr_filters=100,
    nr_resnet=5,
    data_set="svhn",
    batch_size=8,
    init_batch_size=8,
    masked=True,
    rot180=True,
    nr_gpu=8,
)

# ---- CelebA (5 resnet blocks, 160 filters) ----
configs["celeba-forward"] = dict(
    data_dir=_CELEBA_DATA,
    save_dir="/data/ziz/jxu/save-forward",
    nr_filters=160,
    nr_resnet=5,
    data_set="celeba",
    batch_size=6,
    init_batch_size=6,
    nr_gpu=8,
)
configs["celeba-backward"] = dict(
    data_dir=_CELEBA_DATA,
    save_dir="/data/ziz/jxu/save-backward",
    nr_filters=160,
    nr_resnet=5,
    data_set="celeba",
    batch_size=6,
    init_batch_size=6,
    masked=True,
    rot180=True,
    nr_gpu=8,
)
configs["celeba-backward-rename"] = dict(
    data_dir=_CELEBA_DATA,
    save_dir="/data/ziz/jxu/save-backward-rename",
    nr_filters=160,
    nr_resnet=5,
    data_set="celeba",
    batch_size=6,
    init_batch_size=6,
    masked=True,
    rot180=True,
    nr_gpu=8,
)

# ---- CelebA 64x64 "hr" (4 resnet blocks, 100 filters) ----
configs["celeba-hr-forward"] = dict(
    data_dir=_CELEBA_DATA,
    save_dir="/data/ziz/jxu/save64-forward",
    nr_filters=100,
    nr_resnet=4,
    data_set="celeba",
    batch_size=8,
    init_batch_size=8,
    save_interval=5,
    nr_gpu=8,
)
configs["celeba-hr-backward"] = dict(
    data_dir=_CELEBA_DATA,
    save_dir="/data/ziz/jxu/save64-backward",
    nr_filters=100,
    nr_resnet=4,
    data_set="celeba",
    batch_size=8,
    init_batch_size=8,
    masked=True,
    rot180=True,
    save_interval=5,
    nr_gpu=8,
)
configs["celeba-hr-backward-rename"] = dict(
    data_dir=_CELEBA_DATA,
    save_dir="/data/ziz/jxu/save64-backward-rename",
    nr_filters=100,
    nr_resnet=4,
    data_set="celeba",
    batch_size=8,
    init_batch_size=8,
    masked=True,
    rot180=True,
    save_interval=5,
    nr_gpu=8,
)

# ---- CelebA 64x64 with a 20-component logistic mixture ----
configs["celeba-hr-test"] = dict(
    data_dir=_CELEBA_DATA,
    save_dir="/data/ziz/jxu/save64-forward-new-20-e60",
    nr_filters=100,
    nr_resnet=4,
    data_set="celeba",
    batch_size=8,
    init_batch_size=8,
    save_interval=5,
    nr_gpu=8,
    nr_logistic_mix=20,
)
configs["celeba-hr-forward-new-20"] = dict(
    data_dir=_CELEBA_DATA,
    save_dir="/data/ziz/jxu/save64-forward-new-20-e40",
    nr_filters=100,
    nr_resnet=4,
    data_set="celeba",
    batch_size=8,
    init_batch_size=8,
    save_interval=5,
    nr_gpu=8,
    nr_logistic_mix=20,
)
configs["celeba-hr-forward-new-20-missing"] = dict(
    data_dir=_CELEBA_DATA,
    save_dir="/data/ziz/jxu/save64-forward-new-20-missing",
    nr_filters=100,
    nr_resnet=4,
    data_set="celeba",
    batch_size=8,
    init_batch_size=8,
    save_interval=5,
    nr_gpu=8,
    nr_logistic_mix=20,
    masked=True,
)
configs["celeba-hr-backward-new-20-rename"] = dict(
    data_dir=_CELEBA_DATA,
    save_dir="/data/ziz/jxu/save64-backward-new-20-rename",
    nr_filters=100,
    nr_resnet=4,
    data_set="celeba",
    batch_size=8,
    init_batch_size=8,
    masked=True,
    rot180=True,
    save_interval=5,
    nr_gpu=8,
    nr_logistic_mix=20,
)

# ---- SVHN with a 20-component logistic mixture ----
configs["svhn-forward-20"] = dict(
    data_dir=_SVHN_DATA,
    save_dir="/data/ziz/jxu/save-svhn-forward-20",
    nr_filters=100,
    nr_resnet=5,
    data_set="svhn",
    batch_size=8,
    init_batch_size=8,
    nr_gpu=8,
    nr_logistic_mix=20,
)
configs["svhn-forward-20-missing"] = dict(
    data_dir=_SVHN_DATA,
    save_dir="/data/ziz/jxu/save-svhn-forward-20-missing",
    nr_filters=100,
    nr_resnet=5,
    data_set="svhn",
    batch_size=8,
    init_batch_size=8,
    nr_gpu=8,
    nr_logistic_mix=20,
    masked=True,
)
configs["svhn-backward-20-rename"] = dict(
    data_dir=_SVHN_DATA,
    save_dir="/data/ziz/jxu/save-svhn-backward-20-rename",
    nr_filters=100,
    nr_resnet=5,
    data_set="svhn",
    batch_size=8,
    init_batch_size=8,
    masked=True,
    rot180=True,
    nr_gpu=8,
    nr_logistic_mix=20,
)
| 25.205742 | 62 | 0.589598 | 755 | 5,268 | 3.89404 | 0.058278 | 0.078571 | 0.112245 | 0.080952 | 0.963605 | 0.963605 | 0.951361 | 0.940136 | 0.926871 | 0.921769 | 0 | 0.047484 | 0.196469 | 5,268 | 208 | 63 | 25.326923 | 0.647059 | 0.012908 | 0 | 0.740933 | 0 | 0 | 0.575308 | 0.245955 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
00573794a9cf85abe8627574bf86e1604e775bca | 802 | py | Python | contk/metric/__init__.py | victorywys/contk | b33c92922dd1e8eea9a102f051a10749281ac72a | [
"MIT"
] | null | null | null | contk/metric/__init__.py | victorywys/contk | b33c92922dd1e8eea9a102f051a10749281ac72a | [
"MIT"
] | null | null | null | contk/metric/__init__.py | victorywys/contk | b33c92922dd1e8eea9a102f051a10749281ac72a | [
"MIT"
"""Evaluation metrics for dialogue models.

`contk.metrics` provides functions evaluating results of models. It provides
a fair metric for every model.
"""
from .metric import (
    MetricBase,
    PerlplexityMetric,  # (sic) spelling mirrors the class name in .metric
    BleuCorpusMetric,
    SingleTurnDialogRecorder,
    LanguageGenerationRecorder,
    MetricChain,
    MultiTurnDialogRecorder,
    MultiTurnPerplexityMetric,
    MultiTurnBleuCorpusMetric,
    BleuPrecisionRecallMetric,
    EmbSimilarityPrecisionRecallMetric,
    HashValueRecorder,
)

# Public API re-exported from .metric.
__all__ = [
    "MetricBase", "PerlplexityMetric", "BleuCorpusMetric",
    "SingleTurnDialogRecorder", "LanguageGenerationRecorder", "MetricChain",
    "MultiTurnDialogRecorder", "MultiTurnPerplexityMetric",
    "MultiTurnBleuCorpusMetric", "BleuPrecisionRecallMetric",
    "EmbSimilarityPrecisionRecallMetric", "HashValueRecorder",
]
| 53.466667 | 100 | 0.770574 | 45 | 802 | 13.644444 | 0.688889 | 0.087948 | 0.140065 | 0.218241 | 0.824104 | 0.824104 | 0.824104 | 0.824104 | 0.824104 | 0.824104 | 0 | 0 | 0.145885 | 802 | 14 | 101 | 57.285714 | 0.89635 | 0.133416 | 0 | 0 | 0 | 0 | 0.368268 | 0.26492 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 1 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
006052b07da6afd0ffb2388cd3f5bfe018e20a20 | 89 | py | Python | torch_ac/utils/__init__.py | jsikyoon/torch-ac | 4d44ed3eb7a81a583a0c9619e0d4fb142a4a3d6b | [
"MIT"
] | 1 | 2021-03-19T02:59:45.000Z | 2021-03-19T02:59:45.000Z | torch_ac/utils/__init__.py | jsikyoon/torch-ac | 4d44ed3eb7a81a583a0c9619e0d4fb142a4a3d6b | [
"MIT"
] | null | null | null | torch_ac/utils/__init__.py | jsikyoon/torch-ac | 4d44ed3eb7a81a583a0c9619e0d4fb142a4a3d6b | [
"MIT"
"""Convenience re-exports for ``torch_ac.utils``.

DictList and ParallelEnv are defined in the sibling ``dictlist`` and ``penv``
modules — semantics assumed from the module names; confirm against those
files.
"""
from torch_ac.utils.dictlist import DictList
from torch_ac.utils.penv import ParallelEnv
| 29.666667 | 44 | 0.865169 | 14 | 89 | 5.357143 | 0.571429 | 0.24 | 0.293333 | 0.426667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089888 | 89 | 2 | 45 | 44.5 | 0.925926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
0068be7172dd534fb448e0c230657846d1f38084 | 10,415 | py | Python | tests/test_pure_state_on_U.py | revilooliver/rp | 9618b640039f911e98e0fa0f5766951bac7e16af | [
"Apache-2.0"
] | 5 | 2020-12-16T07:58:35.000Z | 2022-03-01T16:41:49.000Z | tests/test_pure_state_on_U.py | revilooliver/rp | 9618b640039f911e98e0fa0f5766951bac7e16af | [
"Apache-2.0"
] | null | null | null | tests/test_pure_state_on_U.py | revilooliver/rp | 9618b640039f911e98e0fa0f5766951bac7e16af | [
"Apache-2.0"
] | 1 | 2020-11-19T17:31:49.000Z | 2020-11-19T17:31:49.000Z | # -*- coding: utf-8 -*-
# (C) Copyright Ji Liu and Luciano Bello 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Test the ControlOnConstant pass"""
import unittest
from qiskit import QuantumRegister, QuantumCircuit
from qiskit.transpiler import PassManager
from qiskit.extensions import HGate
from qiskit.compiler import transpile
from qiskit.test import QiskitTestCase
from qiskit.test.mock import FakeRueschlikon
from purestate import PureStateOnU
from qiskit.transpiler.passes import Optimize1qGates
from qiskit.converters import circuit_to_dag
from qiskit.transpiler.passes import Unroller
from purestate import ASwapGate
import numpy as np
class PureStateTestCase(QiskitTestCase):
    """Shared helpers for the PureStateOnU test suites."""

    def assertEqualUnroll(self, basis, circuit, expected, pass_=None):
        """Assert that *circuit* and *expected* agree after both are unrolled
        to *basis*, optionally running the passes in *pass_* first."""
        manager = PassManager()
        if pass_ is not None:
            for extra in pass_:
                manager.append(extra)
        manager.append(Unroller(basis))
        self.assertEqual(manager.run(circuit), manager.run(expected))
class TestSWAPGates(PureStateTestCase):
    """PureStateOnU behaviour on plain ``swap`` gates."""

    def test_same_const_swap(self):
        """Both qubits hold the same known pure state, so the swap is
        redundant and should be removed.

            |phi> --X--        |phi> --
                    |     =>
            |phi> --X--        |phi> --
        """
        qr = QuantumRegister(2, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.u3(1.23, 2.34, 3.04, qr[0])
        circuit.u3(1.23, 2.34, 3.04, qr[1])
        circuit.swap(qr[0], qr[1])
        # Expected: only the two identical state preparations, no swap.
        expected = QuantumCircuit(qr)
        expected.u3(1.23, 2.34, 3.04, qr[0])
        expected.u3(1.23, 2.34, 3.04, qr[1])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        result = passmanager.run(circuit)
        self.assertEqual(expected, result)

    def test_two_unknown_swap(self):
        """Both qubits are freshly reset; the pass leaves the swap in place.

        NOTE(review): the original docstring claimed the swap is removed,
        but ``expected`` below retains it — confirm the intended behaviour
        for two reset qubits.
        """
        qr = QuantumRegister(2, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.reset(qr[0])
        circuit.reset(qr[1])
        circuit.swap(qr[0], qr[1])
        # Expected: circuit unchanged.
        expected = QuantumCircuit(qr)
        expected.reset(qr[0])
        expected.reset(qr[1])
        expected.swap(qr[0], qr[1])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        result = passmanager.run(circuit)
        self.assertEqual(expected, result)

    def test_two_const_swap(self):
        """Two different known pure states: the swap collapses into the two
        preparations exchanged between the qubits.

            |phi> --X--        |phi> --U3---
                    |     =>
            |psi> --X--        |psi> --U3^--
        """
        qr = QuantumRegister(2, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.u3(1.23, 3.34, 3.04, qr[0])
        circuit.u3(2.22, 1.67, 0.66, qr[1])
        circuit.swap(qr[0], qr[1])
        # Expected: the u3 preparations swapped between the two qubits.
        expected = QuantumCircuit(qr)
        expected.u3(2.22, 1.67, 0.66, qr[0])
        expected.u3(1.23, 3.34, 3.04, qr[1])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        passmanager.append(Optimize1qGates())
        result = passmanager.run(circuit)
        self.assertEqual(expected, result)

    def test_two_const_swap2(self):
        """Same exchange as test_two_const_swap, on a wider (4-qubit)
        register to check the pass handles idle qubits."""
        qr = QuantumRegister(4, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.u3(1.23, 3.34, 3.04, qr[0])
        circuit.u3(2.22, 1.67, 0.66, qr[1])
        circuit.swap(qr[0], qr[1])
        expected = QuantumCircuit(qr)
        expected.u3(2.22, 1.67, 0.66, qr[0])
        expected.u3(1.23, 3.34, 3.04, qr[1])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        passmanager.append(Optimize1qGates())
        result = passmanager.run(circuit)
        self.assertEqual(expected, result)

    def test_two_const_swap3(self):
        """Same exchange with two 4-qubit registers in the circuit."""
        qr = QuantumRegister(4, 'qreg')
        qr2 = QuantumRegister(4, 'qreg2')
        circuit = QuantumCircuit(qr, qr2)
        circuit.u3(1.23, 3.34, 3.04, qr[0])
        circuit.u3(2.22, 1.67, 0.66, qr[1])
        circuit.swap(qr[0], qr[1])
        expected = QuantumCircuit(qr, qr2)
        expected.u3(2.22, 1.67, 0.66, qr[0])
        expected.u3(1.23, 3.34, 3.04, qr[1])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        passmanager.append(Optimize1qGates())
        result = passmanager.run(circuit)
        self.assertEqual(expected, result)

    def test_two_const_swap4(self):
        """Only qr[1] is explicitly prepared (qr[0] stays in the initial
        state): the swap reduces to a single preparation on qr[0]."""
        qr = QuantumRegister(4, 'qreg')
        qr2 = QuantumRegister(4, 'qreg2')
        circuit = QuantumCircuit(qr, qr2)
        circuit.u3(2.22, 1.67, 0.66, qr[1])
        circuit.swap(qr[0], qr[1])
        expected = QuantumCircuit(qr, qr2)
        expected.u3(2.22, 1.67, 0.66, qr[0])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        passmanager.append(Optimize1qGates())
        result = passmanager.run(circuit)
        self.assertEqual(expected, result)

    def test_first_zero_swap(self):
        """qr[1] is reset: the swap can be implemented with only two CNOTs
        (the third CNOT of the standard swap decomposition is redundant when
        one input is |0>).  Compared after unrolling to the cx basis."""
        qr = QuantumRegister(2, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.reset(qr[1])
        circuit.swap(qr[0], qr[1])
        expected = QuantumCircuit(qr)
        expected.reset(qr[1])
        expected.cx(qr[1], qr[0])
        expected.cx(qr[0], qr[1])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        result = passmanager.run(circuit)
        self.assertEqualUnroll(['cx'], result, expected)

    def test_second_zero_swap(self):
        """Mirror of test_first_zero_swap with qr[0] reset instead."""
        qr = QuantumRegister(2, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.reset(qr[0])
        circuit.swap(qr[0], qr[1])
        expected = QuantumCircuit(qr)
        expected.reset(qr[0])
        expected.cx(qr[0], qr[1])
        expected.cx(qr[1], qr[0])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        result = passmanager.run(circuit)
        self.assertEqualUnroll(['cx'], result, expected)
class TestASWAPGates(PureStateTestCase):
    """PureStateOnU behaviour on the custom ASwapGate, in both qubit
    orders."""

    def test_same_const_aswap(self):
        """Both qubits hold the same known pure state, so the aswap is
        redundant and should be removed.

            |phi> --X--        |phi> --
                    |     =>
            |phi> --X--        |phi> --
        """
        qr = QuantumRegister(2, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.u3(1.23, 2.34, 3.04, qr[0])
        circuit.u3(1.23, 2.34, 3.04, qr[1])
        circuit.append(ASwapGate(), [qr[1], qr[0]])
        # Expected: only the two identical state preparations, no aswap.
        expected = QuantumCircuit(qr)
        expected.u3(1.23, 2.34, 3.04, qr[0])
        expected.u3(1.23, 2.34, 3.04, qr[1])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        result = passmanager.run(circuit)
        self.assertEqual(expected, result)

    def test_same_const_aswap_reverse(self):
        """Same as test_same_const_aswap with the ASwapGate qubit arguments
        in the opposite order."""
        qr = QuantumRegister(2, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.u3(1.23, 2.34, 3.04, qr[0])
        circuit.u3(1.23, 2.34, 3.04, qr[1])
        circuit.append(ASwapGate(), [qr[0], qr[1]])
        expected = QuantumCircuit(qr)
        expected.u3(1.23, 2.34, 3.04, qr[0])
        expected.u3(1.23, 2.34, 3.04, qr[1])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        result = passmanager.run(circuit)
        self.assertEqual(expected, result)

    def test_different_const_aswap(self):
        """Two different known pure states: the aswap collapses into the two
        preparations exchanged between the qubits.

            |phi> --X--        |phi> --U3---
                    |     =>
            |psi> --X--        |psi> --U3^--
        """
        qr = QuantumRegister(2, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.u3(1.23, 3.34, 3.04, qr[0])
        circuit.u3(2.22, 1.67, 0.66, qr[1])
        circuit.append(ASwapGate(), [qr[1], qr[0]])
        expected = QuantumCircuit(qr)
        expected.u3(2.22, 1.67, 0.66, qr[0])
        expected.u3(1.23, 3.34, 3.04, qr[1])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        passmanager.append(Optimize1qGates())
        result = passmanager.run(circuit)
        self.assertEqual(expected, result)

    def test_different_const_aswap_reverse(self):
        """Same as test_different_const_aswap with the ASwapGate qubit
        arguments in the opposite order."""
        qr = QuantumRegister(2, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.u3(1.23, 3.34, 3.04, qr[0])
        circuit.u3(2.22, 1.67, 0.66, qr[1])
        circuit.append(ASwapGate(), [qr[0], qr[1]])
        expected = QuantumCircuit(qr)
        expected.u3(2.22, 1.67, 0.66, qr[0])
        expected.u3(1.23, 3.34, 3.04, qr[1])
        passmanager = PassManager()
        passmanager.append(PureStateOnU())
        passmanager.append(Optimize1qGates())
        result = passmanager.run(circuit)
        self.assertEqual(expected, result)
# Allow running this test module directly with `python <file>.py`.
if __name__ == '__main__':
    unittest.main()
| 33.705502 | 99 | 0.571483 | 1,274 | 10,415 | 4.628728 | 0.124019 | 0.019332 | 0.018654 | 0.026115 | 0.792606 | 0.768187 | 0.768187 | 0.764965 | 0.752247 | 0.752247 | 0 | 0.063105 | 0.286414 | 10,415 | 308 | 100 | 33.814935 | 0.730355 | 0.200288 | 0 | 0.78534 | 0 | 0 | 0.006339 | 0 | 0 | 0 | 0 | 0 | 0.073298 | 1 | 0.068063 | false | 0.282723 | 0.068063 | 0 | 0.151832 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
006de5e2e5c870d7d622ff7bcaf7b705ab732c25 | 43,741 | py | Python | maze.py | enes3632/test | 78840be0daee2811cab84a8583224228a5616239 | [
"MIT"
] | null | null | null | maze.py | enes3632/test | 78840be0daee2811cab84a8583224228a5616239 | [
"MIT"
] | null | null | null | maze.py | enes3632/test | 78840be0daee2811cab84a8583224228a5616239 | [
"MIT"
] | null | null | null | # COMP9021 20T3 - Rachid Hamadi
# Assignment 2 *** Due Sunday Week 10 @ 10.00pm
# DO *NOT* WRITE YOUR NAME TO MAINTAIN ANONYMITY FOR PLAGIARISM DETECTION
# IMPORT ANY REQUIRED MOD
from collections import deque
import numpy as np
from copy import deepcopy
import difflib
class MazeError(Exception):
    """Raised for invalid maze description files.

    Maze.__init__ uses two messages: 'Incorrect input.' for malformed grids
    and 'Input does not represent a maze.' for walls that would leave the
    grid.
    """
    def __init__(self, message):
        # Stored for callers that inspect .message; BaseException.__new__
        # still records the argument in .args, so str(exc) works as usual.
        self.message = message
class Maze:
def __init__(self, filename):
self.filename=filename
global texname
texname = self.filename
f=open(self.filename, 'r')
global matrix
matrix = [ x.split() for x in f] # read the files
matrix = [x for x in matrix if x!=[]] #delete spaces
for x in range(len(matrix)): # separate digits with no separators
if len(matrix[x])==1:
matrix[x] = list(matrix[x][0])
matrix = [[int(y) for y in x] for x in matrix] # convert to integer
#checks the metrix's size. it must be at least 2x2
if len(matrix)==1 or len(matrix[0])==1:
raise MazeError('Incorrect input.')
#checks if hte matrix has invalid character or a line with different size
for x,y in enumerate(matrix):
for t in y:
if t not in [0,1,2,3]:
raise MazeError('Incorrect input.')
elif x==len(matrix)-1 and t in [2,3]:
raise MazeError('Input does not represent a maze.')
elif t==matrix[x][-1] and t in [1,3]:
raise MazeError('Input does not represent a maze.')
if len(y)!=len(matrix[0]):
raise MazeError('Incorrect input.')
texname2=[x for x in texname]
texname2[-2]='e'
texname2[-1]='x'
texname=''.join(texname2)
def analyse_messages(self, name, N): #produce analyse print messages
self.name = name
self.N = str(N)
x = 'The maze has '
if self.name=='gate':
if N==0:
y = 'no gate.'
elif N==1:
y = 'a single gate.'
elif N > 1:
y = self.N + ' gates.'
elif self.name=='wall':
if N==0:
y = 'no wall.'
elif N==1:
y = 'walls that are all connected.'
elif N > 1:
y = self.N + ' sets of walls that are all connected.'
elif self.name=='inner':
if N==0:
y = 'no inaccessible inner point.'
elif N==1:
y = 'a unique inaccessible inner point.'
elif N > 1:
y = self.N + ' inaccessible inner points.'
elif self.name=='accessible':
if N==0:
y = 'no accessible area.'
elif N==1:
y = 'a unique accessible area.'
elif N > 1:
y = self.N + ' accessible areas.'
elif self.name=='culdesac':
if N==0:
y = 'no accessible cul-de-sac.'
elif N==1:
y = 'accessible cul-de-sacs that are all connected.'
elif N > 1:
y = self.N + ' sets of accessible cul-de-sacs that are all connected.'
elif self.name=='path':
if N==0:
y = 'no entry-exit path with no intersection not to cul-de-sacs.'
elif N==1:
y = 'a unique entry-exit path with no intersection not to cul-de-sacs.'
elif N > 1:
y = self.N + ' entry-exit paths with no intersections not to cul-de-sacs.'
return x + y
def analyse(self):
#print('matrix',*matrix,sep='\n')
################################################################
#####################--------gates--------######################
################################################################
top_gates=len([x for x in matrix[0][:-1] if x in [0, 2]])
left_gates=len([matrix[x][0] for x in range(len(matrix)-1) if matrix[x][0] in [0, 1]])
right_gates=len([matrix[x][-1] for x in range(len(matrix)-1) if matrix[x][-1]==0])
bottom_gates=len([x for x in matrix[-1][:-1] if x==0])
total_gates = top_gates + left_gates + right_gates + bottom_gates
print(Maze.analyse_messages(self,'gate',total_gates))
################################################################
#####################--------wales--------######################
################################################################
def check_visited(matrix, x, y, visited): #checks if it was previously visited
return (x>-1) and (x<len(visited)) and (y>-1) and (y<len(visited[0])) and (not visited[x][y])
def check(matrix, i, j, visited, node):
quit = deque()
quit.append((i,j))
visited[i][j] = True
while quit:
x,y = quit.popleft()
node = matrix[x][y]
if node==1:
col1=[0,0,-1]
row1=[1,-1,0]
for k in range(3):
if check_visited(matrix, x + col1[k], y + row1[k], visited):
if col1[k]==-1 and matrix[x + col1[k]][y + row1[k]] in [2,3]:
visited[x + col1[k]][y + row1[k]] = True
quit.append((x + col1[k], y + row1[k]))
if row1[k]==-1 and matrix[x + col1[k]][y + row1[k]] in [1,3]:
visited[x + col1[k]][y + row1[k]] = True
quit.append((x + col1[k], y + row1[k]))
if row1[k]==1:
visited[x + col1[k]][y + row1[k]] = True
quit.append((x + col1[k], y + row1[k]))
elif node==2:
col2=[1,-1,0]
row2=[0,0,-1]
for k in range(3):
if check_visited(matrix, x + col2[k], y + row2[k], visited):
if col2[k]==-1 and matrix[x + col2[k]][y + row2[k]] in [2,3]:
visited[x + col2[k]][y + row2[k]] = True
quit.append((x + col2[k], y + row2[k]))
if row2[k]==-1 and matrix[x + col2[k]][y + row2[k]] in [1,3]:
visited[x + col2[k]][y + row2[k]] = True
quit.append((x + col2[k], y + row2[k]))
if col2[k]==1:
visited[x + col2[k]][y + row2[k]] = True
quit.append((x + col2[k], y + row2[k]))
elif node==3:
col3=[1,-1,0,0]
row3=[0,0,-1,1]
for k in range(4):
if check_visited(matrix, x + col3[k], y + row3[k], visited):
if col3[k]==-1 and matrix[x + col3[k]][y + row3[k]] in [2,3]:
visited[x + col3[k]][y + row3[k]] = True
quit.append((x + col3[k], y + row3[k]))
if row3[k]==-1 and matrix[x + col3[k]][y + row3[k]] in [1,3]:
visited[x + col3[k]][y + row3[k]] = True
quit.append((x + col3[k], y + row3[k]))
if col3[k]==1:
visited[x + col3[k]][y + row3[k]] = True
quit.append((x + col3[k], y + row3[k]))
if row3[k]==1:
visited[x + col3[k]][y + row3[k]] = True
quit.append((x + col3[k], y + row3[k]))
elif node==0:
col0=[-1,0]
row0=[0,-1]
for k in range(2):
if check_visited(matrix, x + col0[k], y + row0[k], visited):
if col0[k]==-1 and matrix[x + col0[k]][y + row0[k]] in [2,3]:
visited[x + col0[k]][y + row0[k]] = True
quit.append((x + col0[k], y + row0[k]))
if row0[k]==-1 and matrix[x + col0[k]][y + row0[k]] in [1,3]:
visited[x + col0[k]][y + row0[k]] = True
quit.append((x + col0[k], y + row0[k]))
visited = [[False for x in range(len(matrix[0]))] for y in range(len(matrix))]
walls=0
for i in range(len(matrix)):
for j in range(len(matrix[0])):
if matrix[i][j]!=0 and not visited[i][j]:
check(matrix, i, j, visited, matrix[i][j])
walls +=1
print(Maze.analyse_messages(self,'wall',walls))
###############################################################
#######--------inner points & accessible areas--------#########
###############################################################
def check_visited4(matrixx, x, y, visited4): #checks if it was previously visited
return (x>-1) and (x<len(matrixx)) and (y>-1) and (y<len(matrixx[0])) and visited4[x][y]
def find_the_size_of_the_culdesac(i, j, matrixwalls): #used to find how deep the culdesac is
quit = deque()
quit.append((i,j))
culdesacs3 = 0
coordinates2 = []
coordinates2.append([i,j])
matrixwalls[i][j] = False
while quit:
x, y = quit.popleft()
col=[-1,1,0,0]
row=[0,0,-1,1]
safe = 0
for k in range(4):
if (x + col[k]>-1) and (x + col[k]<len(matrixwalls)) and\
(y + row[k]>-1) and (y + row[k]<len(matrixwalls[0]))and \
not matrixwalls[x + col[k]][y + row[k]]!='w' and\
matrixwalls[x + col[k]][y + row[k]]:
if col[k]==-1:
visited6[x-1][y] = False
quit.append((x-1, y))
#coordinates2.append([x-1, y])
safe+=1
if col[k]==1:
visited6[x+1][y] = False
quit.append((x+1, y))
#coordinates2.append([x+1, y])
safe+=1
if row[k]==-1:
visited6[x][y-1] = False
quit.append((x, y-1))
#coordinates2.append([x, y-1])
safe+=1
if row[k]==1:
visited6[x][y+1] = False
quit.append((x, y+1))
#coordinates2.append([x, y+1])
safe+=1
if safe>1:
quit.clear()
for x in coordinates2:
coordinates.append(x)
else:
coordinates.append([x,y])
# if safe>1:
# del coordinates[-3:]
# for x in range(len(coordinates)):
# if coordinates[x][0] % 2 ==1 and coordinates[x][1] % 2 ==1:
# culdesacs3+=1
# if culdesacs3!=0:
# return culdesacs3
# elif safe<1:
# return 0
def check_culdesac(i, j, matrixwalls): #finds culdesacs in accesible areas with multiple gates
#visited6 = deepcopy(visited5) #used to find how deep the culdesac is / the size of the whole
quit = deque()
quit.append((i,j))
culdesacs2 = 0
matrixwalls[i][j] = False
visited6[i][j] = False
while quit:
x, y = quit.popleft()
col=[-1,1,0,0]
row=[0,0,-1,1]
no_of_walls = 0
for k in range(4):
if (x + col[k]>-1) and (x + col[k]<len(matrixwalls)) and\
(y + row[k]>-1) and (y + row[k]<len(matrixwalls[0])):
if matrixwalls[x + col[k]][y + row[k]]!='w':
if check_visited4(matrixx, x + col[k], y + row[k], visited6):
if col[k]==-1:
visited6[x-1][y] = False
quit.append((x-1, y))
if col[k]==1:
visited6[x+1][y] = False
quit.append((x+1, y))
if row[k]==-1:
visited6[x][y-1] = False
quit.append((x, y-1))
if row[k]==1:
visited6[x][y+1] = False
quit.append((x, y+1))
else:
no_of_walls+=1
if no_of_walls==3 and x!=0 and x!=len(matrixwalls)-1 and y!=0 and y!=len(matrixwalls[0])-1:
culdesacs2+=1
find_the_size_of_the_culdesac(x, y, matrixwalls)
if culdesacs2!=0:
return culdesacs2
else:
return 0
def accessible_areas(matrixx, i, j, visited4):
quit = deque()
quit.append((i,j))
gates = 0
global areas
global inners
global culdesacs
global onepaths
areas = 0
inners = 0
culdesacs = 0
onepaths = 0
coordinates3 = []
if visited4[i][j]:
points = 0
coordinates3.append([i,j])
visited4[i][j] = False
while quit:
x,y = quit.popleft()
col=[-1,1,0,0]
row=[0,0,-1,1]
points+=1
for k in range(4):
if check_visited4(matrixx, x + col[k], y + row[k], visited4):
if col[k]==-1:
visited4[x-1][y] = False
quit.append((x-1, y))
coordinates3.append([x-1, y])
if col[k]==1:
visited4[x+1][y] = False
quit.append((x+1, y))
coordinates3.append([x+1, y])
if row[k]==-1:
visited4[x][y-1] = False
quit.append((x, y-1))
coordinates3.append([x, y-1])
if row[k]==1:
visited4[x][y+1] = False
quit.append((x, y+1))
coordinates3.append([x, y+1])
for x in range(len(coordinates3)):
if coordinates3[x][0] in [0, len(visited4)-1]\
or coordinates3[x][1] in [0, len(visited4[0])-1]:
gates+=1
if gates==0:
for x in range(len(coordinates3)):
if coordinates3[x][0] % 2 ==1 and coordinates3[x][1] % 2 ==1:
inners+=1
return inners
elif gates>0:
if gates==1:
for x in coordinates3:
if x[0] % 2 ==1 and x[1] % 2 ==1:
coordinates.append(x)
culdesacs+=1
elif gates>1:
culdesacs += check_culdesac(coordinates3[-1][0], coordinates3[-1][1], matrixwalls)
if gates==2 and onepaths>=0:
onepaths+=1
areas+=1
return areas
matrixx = []
for x in range(2*len(matrix)-1):
matrixx.append([])
for y in range(2*len(matrix[0])-1):
matrixx[x].append(True)
for x in range(len(matrix)):
for y in range(len(matrix[0])):
matrixx[2*x][2*y]=matrix[x][y]
matrixwalls = deepcopy(matrixx)
for x in range(len(matrixx)):
for y in range(len(matrixx[0])):
if str(matrixx[x][y])=='0':
matrixx[x][y]=False
elif str(matrixx[x][y])=='1':
matrixx[x][y+1]=False
matrixx[x][y]=False
elif matrixx[x][y]==2:
matrixx[x+1][y]=False
matrixx[x][y]=False
elif matrixx[x][y]==3:
matrixx[x+1][y]=False
matrixx[x][y+1]=False
matrixx[x][y]=False
for x in range(len(matrixwalls)):
for y in range(len(matrixwalls[0])):
if str(matrixwalls[x][y])=='0':
matrixwalls[x][y]='w'
elif str(matrixwalls[x][y])=='1':
matrixwalls[x][y+1]='w'
matrixwalls[x][y]='w'
elif matrixwalls[x][y]==2:
matrixwalls[x+1][y]='w'
matrixwalls[x][y]='w'
elif matrixwalls[x][y]==3:
matrixwalls[x+1][y]='w'
matrixwalls[x][y+1]='w'
matrixwalls[x][y]='w'
area = 0
inner = 0
culdesac = 0
onepath = 0
visited4 = deepcopy(matrixx)
visited5 = deepcopy(matrixx) #used in def check_culdesac function
visited6 = deepcopy(visited5) #used to find how deep the culdesac is / the size of the whole
global coordinates
coordinates = []
for i in range(len(matrixx)):
for j in range(len(matrixx[0])):
if matrixx[i][j]:
a = accessible_areas(matrixx, i, j, visited4)
if areas!=0:
area+=a
culdesac+=culdesacs
onepath+=onepaths
elif inners!=0:
inner+=a
print(Maze.analyse_messages(self, 'inner', inner))
print(Maze.analyse_messages(self, 'accessible', area))
print(Maze.analyse_messages(self, 'culdesac', culdesac))
print(Maze.analyse_messages(self, 'path', onepath))
#--------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------------------
def display(self):
################################################################
#####################--------gates--------######################
################################################################
top_gates=len([x for x in matrix[0][:-1] if x in [0, 2]])
left_gates=len([matrix[x][0] for x in range(len(matrix)-1) if matrix[x][0] in [0, 1]])
right_gates=len([matrix[x][-1] for x in range(len(matrix)-1) if matrix[x][-1]==0])
bottom_gates=len([x for x in matrix[-1][:-1] if x==0])
total_gates = top_gates + left_gates + right_gates + bottom_gates
################################################################
#####################--------wales--------######################
################################################################
def check_visited(matrix, x, y, visited): #checks if it was previously visited
return (x>-1) and (x<len(visited)) and (y>-1) and (y<len(visited[0])) and (not visited[x][y])
def check(matrix, i, j, visited, node):
quit = deque()
quit.append((i,j))
visited[i][j] = True
while quit:
x,y = quit.popleft()
node = matrix[x][y]
if node==1:
col1=[0,0,-1]
row1=[1,-1,0]
for k in range(3):
if check_visited(matrix, x + col1[k], y + row1[k], visited):
if col1[k]==-1 and matrix[x + col1[k]][y + row1[k]] in [2,3]:
visited[x + col1[k]][y + row1[k]] = True
quit.append((x + col1[k], y + row1[k]))
if row1[k]==-1 and matrix[x + col1[k]][y + row1[k]] in [1,3]:
visited[x + col1[k]][y + row1[k]] = True
quit.append((x + col1[k], y + row1[k]))
if row1[k]==1:
visited[x + col1[k]][y + row1[k]] = True
quit.append((x + col1[k], y + row1[k]))
elif node==2:
col2=[1,-1,0]
row2=[0,0,-1]
for k in range(3):
if check_visited(matrix, x + col2[k], y + row2[k], visited):
if col2[k]==-1 and matrix[x + col2[k]][y + row2[k]] in [2,3]:
visited[x + col2[k]][y + row2[k]] = True
quit.append((x + col2[k], y + row2[k]))
if row2[k]==-1 and matrix[x + col2[k]][y + row2[k]] in [1,3]:
visited[x + col2[k]][y + row2[k]] = True
quit.append((x + col2[k], y + row2[k]))
if col2[k]==1:
visited[x + col2[k]][y + row2[k]] = True
quit.append((x + col2[k], y + row2[k]))
elif node==3:
col3=[1,-1,0,0]
row3=[0,0,-1,1]
for k in range(4):
if check_visited(matrix, x + col3[k], y + row3[k], visited):
if col3[k]==-1 and matrix[x + col3[k]][y + row3[k]] in [2,3]:
visited[x + col3[k]][y + row3[k]] = True
quit.append((x + col3[k], y + row3[k]))
if row3[k]==-1 and matrix[x + col3[k]][y + row3[k]] in [1,3]:
visited[x + col3[k]][y + row3[k]] = True
quit.append((x + col3[k], y + row3[k]))
if col3[k]==1:
visited[x + col3[k]][y + row3[k]] = True
quit.append((x + col3[k], y + row3[k]))
if row3[k]==1:
visited[x + col3[k]][y + row3[k]] = True
quit.append((x + col3[k], y + row3[k]))
elif node==0:
col0=[-1,0]
row0=[0,-1]
for k in range(2):
if check_visited(matrix, x + col0[k], y + row0[k], visited):
if col0[k]==-1 and matrix[x + col0[k]][y + row0[k]] in [2,3]:
visited[x + col0[k]][y + row0[k]] = True
quit.append((x + col0[k], y + row0[k]))
if row0[k]==-1 and matrix[x + col0[k]][y + row0[k]] in [1,3]:
visited[x + col0[k]][y + row0[k]] = True
quit.append((x + col0[k], y + row0[k]))
visited = [[False for x in range(len(matrix[0]))] for y in range(len(matrix))]
walls=0
for i in range(len(matrix)):
for j in range(len(matrix[0])):
if matrix[i][j]!=0 and not visited[i][j]:
check(matrix, i, j, visited, matrix[i][j])
walls +=1
###############################################################
#######--------inner points & accessible areas--------#########
###############################################################
def check_visited4(matrixx, x, y, visited4): #checks if it was previously visited
return (x>-1) and (x<len(matrixx)) and (y>-1) and (y<len(matrixx[0])) and visited4[x][y]
def find_the_size_of_the_culdesac(i, j, matrixwalls): #used to find how deep the culdesac is
quit = deque()
quit.append((i,j))
culdesacs3 = 0
coordinates2 = []
coordinates2.append([i,j])
matrixwalls[i][j] = False
while quit:
x, y = quit.popleft()
col=[-1,1,0,0]
row=[0,0,-1,1]
safe = 0
for k in range(4):
if (x + col[k]>-1) and (x + col[k]<len(matrixwalls)) and\
(y + row[k]>-1) and (y + row[k]<len(matrixwalls[0]))and \
not matrixwalls[x + col[k]][y + row[k]]!='w' and\
matrixwalls[x + col[k]][y + row[k]]:
if col[k]==-1:
visited6[x-1][y] = False
quit.append((x-1, y))
#coordinates2.append([x-1, y])
safe+=1
if col[k]==1:
visited6[x+1][y] = False
quit.append((x+1, y))
#coordinates2.append([x+1, y])
safe+=1
if row[k]==-1:
visited6[x][y-1] = False
quit.append((x, y-1))
#coordinates2.append([x, y-1])
safe+=1
if row[k]==1:
visited6[x][y+1] = False
quit.append((x, y+1))
#coordinates2.append([x, y+1])
safe+=1
if safe>1:
quit.clear()
for x in coordinates2:
coordinates.append(x)
else:
coordinates.append([x,y])
# if safe>1:
# del coordinates[-3:]
# for x in range(len(coordinates)):
# if coordinates[x][0] % 2 ==1 and coordinates[x][1] % 2 ==1:
# culdesacs3+=1
# if culdesacs3!=0:
# return culdesacs3
# elif safe<1:
# return 0
def check_culdesac(i, j, matrixwalls): #finds culdesacs in accesible areas with multiple gates
#visited6 = deepcopy(visited5) #used to find how deep the culdesac is / the size of the whole
quit = deque()
quit.append((i,j))
culdesacs2 = 0
matrixwalls[i][j] = False
visited6[i][j] = False
while quit:
x, y = quit.popleft()
col=[-1,1,0,0]
row=[0,0,-1,1]
no_of_walls = 0
for k in range(4):
if (x + col[k]>-1) and (x + col[k]<len(matrixwalls)) and\
(y + row[k]>-1) and (y + row[k]<len(matrixwalls[0])):
if matrixwalls[x + col[k]][y + row[k]]!='w':
if check_visited4(matrixx, x + col[k], y + row[k], visited6):
if col[k]==-1:
visited6[x-1][y] = False
quit.append((x-1, y))
if col[k]==1:
visited6[x+1][y] = False
quit.append((x+1, y))
if row[k]==-1:
visited6[x][y-1] = False
quit.append((x, y-1))
if row[k]==1:
visited6[x][y+1] = False
quit.append((x, y+1))
else:
no_of_walls+=1
if no_of_walls==3 and x!=0 and x!=len(matrixwalls)-1 and y!=0 and y!=len(matrixwalls[0])-1:
culdesacs2+=1
find_the_size_of_the_culdesac(x, y, matrixwalls)
if culdesacs2!=0:
return culdesacs2
else:
return 0
def accessible_areas(matrixx, i, j, visited4):
quit = deque()
quit.append((i,j))
gates = 0
global areas
global inners
global culdesacs
global onepaths
areas = 0
inners = 0
culdesacs = 0
onepaths = 0
coordinates3 = []
if visited4[i][j]:
points = 0
coordinates3.append([i,j])
visited4[i][j] = False
while quit:
x,y = quit.popleft()
col=[-1,1,0,0]
row=[0,0,-1,1]
points+=1
for k in range(4):
if check_visited4(matrixx, x + col[k], y + row[k], visited4):
if col[k]==-1:
visited4[x-1][y] = False
quit.append((x-1, y))
coordinates3.append([x-1, y])
if col[k]==1:
visited4[x+1][y] = False
quit.append((x+1, y))
coordinates3.append([x+1, y])
if row[k]==-1:
visited4[x][y-1] = False
quit.append((x, y-1))
coordinates3.append([x, y-1])
if row[k]==1:
visited4[x][y+1] = False
quit.append((x, y+1))
coordinates3.append([x, y+1])
for x in range(len(coordinates3)):
if coordinates3[x][0] in [0, len(visited4)-1]\
or coordinates3[x][1] in [0, len(visited4[0])-1]:
gates+=1
if gates==0:
for x in range(len(coordinates3)):
if coordinates3[x][0] % 2 ==1 and coordinates3[x][1] % 2 ==1:
inners+=1
return inners
elif gates>0:
if gates==1:
for x in coordinates3:
if x[0] % 2 ==1 and x[1] % 2 ==1:
coordinates.append(x)
culdesacs+=1
elif gates>1:
culdesacs += check_culdesac(coordinates3[-1][0], coordinates3[-1][1], matrixwalls)
if gates==2 and onepaths>=0:
for x in coordinates3:
if x[0] % 2 ==1 and x[1] % 2 ==1 or (x[0]==0 or x[1]==0) or\
(x[0]==len(coordinates3) or x[1]==len(coordinates3[0])):
coordinates4.append(x)
# elif x[0]==0 or x[1]==0:
# coordinates5.append(x)
onepaths+=1
areas+=1
return areas
matrixx = []
for x in range(2*len(matrix)-1):
matrixx.append([])
for y in range(2*len(matrix[0])-1):
matrixx[x].append(True)
for x in range(len(matrix)):
for y in range(len(matrix[0])):
matrixx[2*x][2*y]=matrix[x][y]
matrixwalls = deepcopy(matrixx)
for x in range(len(matrixx)):
for y in range(len(matrixx[0])):
if str(matrixx[x][y])=='0':
matrixx[x][y]=False
elif str(matrixx[x][y])=='1':
matrixx[x][y+1]=False
matrixx[x][y]=False
elif matrixx[x][y]==2:
matrixx[x+1][y]=False
matrixx[x][y]=False
elif matrixx[x][y]==3:
matrixx[x+1][y]=False
matrixx[x][y+1]=False
matrixx[x][y]=False
for x in range(len(matrixwalls)):
for y in range(len(matrixwalls[0])):
if str(matrixwalls[x][y])=='0':
matrixwalls[x][y]='w'
elif str(matrixwalls[x][y])=='1':
matrixwalls[x][y+1]='w'
matrixwalls[x][y]='w'
elif matrixwalls[x][y]==2:
matrixwalls[x+1][y]='w'
matrixwalls[x][y]='w'
elif matrixwalls[x][y]==3:
matrixwalls[x+1][y]='w'
matrixwalls[x][y+1]='w'
matrixwalls[x][y]='w'
area = 0
inner = 0
culdesac = 0
onepath = 0
visited4 = deepcopy(matrixx)
visited5 = deepcopy(matrixx) #used in def check_culdesac function
visited6 = deepcopy(visited5) #used to find how deep the culdesac is / the size of the whole
global coordinates
global coordinates4
global coordinates5
coordinates = []
coordinates4 = []
coordinates5 = []
for i in range(len(matrixx)):
for j in range(len(matrixx[0])):
if matrixx[i][j]:
a = accessible_areas(matrixx, i, j, visited4)
if areas!=0:
area+=a
culdesac+=culdesacs
onepath+=onepaths
elif inners!=0:
inner+=a
###############################################################
#################--------draw walls--------####################
###############################################################
hor = 0
hor_list_1 = []
hor_list_2 = []
for x in range(len(matrix)):
for y in range(len(matrix[0])):
if matrix[x][y] in [1,3] and hor==0:
hor+=1
hor_list_1.append([y,x])
elif not matrix[x][y] in [1,3] and hor!=0:
hor_list_2.append([y,x])
hor = 0
ver = 0
ver_list_1 = []
ver_list_2 = []
for y in range(len(matrix[0])):
for x in range(len(matrix)):
if matrix[x][y] in [2,3] and ver==0:
ver+=1
ver_list_1.append([y,x])
elif not matrix[x][y] in [2,3] and ver!=0:
ver_list_2.append([y,x])
ver = 0
###############################################################
################--------draw pillars--------###################
###############################################################
pillars = []
for x in range(len(matrix)):
for y in range(len(matrix[0])):
if matrix[x][y]==0:
if x==0 and y==0:
pillars.append([y,x])
elif x==0 and y!=0 and matrix[0][y-1]==2:
pillars.append([y,x])
elif x!=0 and y==0 and matrix[x-1][0]==1:
pillars.append([y,x])
elif x!=0 and y!=0 and matrix[x-1][y] in [0,1] and matrix[x][y-1] in [0,2]:
pillars.append([y,x])
###############################################################
###############--------draw culdesacs--------##################
###############################################################
for x in coordinates:
i=x[0]
j=x[1]
x[0]=j/2
x[1]=i/2
coordinates.sort()
global culdesac_nodes
culdesac_nodes = coordinates
###############################################################
#################--------draw path--------#####################
###############################################################
for x in coordinates4:
i=x[0]
j=x[1]
x[0]=j/2
x[1]=i/2
for x in coordinates5:
i=x[0]
j=x[1]
x[0]=j/2
x[1]=i/2
if x[0]==0:
x[0]=-0.5
elif x[1]==0:
x[1]=-0.5
# for x in coordinates5:
# coordinates4.append(x)
horizons = []
verticals = []
for index, x in enumerate(coordinates4):
if index < len(coordinates4)-1:
if x[0]==coordinates4[index+1][0]:
verticals.append([x,coordinates4[index+1]])
if x[1]==coordinates4[index+1][1]:
horizons.append([x,coordinates4[index+1]])
horizons2=[]
verticals2=[]
for x in horizons:
horizons2.append(x[0])
horizons2.append(x[1])
for x in verticals:
verticals2.append(x[0])
verticals2.append(x[1])
horizons2.sort()
verticals2.sort()
verticals_0=[]
for x in verticals2:
verticals_0.append(x[0])
for x in range(len(verticals_0)):
y = verticals_0.count(verticals_0[x])
if y>2:
del verticals2[x+1:x+y-1]
horizons_1=[]
for x in horizons2:
horizons_1.append(x[1])
for x in range(len(horizons_1)):
y = horizons_1.count(horizons_1[x])
if y>2:
del horizons2[x+1:x+y-1]
for x in horizons2:
if x[0]==0:
x[0]=-0.5
elif x[1]==0:
x[1]=-0.5
for x in verticals2:
if x[0]==0:
x[0]=-0.5
elif x[1]==0:
x[1]=-0.5
horizons2.sort()
verticals2.sort()
ff=open(texname, 'w')
ff.write('\\documentclass[10pt]{article}\n'
'\\usepackage{tikz}\n'
'\\usetikzlibrary{shapes.misc}\n'
'\\usepackage[margin=0cm]{geometry}\n'
'\\pagestyle{empty}\n'
'\\tikzstyle{every node}=[cross out, draw, red]\n'
'\n'
'\\begin{document}\n'
'\n'
'\\vspace*{\\fill}\n'
'\\begin{center}\n'
'\\begin{tikzpicture}[x=0.5cm, y=-0.5cm, ultra thick, blue]\n'
'% Walls\n')
for x in range(len(hor_list_1)):
i = hor_list_1[x][0]
ii = hor_list_1[x][1]
j = hor_list_2[x][0]
jj = hor_list_2[x][1]
y = ' \\draw ({},{}) -- ({},{});\n'.format(i,ii,j,jj)
ff.write(y)
for x in range(len(ver_list_1)):
i = ver_list_1[x][0]
ii = ver_list_1[x][1]
j = ver_list_2[x][0]
jj = ver_list_2[x][1]
y = ' \\draw ({},{}) -- ({},{});\n'.format(i,ii,j,jj)
ff.write(y)
ff.write('% Pillars\n')
for x in pillars:
i = x[0]
ii = x[1]
y = ' \\fill[green] ({},{}) circle(0.2);\n'.format(i,ii)
ff.write(y)
ff.write('% Inner points in accessible cul-de-sacs\n')
for x in culdesac_nodes:
i = x[0]
ii = x[1]
y = ' \\node at ({},{}) {{}};\n'.format(i,ii)
ff.write(y)
ff.write('% Entry-exit paths without intersections\n')
for x in range(0,len(horizons2),2):
i = horizons2[x][0]
ii = horizons2[x][1]
j = horizons2[x+1][0]
jj = horizons2[x+1][1]
y = ' \\draw[dashed, yellow] ({},{}) -- ({},{});\n'.format(i,ii,j,jj)
ff.write(y)
for x in range(0,len(verticals2),2):
i = verticals2[x][0]
ii = verticals2[x][1]
j = verticals2[x+1][0]
jj = verticals2[x+1][1]
y = ' \\draw[dashed, yellow] ({},{}) -- ({},{});\n'.format(i,ii,j,jj)
ff.write(y)
ff.write('\\end{tikzpicture}\n'
'\\end{center}\n'
'\\vspace*{\\fill}\n'
'\n'
'\\end{document}\n') | 34.496057 | 107 | 0.350083 | 4,643 | 43,741 | 3.267715 | 0.055137 | 0.013973 | 0.021751 | 0.021751 | 0.817097 | 0.785921 | 0.773267 | 0.759953 | 0.759953 | 0.747232 | 0 | 0.048511 | 0.462747 | 43,741 | 1,268 | 108 | 34.496057 | 0.597106 | 0.078873 | 0 | 0.79203 | 0 | 0.001245 | 0.038931 | 0.003315 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021171 | false | 0 | 0.004981 | 0.004981 | 0.044832 | 0.007472 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
00c93bf226135d8d3d869c4ad59a088d502c36b1 | 256 | py | Python | bitmovin_api_sdk/encoding/manifests/dash/periods/adaptationsets/audio/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 11 | 2019-07-03T10:41:16.000Z | 2022-02-25T21:48:06.000Z | bitmovin_api_sdk/encoding/manifests/dash/periods/adaptationsets/audio/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 8 | 2019-11-23T00:01:25.000Z | 2021-04-29T12:30:31.000Z | bitmovin_api_sdk/encoding/manifests/dash/periods/adaptationsets/audio/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 13 | 2020-01-02T14:58:18.000Z | 2022-03-26T12:10:30.000Z | from bitmovin_api_sdk.encoding.manifests.dash.periods.adaptationsets.audio.audio_api import AudioApi
from bitmovin_api_sdk.encoding.manifests.dash.periods.adaptationsets.audio.audio_adaptation_set_list_query_params import AudioAdaptationSetListQueryParams
| 85.333333 | 154 | 0.914063 | 32 | 256 | 7 | 0.5625 | 0.107143 | 0.133929 | 0.160714 | 0.625 | 0.625 | 0.625 | 0.625 | 0.625 | 0.625 | 0 | 0 | 0.03125 | 256 | 2 | 155 | 128 | 0.903226 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
00d00a3161e6bc29938fa22fdd1214b76ac7e8bd | 24,414 | py | Python | tests/connect_board_test.py | taKana671/PhotoEditor | 6b658e088fcaeb84f49db2eeb282f9cb2f469264 | [
"MIT"
] | 2 | 2021-04-11T14:39:31.000Z | 2021-04-11T14:39:35.000Z | tests/connect_board_test.py | taKana671/PhotoEditor | 6b658e088fcaeb84f49db2eeb282f9cb2f469264 | [
"MIT"
] | null | null | null | tests/connect_board_test.py | taKana671/PhotoEditor | 6b658e088fcaeb84f49db2eeb282f9cb2f469264 | [
"MIT"
] | null | null | null | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(os.path.join(os.path.dirname(__file__), '../photoeditor'))
import tkinter as tk
from unittest import TestCase, mock, main
from PIL import Image
from TkinterDnD2 import *
from photoeditor.config import (SAVE_MSG_1, SAVE_MSG_2, SAVE_MSG_3, INFO, ERROR,
RIGHT_CANVAS_MSG_1, RIGHT_CANVAS_MSG_3, RIGHT_CANVAS_MSG_4)
from photoeditor.connect_board import EditorBoard, ConnectBoard
class ConnectBoardTestCase(TestCase):
    """Base fixture for ConnectBoard/EditorBoard tests.

    Opens the sample image 'test.jpg', builds a hidden drag-and-drop-enabled
    Tk root with an EditorBoard attached, and flushes pending Tk events so
    widget geometry is realized before each test runs.
    """

    def setUp(self):
        # Reference image the tests load into the editor.
        self.test_path = 'test.jpg'
        self.test_img = Image.open(self.test_path)
        self.width, self.height = self.test_img.size
        # Hidden root window: withdraw() keeps the GUI off-screen during tests.
        self.app = TkinterDnD.Tk()
        self.app.withdraw()
        self.editor = EditorBoard(self.app)
        self.pump_events()

    def tearDown(self):
        # Release the image file handle and tear the Tk root down,
        # flushing the destroy events so the next test starts clean.
        self.test_img.close()
        if self.app:
            self.app.destroy()
        self.pump_events()

    def pump_events(self):
        """Drain the Tk event queue without blocking (no mainloop in tests)."""
        while self.app.dooneevent(tk._tkinter.ALL_EVENTS | tk._tkinter.DONT_WAIT):
            pass
class SaveTestCase(ConnectBoardTestCase):
"""Test for save_with_pil
"""
@mock.patch('photoeditor.base_board.filedialog.asksaveasfilename')
@mock.patch('photoeditor.base_board.messagebox.showerror')
def test_image_path_is_None(self, mock_msgbox, mock_filedialog):
"""Check that a converted image is not saved
when Save button is clicked because img_path is None.
"""
self.editor.right_canvas.save_with_pil()
mock_msgbox.assert_called_once()
mock_filedialog.assert_not_called()
# error message
call_args = mock_msgbox.call_args_list[0]
self.assertEqual(call_args[0][0], ERROR)
self.assertEqual(str(call_args[0][1]), RIGHT_CANVAS_MSG_1)
@mock.patch('photoeditor.base_board.filedialog.asksaveasfilename')
@mock.patch('photoeditor.base_board.messagebox.showerror')
def test_width_entry_is_empty(self, mock_msgbox, mock_filedialog):
"""Check that a converted image is not saved
when Save button is clicked because width entry is empty.
"""
height_var = mock.MagicMock()
height_var.get.return_value = self.height
mock_height_var = height_var
width_var = mock.MagicMock()
width_var.get.return_value = ''
mock_width_var = width_var
with mock.patch.object(self.editor.right_canvas, 'current_img', self.test_img):
with mock.patch.object(self.editor.right_canvas, 'width_var', mock_width_var):
with mock.patch.object(self.editor.right_canvas, 'height_var', mock_height_var):
self.editor.right_canvas.save_with_pil()
mock_msgbox.assert_called_once()
mock_filedialog.assert_not_called()
# error message
call_args = mock_msgbox.call_args_list[0]
self.assertEqual(call_args[0][0], ERROR)
self.assertEqual(str(call_args[0][1]), SAVE_MSG_3)
@mock.patch('photoeditor.base_board.filedialog.asksaveasfilename')
@mock.patch('photoeditor.base_board.messagebox.showerror')
def test_width_entry_is_0(self, mock_msgbox, mock_filedialog):
"""Check that a converted image is not saved
when Save button is clicked because width is 0.
"""
height_var = mock.MagicMock()
height_var.get.return_value = self.height
mock_height_var = height_var
width_var = mock.MagicMock()
width_var.get.return_value = 0
mock_width_var = width_var
with mock.patch.object(self.editor.right_canvas, 'current_img', self.test_img):
with mock.patch.object(self.editor.right_canvas, 'width_var', mock_width_var):
with mock.patch.object(self.editor.right_canvas, 'height_var', mock_height_var):
self.editor.right_canvas.save_with_pil()
mock_msgbox.assert_called_once()
mock_filedialog.assert_not_called()
# error message
call_args = mock_msgbox.call_args_list[0]
self.assertEqual(call_args[0][0], ERROR)
self.assertEqual(str(call_args[0][1]), SAVE_MSG_1)
@mock.patch('photoeditor.base_board.filedialog.asksaveasfilename')
@mock.patch('photoeditor.base_board.messagebox.showerror')
def test_height_entry_is_empty(self, mock_msgbox, mock_filedialog):
"""Check that a converted image is not saved
when Save button is clicked because height entry is empty.
"""
height_var = mock.MagicMock()
height_var.get.return_value = ''
mock_height_var = height_var
width_var = mock.MagicMock()
width_var.get.return_value = self.width
mock_width_var = width_var
with mock.patch.object(self.editor.right_canvas, 'current_img', self.test_img):
with mock.patch.object(self.editor.right_canvas, 'width_var', mock_width_var):
with mock.patch.object(self.editor.right_canvas, 'height_var', mock_height_var):
self.editor.right_canvas.save_with_pil()
mock_msgbox.assert_called_once()
mock_filedialog.assert_not_called()
# error message
call_args = mock_msgbox.call_args_list[0]
self.assertEqual(call_args[0][0], ERROR)
self.assertEqual(str(call_args[0][1]), SAVE_MSG_3)
@mock.patch('photoeditor.base_board.filedialog.asksaveasfilename')
@mock.patch('photoeditor.base_board.messagebox.showerror')
def test_height_entry_is_0(self, mock_msgbox, mock_filedialog):
"""Check that a converted image is not saved
when Save button is clicked because height is 0.
"""
height_var = mock.MagicMock()
height_var.get.return_value = 0
mock_height_var = height_var
width_var = mock.MagicMock()
width_var.get.return_value = self.width
mock_width_var = width_var
with mock.patch.object(self.editor.right_canvas, 'current_img', self.test_img):
with mock.patch.object(self.editor.right_canvas, 'width_var', mock_width_var):
with mock.patch.object(self.editor.right_canvas, 'height_var', mock_height_var):
self.editor.right_canvas.save_with_pil()
mock_msgbox.assert_called_once()
mock_filedialog.assert_not_called()
# error message
call_args = mock_msgbox.call_args_list[0]
self.assertEqual(call_args[0][0], ERROR)
self.assertEqual(str(call_args[0][1]), SAVE_MSG_1)
@mock.patch('photoeditor.base_board.messagebox.showinfo')
@mock.patch('photoeditor.base_board.filedialog.asksaveasfilename')
@mock.patch('photoeditor.base_board.messagebox.showerror')
def test_save_path_is_selected(self, mock_err_msgbox, mock_filedialog, mock_info_msgbox):
"""Check that a converted image is saved without resizing
when Save button is clicked.
"""
width_var = mock.MagicMock()
width_var.get.return_value = self.width
mock_width_var = width_var
height_var = mock.MagicMock()
height_var.get.return_value = self.height
mock_height_var = height_var
mock_filedialog.return_value = 'test'
mock_current_img = mock.MagicMock(size=(self.width, self.height))
mock_current_save = mock.MagicMock()
mock_thumbnail = mock.MagicMock()
mock_thumbnail_save = mock.MagicMock()
mock_current_img.save = mock_current_save
mock_current_img.copy.return_value.thumbnail = mock_thumbnail
mock_current_img.copy.return_value.save = mock_thumbnail_save
with mock.patch.object(self.editor.right_canvas, 'current_img', mock_current_img):
with mock.patch.object(self.editor.right_canvas, 'width_var', mock_width_var):
with mock.patch.object(self.editor.right_canvas, 'height_var', mock_height_var):
self.editor.right_canvas.save_with_pil()
mock_err_msgbox.assert_not_called()
mock_filedialog.assert_called_once()
mock_current_save.assert_called_once_with('test')
mock_info_msgbox.assert_called_once_with(INFO, SAVE_MSG_2)
mock_thumbnail.assert_not_called()
mock_thumbnail_save.assert_not_called()
@mock.patch('photoeditor.base_board.messagebox.showinfo')
@mock.patch('photoeditor.base_board.filedialog.asksaveasfilename')
@mock.patch('photoeditor.base_board.messagebox.showerror')
def test_save_path_is_selected_and_resized(self, mock_err_msgbox, mock_filedialog, mock_info_msgbox):
"""Check that a converted image is resized and saved
when Save button is clicked.
"""
width_var = mock.MagicMock()
width_var.get.return_value = self.width * 2
mock_width_var = width_var
height_var = mock.MagicMock()
height_var.get.return_value = self.height * 2
mock_height_var = height_var
mock_filedialog.return_value = 'test'
mock_current_img = mock.MagicMock(size=(self.width, self.height))
mock_current_save = mock.MagicMock()
mock_thumbnail = mock.MagicMock()
mock_thumbnail_save = mock.MagicMock()
mock_current_img.save = mock_current_save
mock_current_img.copy.return_value.thumbnail = mock_thumbnail
mock_current_img.copy.return_value.save = mock_thumbnail_save
with mock.patch.object(self.editor.right_canvas, 'current_img', mock_current_img):
with mock.patch.object(self.editor.right_canvas, 'width_var', mock_width_var):
with mock.patch.object(self.editor.right_canvas, 'height_var', mock_height_var):
self.editor.right_canvas.save_with_pil()
mock_err_msgbox.assert_not_called()
mock_filedialog.assert_called_once()
mock_current_save.assert_not_called()
mock_info_msgbox.assert_called_once_with(INFO, SAVE_MSG_2)
mock_thumbnail.assert_called_once()
mock_thumbnail_save.assert_called_once()
class ShowRepeatedImageTestCase(ConnectBoardTestCase):
    """Test for show_repeated_image."""

    @mock.patch('photoeditor.connect_board.RightCanvas.display_image_size')
    @mock.patch('photoeditor.connect_board.RightCanvas.create_image_pil')
    @mock.patch('photoeditor.connect_board.messagebox.showerror')
    def test_current_img_is_none(self, mock_msgbox, mock_create_image_pil, mock_display_image_size):
        """Check that a repeated image is not made on the right canvas
        when Repeat button is clicked because current_img is None.
        """
        self.editor.right_canvas.show_repeated_image()
        mock_create_image_pil.assert_not_called()
        mock_display_image_size.assert_not_called()
        # error message
        call_args = mock_msgbox.call_args_list[0]
        self.assertEqual(call_args[0][0], ERROR)
        self.assertEqual(str(call_args[0][1]), RIGHT_CANVAS_MSG_1)

    @mock.patch('photoeditor.connect_board.RightCanvas.display_image_size')
    @mock.patch('photoeditor.connect_board.RightCanvas.create_image_pil')
    @mock.patch('photoeditor.connect_board.messagebox.showerror')
    def test_columns_entry_is_empty(self, mock_msgbox, mock_create_image_pil, mock_display_image_size):
        """Check that a repeated image is not made on the right canvas
        when Repeat button is clicked because columns entry is empty.
        """
        int_var = mock.MagicMock()
        int_var.get.return_value = ''
        mock_col_var = int_var
        with mock.patch.object(self.editor.right_canvas, 'current_img', self.test_img):
            with mock.patch.object(self.editor.right_canvas, 'col_var', mock_col_var):
                self.editor.right_canvas.show_repeated_image()
                mock_create_image_pil.assert_not_called()
                mock_display_image_size.assert_not_called()
                # error message
                call_args = mock_msgbox.call_args_list[0]
                self.assertEqual(call_args[0][0], ERROR)
                self.assertEqual(str(call_args[0][1]), RIGHT_CANVAS_MSG_3)

    @mock.patch('photoeditor.connect_board.RightCanvas.display_image_size')
    @mock.patch('photoeditor.connect_board.RightCanvas.create_image_pil')
    @mock.patch('photoeditor.connect_board.messagebox.showerror')
    def test_columns_entry_is_0(self, mock_msgbox, mock_create_image_pil, mock_display_image_size):
        """Check that a repeated image is not made on the right canvas
        when Repeat button is clicked because columns entry is 0.
        """
        int_var = mock.MagicMock()
        int_var.get.return_value = 0
        mock_col_var = int_var
        with mock.patch.object(self.editor.right_canvas, 'current_img', self.test_img):
            with mock.patch.object(self.editor.right_canvas, 'col_var', mock_col_var):
                self.editor.right_canvas.show_repeated_image()
                mock_create_image_pil.assert_not_called()
                mock_display_image_size.assert_not_called()
                # error message
                call_args = mock_msgbox.call_args_list[0]
                self.assertEqual(call_args[0][0], ERROR)
                self.assertEqual(str(call_args[0][1]), RIGHT_CANVAS_MSG_3)

    @mock.patch('photoeditor.connect_board.RightCanvas.display_image_size')
    @mock.patch('photoeditor.connect_board.RightCanvas.create_image_pil')
    @mock.patch('photoeditor.connect_board.messagebox.showerror')
    def test_rows_entry_is_empty(self, mock_msgbox, mock_create_image_pil, mock_display_image_size):
        """Check that a repeated image is not made on the right canvas
        when Repeat button is clicked because rows entry is empty.
        """
        int_var = mock.MagicMock()
        int_var.get.return_value = ''
        mock_row_var = int_var
        with mock.patch.object(self.editor.right_canvas, 'current_img', self.test_img):
            with mock.patch.object(self.editor.right_canvas, 'row_var', mock_row_var):
                self.editor.right_canvas.show_repeated_image()
                mock_create_image_pil.assert_not_called()
                mock_display_image_size.assert_not_called()
                # error message
                call_args = mock_msgbox.call_args_list[0]
                self.assertEqual(call_args[0][0], ERROR)
                self.assertEqual(str(call_args[0][1]), RIGHT_CANVAS_MSG_3)

    @mock.patch('photoeditor.connect_board.RightCanvas.display_image_size')
    @mock.patch('photoeditor.connect_board.RightCanvas.create_image_pil')
    @mock.patch('photoeditor.connect_board.messagebox.showerror')
    def test_rows_entry_is_0(self, mock_msgbox, mock_create_image_pil, mock_display_image_size):
        """Check that a repeated image is not made on the right canvas
        when Repeat button is clicked because rows entry is 0.
        """
        int_var = mock.MagicMock()
        # BUG FIX: this was '' (an exact duplicate of test_rows_entry_is_empty);
        # the 0 case advertised by the test name was never exercised.
        int_var.get.return_value = 0
        mock_row_var = int_var
        with mock.patch.object(self.editor.right_canvas, 'current_img', self.test_img):
            with mock.patch.object(self.editor.right_canvas, 'row_var', mock_row_var):
                self.editor.right_canvas.show_repeated_image()
                mock_create_image_pil.assert_not_called()
                mock_display_image_size.assert_not_called()
                # error message
                call_args = mock_msgbox.call_args_list[0]
                self.assertEqual(call_args[0][0], ERROR)
                self.assertEqual(str(call_args[0][1]), RIGHT_CANVAS_MSG_3)

    @mock.patch('photoeditor.connect_board.RightCanvas.display_image_size')
    @mock.patch('photoeditor.connect_board.RightCanvas.create_image_pil')
    @mock.patch('photoeditor.connect_board.messagebox.showerror')
    def test_show_repeated_image(self, mock_msgbox, mock_create_image_pil, mock_display_image_size):
        """Check that a repeated image is made on the right canvas
        when Repeat button is clicked.
        """
        with mock.patch.object(self.editor.right_canvas, 'current_img', self.test_img):
            self.editor.right_canvas.show_repeated_image()
            mock_create_image_pil.assert_called_once()
            mock_display_image_size.assert_called_once_with(self.width, self.height)
            mock_msgbox.assert_not_called()
class ResetImageTestCase(ConnectBoardTestCase):
    """Test for reset_image."""

    @mock.patch('photoeditor.base_board.tk.Canvas.delete')
    def test_reset_image(self, mock_delete):
        """Verify that Reset clears the displayed image and the list of
        images collected for concatenation.
        """
        canvas = self.editor.right_canvas
        with mock.patch.object(canvas, 'current_img', self.test_img), \
                mock.patch.object(canvas, 'concat_imgs', [self.test_img]):
            canvas.reset_image()
            self.assertEqual(canvas.current_img, None)
            self.assertEqual(canvas.concat_imgs, [])
            mock_delete.assert_called_once_with('all')
class ShowConcatImageTestCase(ConnectBoardTestCase):
    """Test for show_concat_image."""

    @mock.patch('photoeditor.connect_board.RightCanvas.concat_vertically')
    @mock.patch('photoeditor.connect_board.RightCanvas.concat_horizontally')
    @mock.patch('photoeditor.connect_board.RightCanvas.display_image_size')
    @mock.patch('photoeditor.connect_board.RightCanvas.create_image_pil')
    @mock.patch('photoeditor.connect_board.messagebox.showerror')
    def test_current_img_is_none(self, mock_msgbox, mock_create_image_pil, mock_display_image_size,
                                 mock_concat_horizontally, mock_concat_vertically):
        """Verify that Connect concatenates nothing while no image is
        displayed on the right canvas.
        """
        self.editor.right_canvas.show_concat_image()
        mock_create_image_pil.assert_not_called()
        mock_display_image_size.assert_not_called()
        mock_concat_horizontally.assert_not_called()
        mock_concat_vertically.assert_not_called()
        # the error dialog must report the no-image message
        err_args = mock_msgbox.call_args_list[0]
        self.assertEqual(err_args[0][0], ERROR)
        self.assertEqual(str(err_args[0][1]), RIGHT_CANVAS_MSG_1)

    @mock.patch('photoeditor.connect_board.RightCanvas.concat_vertically')
    @mock.patch('photoeditor.connect_board.RightCanvas.concat_horizontally')
    @mock.patch('photoeditor.connect_board.RightCanvas.display_image_size')
    @mock.patch('photoeditor.connect_board.RightCanvas.create_image_pil')
    @mock.patch('photoeditor.connect_board.messagebox.showerror')
    def test_imgs_less_than_2(self, mock_msgbox, mock_create_image_pil, mock_display_image_size,
                              mock_concat_horizontally, mock_concat_vertically):
        """Verify that Connect refuses to run when concat_imgs holds fewer
        than two images.
        """
        canvas = self.editor.right_canvas
        with mock.patch.object(canvas, 'current_img', self.test_img), \
                mock.patch.object(canvas, 'concat_imgs', [self.test_img]):
            canvas.show_concat_image()
            mock_create_image_pil.assert_not_called()
            mock_display_image_size.assert_not_called()
            mock_concat_horizontally.assert_not_called()
            mock_concat_vertically.assert_not_called()
            # the error dialog must report the not-enough-images message
            err_args = mock_msgbox.call_args_list[0]
            self.assertEqual(err_args[0][0], ERROR)
            self.assertEqual(str(err_args[0][1]), RIGHT_CANVAS_MSG_4)

    @mock.patch('photoeditor.connect_board.RightCanvas.concat_vertically')
    @mock.patch('photoeditor.connect_board.RightCanvas.concat_horizontally')
    @mock.patch('photoeditor.connect_board.RightCanvas.display_image_size')
    @mock.patch('photoeditor.connect_board.RightCanvas.create_image_pil')
    @mock.patch('photoeditor.connect_board.messagebox.showerror')
    def test_show_horizontally_concat_image(self, mock_msgbox, mock_create_image_pil, mock_display_image_size,
                                            mock_concat_horizontally, mock_concat_vertically):
        """Verify that a horizontally concatenated image is displayed when
        the radio button selects horizontal mode (True).
        """
        mock_radio_bool = mock.MagicMock()
        mock_radio_bool.get.return_value = True
        canvas = self.editor.right_canvas
        with mock.patch.object(canvas, 'current_img', self.test_img), \
                mock.patch.object(canvas, 'concat_imgs', [self.test_img, self.test_img]), \
                mock.patch.object(canvas, 'radio_bool', mock_radio_bool):
            canvas.show_concat_image()
            mock_concat_horizontally.assert_called_once()
            mock_create_image_pil.assert_called_once()
            mock_display_image_size.assert_called_once()
            mock_concat_vertically.assert_not_called()
            mock_msgbox.assert_not_called()

    @mock.patch('photoeditor.connect_board.RightCanvas.concat_vertically')
    @mock.patch('photoeditor.connect_board.RightCanvas.concat_horizontally')
    @mock.patch('photoeditor.connect_board.RightCanvas.display_image_size')
    @mock.patch('photoeditor.connect_board.RightCanvas.create_image_pil')
    @mock.patch('photoeditor.connect_board.messagebox.showerror')
    def test_show_vertically_concat_image(self, mock_msgbox, mock_create_image_pil, mock_display_image_size,
                                          mock_concat_horizontally, mock_concat_vertically):
        """Verify that a vertically concatenated image is displayed when
        the radio button selects vertical mode (False).
        """
        mock_radio_bool = mock.MagicMock()
        mock_radio_bool.get.return_value = False
        canvas = self.editor.right_canvas
        with mock.patch.object(canvas, 'current_img', self.test_img), \
                mock.patch.object(canvas, 'concat_imgs', [self.test_img, self.test_img]), \
                mock.patch.object(canvas, 'radio_bool', mock_radio_bool):
            canvas.show_concat_image()
            mock_concat_vertically.assert_called_once()
            mock_create_image_pil.assert_called_once()
            mock_display_image_size.assert_called_once()
            mock_concat_horizontally.assert_not_called()
            mock_msgbox.assert_not_called()
class ClearImagesTestCase(ConnectBoardTestCase):
    """Test for clear_images."""

    @mock.patch('photoeditor.base_board.tk.Canvas.delete')
    def test_clear_image(self, mock_delete):
        """Verify that Clear empties the shared source dict, resets the
        source index and wipes the left canvas.
        """
        with mock.patch.object(self.editor.left_canvas, 'current_img', self.test_img), \
                mock.patch.object(ConnectBoard, 'sources', {1: self.test_img}), \
                mock.patch.object(ConnectBoard, 'source_idx', 1):
            # preconditions set up by the patches
            self.assertEqual(ConnectBoard.sources, {1: self.test_img})
            self.assertEqual(ConnectBoard.source_idx, 1)
            self.editor.left_canvas.clear_images()
            self.assertEqual(ConnectBoard.sources, {})
            self.assertEqual(ConnectBoard.source_idx, 0)
            mock_delete.assert_called_once_with('all')
class ChangeImagesTestCase(ConnectBoardTestCase):
    """Test for change_images."""

    @mock.patch('photoeditor.connect_board.LeftCanvas.create_image_pil')
    def test_sources_dict_is_empty(self, mock_create_image_pil):
        """Verify that Change displays nothing while the shared
        ConnectBoard.sources dict holds no images.
        """
        self.editor.left_canvas.change_images()
        mock_create_image_pil.assert_not_called()

    @mock.patch('photoeditor.connect_board.LeftCanvas.create_image_pil')
    def test_change_images(self, mock_create_image_pil):
        """Verify that repeated Change clicks cycle through the images in
        ConnectBoard.sources on the left canvas.
        """
        with mock.patch.object(ConnectBoard, 'sources', {1: self.test_img, 2: self.test_img}):
            self.editor.left_canvas.change_images()
            self.editor.left_canvas.change_images()
            mock_create_image_pil.assert_has_calls(
                [mock.call(self.test_img), mock.call(self.test_img)])
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    main()
| 50.234568 | 110 | 0.704227 | 3,180 | 24,414 | 5.071069 | 0.051572 | 0.055252 | 0.071934 | 0.074228 | 0.90568 | 0.886643 | 0.868535 | 0.863326 | 0.85049 | 0.843545 | 0 | 0.005279 | 0.200786 | 24,414 | 485 | 111 | 50.338144 | 0.821187 | 0.102236 | 0 | 0.749296 | 0 | 0 | 0.158782 | 0.137424 | 0 | 0 | 0 | 0 | 0.24507 | 1 | 0.067606 | false | 0.002817 | 0.022535 | 0 | 0.109859 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
00e473fcf9aa7f2eff65e230b9373673bfb106a3 | 5,231 | py | Python | galaxy/bin_stack/inDev/compare_stack_with_spectra_SDSS.py | AndresSixtos/pyeBOSS | 4750908c8bc409633bef8f790133e3a1f3f0c9e4 | [
"CC0-1.0"
] | 1 | 2017-05-23T13:03:27.000Z | 2017-05-23T13:03:27.000Z | galaxy/bin_stack/inDev/compare_stack_with_spectra_SDSS.py | AndresSixtos/pyeBOSS | 4750908c8bc409633bef8f790133e3a1f3f0c9e4 | [
"CC0-1.0"
] | null | null | null | galaxy/bin_stack/inDev/compare_stack_with_spectra_SDSS.py | AndresSixtos/pyeBOSS | 4750908c8bc409633bef8f790133e3a1f3f0c9e4 | [
"CC0-1.0"
] | 2 | 2017-09-26T11:17:30.000Z | 2021-09-14T06:09:18.000Z | import sys
import os
from os.path import join
from SpectraStackingSDSSOnly import *
from HandleSdssPlate import *
import glob
# Directory holding the eBOSS ELG stack products and summary tables.
stackDir = "/uufs/chpc.utah.edu/common/home/u0936736/stack_eBOSSELG"
def compareSpectrumToStack(entry, grid, nameRoot="elg270_eboss17_"):
	"""Compare one observed ELG spectrum against its redshift-binned stack.

	:param entry: summary-table row with PLATE/MJD/FIBER identifiers, the
		redshift candidates Z_1..Z_3 and the stack bin index index_Z1
	:param grid: redshift bin edges used to build the stack file names
	:param nameRoot: prefix of the stack file names
	:return: tuple of reduced chi2 values at Z_1, Z_2 and Z_3, or
		(-1, -1, -1) when the entry has no reliable first redshift
	"""
	# gets the spectrum
	ObsPlate = HandleReducedELGPlate(entry['PLATE'],entry['MJD'])
	ObsPlate.loadSpec(entry['FIBER'])
	# gets the stack
	if entry['index_Z1']>=0 and entry['Z_1']>0 and entry['Z_ERR_1']>0 and entry['Z_1']>entry['Z_ERR_1']:
		suffix = "_Z1_"+str(n.round(grid[entry['index_Z1']],3))+"_"+str(n.round(grid[entry['index_Z1']+1],3))
		stackName = join(stackDir, nameRoot + suffix + "_stack.fits")
		hdu = fits.open(stackName)[1].data
		# keep only well-sampled stack pixels (>90% of the peak coverage)
		sel = (hdu['NspectraPerPixel']>0.9*n.max(hdu['NspectraPerPixel']))
		# compares stakc and spectrum at REDSHIFT
		def getchi2(REDSHIFT):
			# shift the stack to the observed frame, interpolate it onto the
			# overlapping part of the observed wavelength grid and compute
			# the chi2 per pixel against the observed flux
			wlmin=n.min(hdu['wavelength'][sel]*(1+REDSHIFT))
			wlmax=n.max(hdu['wavelength'][sel]*(1+REDSHIFT))
			meanStack =interp1d(hdu['wavelength'][sel]*(1+REDSHIFT),hdu['medianStack'][sel])
			overlap=(ObsPlate.wavelength>wlmin)&(ObsPlate.wavelength<wlmax)
			x = ObsPlate.wavelength[overlap]
			y = ObsPlate.flux[overlap]
			yerr = ObsPlate.fluxErr[overlap]
			chi2mean = n.sum(((y - meanStack(x))/yerr)**2.)/len(x)
			return chi2mean
		return getchi2(entry['Z_1']), getchi2(entry['Z_2']), getchi2(entry['Z_3'])
	else:
		return -1,-1,-1
def compareSpectraAndStack(nameRoot):
	"""Compute stack-vs-spectrum chi2 values for every entry of a chunk's
	summary table and write them into a new FITS comparison table.

	:param nameRoot: chunk identifier used in the table/stack file names
	"""
	summaryTableName =join(stackDir, nameRoot + "_summaryTable_Zstack.fits")
	table = fits.open(summaryTableName)[1].data
	# redshift grid: every 100th value of the sorted reliable redshifts
	zarr = table['Z_1'][(table['Z_1']>0)&(table['Z_1']>table['Z_ERR_1'])]
	zarr.sort()
	grid = zarr[::100]
	chi1 = n.empty(len(table['PLATE']))
	chi2 = n.empty(len(table['PLATE']))
	chi3 = n.empty(len(table['PLATE']))
	for ii in range(len(table['PLATE'])):
		entry = table[ii]
		print entry
		chi1[ii], chi2[ii], chi3[ii] = compareSpectrumToStack(entry, grid = grid, nameRoot=nameRoot)
	# append the three chi2 columns and write a fresh comparison table,
	# removing any previous output file first
	summaryTableName =join(stackDir, nameRoot + "_summaryTable_stack_Zstack_comparison.fits")
	col_chi1 = fits.Column(name="chi2_Z_Z1",format="D", array= chi1)
	col_chi2 = fits.Column(name="chi2_Z_Z2",format="D", array= chi2)
	col_chi3 = fits.Column(name="chi2_Z_Z3",format="D", array= chi3)
	cols = table.columns + col_chi1 + col_chi2 + col_chi3
	tbhdu = fits.BinTableHDU.from_columns(cols)
	prihdr = fits.Header()
	prihdr['chunk'] = nameRoot
	prihdu = fits.PrimaryHDU(header=prihdr)
	thdulist = fits.HDUList([prihdu, tbhdu])
	os.system('rm '+summaryTableName)
	thdulist.writeto(summaryTableName)
# Process both eBOSS chunks, then stop.  NOTE(review): everything below
# sys.exit() is an older magnitude/colour-binned version kept as dead code.
nameRoot="elg270_eboss17"
compareSpectraAndStack(nameRoot)
nameRoot="elg270_eboss67"
compareSpectraAndStack(nameRoot)
sys.exit()
stackDir = "/uufs/chpc.utah.edu/common/home/u0936736/stack_eBOSSELG"
# g magnitude and r-z / g-r colour bin edges used in the stack file names
ggrid = [21.8,22.5,22.8]
rzgrid = [0.0,0.8,1.0,2.0]
grgrid = [0.0,0.4,0.6,1.0]
# NOTE(review): dead code — never reached because of sys.exit() above.  This
# redefinition compares a spectrum against magnitude/colour-binned stacks
# instead of redshift-binned ones.  The mutable list defaults are shared
# across calls; harmless here since they are only read.
def compareSpectrumToStack(entry, nameRoot="elg270_eboss17_", ggrid = [21.8,22.5,22.8], rzgrid = [0.0,0.8,1.0,2.0], grgrid = [0.0,0.4,0.6,1.0]):
	# gets the spectrum
	ObsPlate = HandleReducedELGPlate(entry['PLATE'],entry['MJD'])
	ObsPlate.loadSpec(entry['FIBER'])
	# gets the stack
	if entry['index_g']>=0:
		suffix = "_g_"+str(n.round(ggrid[entry['index_g']],1))+"_rz_"+str(n.round(rzgrid[entry['index_rz']],1))+"_gr_"+str(n.round(grgrid[entry['index_gr']],1))
		stackName = join(stackDir, nameRoot + suffix + "_stack.fits")
		hdu = fits.open(stackName)[1].data
		# keep only well-sampled stack pixels (>90% of the peak coverage)
		sel = (hdu['NspectraPerPixel']>0.9*n.max(hdu['NspectraPerPixel']))
		# compares stakc and spectrum at REDSHIFT
		def getchi2(REDSHIFT):
			# chi2 per pixel of the observation against the redshifted stack
			wlmin=n.min(hdu['wavelength'][sel]*(1+REDSHIFT))
			wlmax=n.max(hdu['wavelength'][sel]*(1+REDSHIFT))
			meanStack =interp1d(hdu['wavelength'][sel]*(1+REDSHIFT),hdu['medianStack'][sel])
			overlap=(ObsPlate.wavelength>wlmin)&(ObsPlate.wavelength<wlmax)
			x = ObsPlate.wavelength[overlap]
			y = ObsPlate.flux[overlap]
			yerr = ObsPlate.fluxErr[overlap]
			chi2mean = n.sum(((y - meanStack(x))/yerr)**2.)/len(x)
			return chi2mean
		return getchi2(entry['Z_1']), getchi2(entry['Z_2']), getchi2(entry['Z_3'])
	else:
		return -1,-1,-1
def compareSpectraAndStack(nameRoot):
	"""Magnitude/colour-binned variant of the comparison driver above.

	NOTE(review): dead code — never reached because of sys.exit() earlier.
	"""
	summaryTableName =join(stackDir, nameRoot + "summaryTable_stack.fits")
	table = fits.open(summaryTableName)[1].data
	chi1 = n.empty(len(table['PLATE']))
	chi2 = n.empty(len(table['PLATE']))
	chi3 = n.empty(len(table['PLATE']))
	for ii in range(len(table['PLATE'])):
		entry = table[ii]
		print entry
		chi1[ii], chi2[ii], chi3[ii] = compareSpectrumToStack(entry, nameRoot=nameRoot, ggrid = ggrid, rzgrid = rzgrid, grgrid = grgrid)
	# append the chi2 columns and write the comparison table, removing any
	# previous output file first
	summaryTableName =join(stackDir, nameRoot + "summaryTable_stack_comparison.fits")
	col_chi1 = fits.Column(name="chi2_Z1",format="D", array= chi1)
	col_chi2 = fits.Column(name="chi2_Z2",format="D", array= chi2)
	col_chi3 = fits.Column(name="chi2_Z3",format="D", array= chi3)
	cols = table.columns + col_chi1 + col_chi2 + col_chi3
	tbhdu = fits.BinTableHDU.from_columns(cols)
	prihdr = fits.Header()
	prihdr['chunk'] = nameRoot
	prihdu = fits.PrimaryHDU(header=prihdr)
	thdulist = fits.HDUList([prihdu, tbhdu])
	os.system('rm '+summaryTableName)
	thdulist.writeto(summaryTableName)
# NOTE(review): never executed — the script exits at sys.exit() above.
nameRoot="elg270_eboss17"
compareSpectraAndStack(nameRoot)
nameRoot="elg270_eboss67"
compareSpectraAndStack(nameRoot)
| 39.037313 | 154 | 0.712292 | 744 | 5,231 | 4.90457 | 0.181452 | 0.016443 | 0.028501 | 0.027953 | 0.877775 | 0.860784 | 0.846259 | 0.793642 | 0.772266 | 0.772266 | 0 | 0.043702 | 0.107628 | 5,231 | 133 | 155 | 39.330827 | 0.738003 | 0.027719 | 0 | 0.715596 | 0 | 0 | 0.147273 | 0.046072 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.055046 | null | null | 0.018349 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
dacba97bd9915a2af436c92acd674d05bbb8aeb6 | 60 | py | Python | Solutions/5kyu/5kyu_regex_password_validation.py | citrok25/Codewars-1 | dc641c5079e2e8b5955eb027fd15427e5bdb2e26 | [
"MIT"
] | 46 | 2017-08-24T09:27:57.000Z | 2022-02-25T02:24:33.000Z | Solutions/5kyu/5kyu_regex_password_validation.py | abbhishek971/Codewars | 9e761811db724da1e8aae44594df42b4ee879a16 | [
"MIT"
] | null | null | null | Solutions/5kyu/5kyu_regex_password_validation.py | abbhishek971/Codewars | 9e761811db724da1e8aae44594df42b4ee879a16 | [
"MIT"
] | 35 | 2017-08-01T22:09:48.000Z | 2022-02-18T17:21:37.000Z | regex= '^(?=.*[0-9])(?=.*[A-Z])(?=.*[a-z])[a-zA-Z0-9]{6,}$'
| 30 | 59 | 0.3 | 12 | 60 | 1.5 | 0.666667 | 0.222222 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.086207 | 0.033333 | 60 | 1 | 60 | 60 | 0.224138 | 0 | 0 | 0 | 0 | 1 | 0.833333 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
dadf121faa511497740a2d35edacf82cbe72372d | 158 | py | Python | qatrack/reports/faults/__init__.py | crcrewso/qatrackplus | b9da3bc542d9e3eca8b7291bb631d1c7255d528e | [
"MIT"
] | 20 | 2021-03-11T18:37:32.000Z | 2022-03-23T19:38:07.000Z | qatrack/reports/faults/__init__.py | crcrewso/qatrackplus | b9da3bc542d9e3eca8b7291bb631d1c7255d528e | [
"MIT"
] | 75 | 2021-02-12T02:37:33.000Z | 2022-03-29T20:56:16.000Z | qatrack/reports/faults/__init__.py | crcrewso/qatrackplus | b9da3bc542d9e3eca8b7291bb631d1c7255d528e | [
"MIT"
] | 5 | 2021-04-07T15:46:53.000Z | 2021-09-18T16:55:00.000Z | from django.conf import settings
from qatrack.reports.faults.details import FaultDetailsReport
from qatrack.reports.faults.summary import FaultSummaryReport
| 31.6 | 61 | 0.873418 | 19 | 158 | 7.263158 | 0.631579 | 0.15942 | 0.26087 | 0.347826 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.082278 | 158 | 4 | 62 | 39.5 | 0.951724 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
dae0581b3d5b87aa088babf305c2882c391d3fc8 | 6,556 | py | Python | models.py | Tahlor/glom-pytorch | 45b2fc52af5288cd53611e497a70d53ffa303410 | [
"MIT"
] | null | null | null | models.py | Tahlor/glom-pytorch | 45b2fc52af5288cd53611e497a70d53ffa303410 | [
"MIT"
] | null | null | null | models.py | Tahlor/glom-pytorch | 45b2fc52af5288cd53611e497a70d53ffa303410 | [
"MIT"
] | null | null | null | import torch
import torchvision
import torch.nn as nn
import math
import torch.nn.functional as F
from einops.layers.torch import Rearrange
def normalize(s):
    """Re-initialise a container's layers in place and return it:
    He-normal weights for convolutions, unit weight / zero bias for
    2-d batch norms.  Other layer types are left untouched.
    """
    for layer in s.children():
        if isinstance(layer, nn.Conv2d):
            fan_out = layer.kernel_size[0] * layer.kernel_size[1] * layer.out_channels
            layer.weight.data.normal_(0, math.sqrt(2. / fan_out))
        elif isinstance(layer, nn.BatchNorm2d):
            layer.weight.data.fill_(1)
            layer.bias.data.zero_()
    return s
class VGG(nn.Module):
    """VGG-style classifier for single-channel images.

    Based on - https://github.com/kkweon/mnist-competition
    from: https://github.com/ranihorev/Kuzushiji_MNIST/blob/master/KujuMNIST.ipynb
    """

    def __init__(self, num_classes=27, pool="max", dropout=True, **kwargs):  # 62):
        super().__init__()
        # choose the pooling layer class once; reused by the conv helpers
        self.pool = nn.MaxPool2d if "max" in pool.lower() else nn.AvgPool2d
        # identity keeps the Sequential definition uniform when dropout is off
        self.dropout = nn.Dropout(p=0.5) if dropout else torch.nn.Identity()
        self.l1 = self.two_conv_pool(1, 64, 64)
        self.l2 = self.two_conv_pool(64, 128, 128)
        self.l3 = self.three_conv_pool(128, 256, 256, 256)
        self.l4 = self.three_conv_pool(256, 256, 256, 256)
        self.classifier = nn.Sequential(
            self.dropout,
            nn.Linear(256, 512),
            nn.BatchNorm1d(512),
            nn.ReLU(inplace=True),
            self.dropout,
            nn.Linear(512, num_classes),
        )

    def two_conv_pool(self, in_channels, f1, f2):
        """Two conv/BN/ReLU stages followed by a 2x2 pooling layer."""
        stage = [
            nn.Conv2d(in_channels, f1, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f1),
            nn.ReLU(inplace=True),
            nn.Conv2d(f1, f2, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f2),
            nn.ReLU(inplace=True),
            self.pool(kernel_size=2, stride=2),
        ]
        return normalize(nn.Sequential(*stage))

    def three_conv_pool(self, in_channels, f1, f2, f3):
        """Three conv/BN/ReLU stages followed by a 2x2 pooling layer."""
        stage = [
            nn.Conv2d(in_channels, f1, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f1),
            nn.ReLU(inplace=True),
            nn.Conv2d(f1, f2, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f2),
            nn.ReLU(inplace=True),
            nn.Conv2d(f2, f3, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f3),
            nn.ReLU(inplace=True),
            self.pool(kernel_size=2, stride=2),
        ]
        return normalize(nn.Sequential(*stage))

    def forward(self, x):
        """Return per-class log-probabilities for a batch of images."""
        for stage in (self.l1, self.l2, self.l3, self.l4):
            x = stage(x)
        x = x.view(x.size(0), -1)
        return F.log_softmax(self.classifier(x), dim=1)
class VGGLinear(nn.Module):
    """VGG-like topology without ReLU nonlinearities in any stage.

    Based on - https://github.com/kkweon/mnist-competition
    from: https://github.com/ranihorev/Kuzushiji_MNIST/blob/master/KujuMNIST.ipynb
    """

    def __init__(self, num_classes=27, pool="max", dropout=True, **kwargs):  # 62):
        super().__init__()
        # choose the pooling layer class once; reused by the conv helpers
        self.pool = nn.MaxPool2d if "max" in pool.lower() else nn.AvgPool2d
        # identity keeps the Sequential definition uniform when dropout is off
        self.dropout = nn.Dropout(p=0.5) if dropout else torch.nn.Identity()
        self.l1 = self.two_conv_pool(1, 64, 64)
        self.l2 = self.two_conv_pool(64, 128, 128)
        self.l3 = self.three_conv_pool(128, 256, 256, 256)
        self.l4 = self.three_conv_pool(256, 256, 256, 256)
        self.classifier = nn.Sequential(
            self.dropout,
            nn.Linear(256, 512),
            nn.BatchNorm1d(512),
            self.dropout,
            nn.Linear(512, num_classes),
        )

    def two_conv_pool(self, in_channels, f1, f2):
        """Two conv/BN stages (no activation) followed by a 2x2 pool."""
        stage = [
            nn.Conv2d(in_channels, f1, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f1),
            nn.Conv2d(f1, f2, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f2),
            self.pool(kernel_size=2, stride=2),
        ]
        return normalize(nn.Sequential(*stage))

    def three_conv_pool(self, in_channels, f1, f2, f3):
        """Three conv/BN stages (no activation) followed by a 2x2 pool."""
        stage = [
            nn.Conv2d(in_channels, f1, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f1),
            nn.Conv2d(f1, f2, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f2),
            nn.Conv2d(f2, f3, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f3),
            self.pool(kernel_size=2, stride=2),
        ]
        return normalize(nn.Sequential(*stage))

    def forward(self, x):
        """Return per-class log-probabilities for a batch of images."""
        for stage in (self.l1, self.l2, self.l3, self.l4):
            x = stage(x)
        x = x.view(x.size(0), -1)
        return F.log_softmax(self.classifier(x), dim=1)
class V1(nn.Module):
    """Reduced VGG variant: a single conv per stage, no ReLU activations."""

    def __init__(self, num_classes=27, pool="max", dropout=True, **kwargs):  # 62):
        super().__init__()
        # choose the pooling layer class once; reused by the conv helpers
        self.pool = nn.MaxPool2d if "max" in pool.lower() else nn.AvgPool2d
        # identity keeps the Sequential definition uniform when dropout is off
        self.dropout = nn.Dropout(p=0.5) if dropout else torch.nn.Identity()
        self.l1 = self.two_conv_pool(1, 64, 64)
        self.l2 = self.two_conv_pool(64, 128, 128)
        self.l3 = self.three_conv_pool(128, 256, 256, 256)
        self.l4 = self.three_conv_pool(256, 256, 256, 256)
        self.classifier = nn.Sequential(
            self.dropout,
            nn.Linear(256, 512),
            nn.BatchNorm1d(512),
            self.dropout,
            nn.Linear(512, num_classes),
        )

    def two_conv_pool(self, in_channels, f1, f2):
        """Single conv + BN + 2x2 pool stage.

        BUG FIX: the batch norm now uses the conv's actual output channel
        count f1 (it was BatchNorm2d(f2), which only worked because every
        call in this file passes f1 == f2).  f2 is kept for signature
        compatibility with the sibling VGG classes.
        """
        s = nn.Sequential(
            nn.Conv2d(in_channels, f1, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f1),
            self.pool(kernel_size=2, stride=2),
        )
        return normalize(s)

    def three_conv_pool(self, in_channels, f1, f2, f3):
        """Single conv + BN + 2x2 pool stage.

        BUG FIX: BatchNorm2d(f1) instead of BatchNorm2d(f3) — see
        two_conv_pool.  f2/f3 kept for signature compatibility.
        """
        s = nn.Sequential(
            nn.Conv2d(in_channels, f1, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(f1),
            self.pool(kernel_size=2, stride=2),
        )
        return normalize(s)

    def forward(self, x):
        """Return per-class log-probabilities for a batch of images."""
        x = self.l1(x)
        x = self.l2(x)
        x = self.l3(x)
        x = self.l4(x)
        x = x.view(x.size(0), -1)
        x = self.classifier(x)
        return F.log_softmax(x, dim=1)
class LinearReg(nn.Module):
    """Multinomial logistic-regression baseline on flattened 28x28 inputs."""
    # conv helpers from the sibling VGG classes are unused by this baseline
    two_conv_pool = three_conv_pool = None
    def __init__(self, num_classes=27, dropout=True, **kwargs):#62):
        super().__init__()
        # identity when dropout is disabled keeps the Sequential uniform
        self.dropout = nn.Dropout(p=0.5) if dropout else torch.nn.Identity()
        self.classifier = nn.Sequential(
            self.dropout,
            nn.Linear(28*28, num_classes),
        )
def forward(self, x):
b,c,h,w = x.shape
x = self.classifier(x.view(b,-1))
return F.log_softmax(x, dim=1) | 34.145833 | 82 | 0.568792 | 933 | 6,556 | 3.866024 | 0.123258 | 0.055448 | 0.039922 | 0.056557 | 0.881342 | 0.876629 | 0.870252 | 0.85639 | 0.845024 | 0.845024 | 0 | 0.074122 | 0.292099 | 6,556 | 192 | 83 | 34.145833 | 0.703081 | 0.043319 | 0 | 0.7875 | 0 | 0 | 0.002889 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09375 | false | 0 | 0.0375 | 0 | 0.2375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
971bfadf2d9f1de16ba4c87425af5d88e2e73c31 | 18,300 | py | Python | test/test_timeseries_generator.py | SebaDro/st-deep-hydro | 171b66d14eeab3f8f41eeb6ec481de6be16472fd | [
"Apache-2.0"
] | null | null | null | test/test_timeseries_generator.py | SebaDro/st-deep-hydro | 171b66d14eeab3f8f41eeb6ec481de6be16472fd | [
"Apache-2.0"
] | null | null | null | test/test_timeseries_generator.py | SebaDro/st-deep-hydro | 171b66d14eeab3f8f41eeb6ec481de6be16472fd | [
"Apache-2.0"
] | null | null | null | import math
import numpy as np
import pandas as pd
import unittest
import xarray as xr
from libs import dataset
from libs import generator
def create_streamflow_data():
    """Create 20 days of random streamflow values (starting 2021-01-01)
    with NaN gaps at positions 9, 10, 11 and 18.
    """
    index = pd.date_range("2021", periods=20)
    streamflow = np.random.uniform(low=0, high=10, size=(20,))
    streamflow[[9, 10, 11, 18]] = np.NaN
    return pd.DataFrame({"streamflow": streamflow}, index=index)
def create_forcings_data():
    """Create 20 days of random temperature and precipitation forcings
    (starting 2021-01-01) without gaps.
    """
    index = pd.date_range("2021", periods=20)
    forcings = {
        "temp": np.random.uniform(low=0, high=10, size=(20,)),
        "prcp": np.random.uniform(low=0, high=10, size=(20,)),
    }
    return pd.DataFrame(forcings, index=index)
def create_one_dimensional_dataset():
    """Build a HydroDataset with lumped forcings/streamflow timeseries for
    two basins ("123" and "456"), concatenated along a basin dimension.
    """
    def build_basin(basin_id):
        # join forcings and streamflow on the shared date index and turn the
        # frame into an xarray dataset tagged with the basin coordinate
        df = create_forcings_data().join(create_streamflow_data(), how="outer")
        ds = xr.Dataset.from_dataframe(df)
        ds = ds.rename({"index": "time"})
        return ds.assign_coords({"basin": basin_id})

    ds_basin_1 = build_basin("123")
    ds_basin_2 = build_basin("456")
    return dataset.HydroDataset(xr.concat([ds_basin_1, ds_basin_2], dim="basin"),
                                feature_variables=["temp", "prcp"], target_variables=["streamflow"])
def create_two_dimensional_dataset():
    """Create a single-basin HydroDataset with gridded (2-D) forcings.

    Forcings ``temp`` and ``prcp`` have dims (basin, time, y, x) with a
    12x14 grid over 20 days; the ``streamflow`` target is per-basin and
    per-time only, with NaN at indices 9, 10, 11 and 18.
    """
    dates = pd.date_range("2021", periods=20)
    x = np.arange(0, 14)
    y = np.arange(0, 12)
    basins = ["123"]
    temp_data = np.random.uniform(low=0, high=10, size=(1, 20, 12, 14))
    prcp_data = np.random.uniform(low=0, high=10, size=(1, 20, 12, 14))
    streamflow_data = np.random.uniform(low=0, high=10, size=(1, 20))
    # Fixed gap positions that the generator tests rely on.
    na_indices = [9, 10, 11, 18]
    # np.nan instead of np.NaN: the upper-case alias was removed in NumPy 2.0.
    streamflow_data[0, na_indices] = np.nan
    temp_xr = xr.DataArray(temp_data, coords=[basins, dates, y, x], dims=["basin", "time", "y", "x"])
    prcp_xr = xr.DataArray(prcp_data, coords=[basins, dates, y, x], dims=["basin", "time", "y", "x"])
    streamflow_xr = xr.DataArray(streamflow_data, coords=[basins, dates], dims=["basin", "time"])
    return dataset.HydroDataset(xr.Dataset(dict(temp=temp_xr, prcp=prcp_xr, streamflow=streamflow_xr)),
                                feature_variables=["temp", "prcp"], target_variables=["streamflow"])
def create_two_dimensional_joined_dataset():
    """Create a HydroDataset whose gridded forcings carry no basin dim.

    Unlike create_two_dimensional_dataset, ``temp`` and ``prcp`` have
    dims (time, y, x) only, while ``streamflow`` still has
    (basin, time) — the "joined" layout where one grid serves the basin.
    NaN targets sit at indices 9, 10, 11 and 18.
    """
    dates = pd.date_range("2021", periods=20)
    x = np.arange(0, 14)
    y = np.arange(0, 12)
    basins = ["123"]
    temp_data = np.random.uniform(low=0, high=10, size=(20, 12, 14))
    prcp_data = np.random.uniform(low=0, high=10, size=(20, 12, 14))
    streamflow_data = np.random.uniform(low=0, high=10, size=(1, 20))
    # Fixed gap positions that the generator tests rely on.
    na_indices = [9, 10, 11, 18]
    # np.nan instead of np.NaN: the upper-case alias was removed in NumPy 2.0.
    streamflow_data[0, na_indices] = np.nan
    temp_xr = xr.DataArray(temp_data, coords=[dates, y, x], dims=["time", "y", "x"])
    prcp_xr = xr.DataArray(prcp_data, coords=[dates, y, x], dims=["time", "y", "x"])
    streamflow_xr = xr.DataArray(streamflow_data, coords=[basins, dates], dims=["basin", "time"])
    return dataset.HydroDataset(xr.Dataset(dict(temp=temp_xr, prcp=prcp_xr, streamflow=streamflow_xr)),
                                feature_variables=["temp", "prcp"], target_variables=["streamflow"])
class TestCustomTimeseriesGenerator(unittest.TestCase):
    """Tests for generator.CustomTimeseriesGenerator on 1-D and 2-D data.

    All fixtures span 20 timesteps; the streamflow target is NaN at
    indices 9, 10, 11 and 18, which the NaN-aware tests depend on.
    """

    def setUp(self):
        self.basin_1 = "123"
        self.basin_2 = "456"
        self.ds = create_one_dimensional_dataset()
        self.ds_2d = create_two_dimensional_dataset()
        self.ds_2d_joined = create_two_dimensional_joined_dataset()

    def test_get_input_shape_1d(self):
        """Input shape for 1-D data is (0, timesteps, n_features)."""
        batch_size = 6
        timesteps = 8
        offset = 1
        feature_cols = ["temp", "prcp"]
        target_col = "streamflow"
        gen = generator.CustomTimeseriesGenerator(self.ds.timeseries, batch_size, timesteps, offset, feature_cols,
                                                  target_col, False)
        exp_shape = (0, 8, 2)
        self.assertEqual(exp_shape, gen._get_input_shape())

    def test_get_input_shape_2d(self):
        """Input shape for gridded data is (0, timesteps, y, x, n_features)."""
        batch_size = 6
        timesteps = 6
        offset = 1
        feature_cols = ["temp", "prcp"]
        target_col = "streamflow"
        gen = generator.CustomTimeseriesGenerator(self.ds_2d.timeseries, batch_size, timesteps, offset, feature_cols,
                                                  target_col, False)
        exp_shape = (0, 6, 12, 14, 2)
        self.assertEqual(exp_shape, gen._get_input_shape())

    def test_timeseries_generation(self):
        """1-D generation without NaN removal: batch count and contents."""
        batch_size = 6
        timesteps = 4
        offset = 1
        feature_cols = ["temp", "prcp"]
        target_col = "streamflow"
        lag = timesteps + offset - 1
        gen = generator.CustomTimeseriesGenerator(self.ds.timeseries, batch_size, timesteps, offset, feature_cols,
                                                  target_col, False)
        # First, check the number of batches
        # Subtract time lag values from 20 timesteps for each of two basins
        exp_batches = math.ceil((20 - lag) * 2 / batch_size)
        self.assertEqual(exp_batches, len(gen))
        # Then check first, middle and last batches
        # First batch
        batch = 0
        inputs, targets = gen[batch]
        x1 = self.ds.timeseries.sel(basin=self.basin_1).to_array().values.transpose()[0:timesteps, 0:2]
        y1 = self.ds.timeseries.sel(basin=self.basin_1).to_array().values.transpose()[timesteps, 2]
        x2 = inputs[0]
        y2 = targets[0]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # Middle batch for both basins
        batch = 2
        inputs, targets = gen[batch]
        # Last sample of basin 1
        x1 = self.ds.timeseries.sel(basin=self.basin_1).to_array().values.transpose()[-(timesteps + offset):-1, 0:2]
        y1 = self.ds.timeseries.sel(basin=self.basin_1).to_array().values.transpose()[-1, 2]
        x2 = inputs[3]
        y2 = targets[3]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # First sample of basin 2
        x1 = self.ds.timeseries.sel(basin=self.basin_2).to_array().values.transpose()[0:timesteps, 0:2]
        y1 = self.ds.timeseries.sel(basin=self.basin_2).to_array().values.transpose()[timesteps, 2]
        x2 = inputs[4]
        y2 = targets[4]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # Last batch
        batch = 5
        inputs, targets = gen[batch]
        x1 = self.ds.timeseries.sel(basin=self.basin_2).to_array().values.transpose()[-(timesteps + offset):-1, 0:2]
        y1 = self.ds.timeseries.sel(basin=self.basin_2).to_array().values.transpose()[-1, 2]
        x2 = inputs[1]
        y2 = targets[1]
        # Last batch should contain only 2 elements
        self.assertEqual(2, len(inputs))
        np.testing.assert_array_equal(x1, x2)
        # Previously computed but never asserted; mirrors the NaN-variant test.
        np.testing.assert_array_equal(y1, y2)

    def test_timeseries_generation_with_nan(self):
        """1-D generation with NaN targets dropped from the samples."""
        batch_size = 6
        timesteps = 4
        offset = 1
        feature_cols = ["temp", "prcp"]
        target_col = "streamflow"
        lag = timesteps + offset - 1
        # Note that the generator does not consider create input/target pairs for targets with NaN values.
        # As a result the first two batches contains only inputs and targets for basin 1 and the other two batches
        # for basin 2
        gen = generator.CustomTimeseriesGenerator(self.ds.timeseries, batch_size, timesteps, offset, feature_cols,
                                                  target_col, True)
        # First, check the number of batches
        # Subtract time lag and 4 NaN values from 20 timesteps for each of two basins
        nr_nan = 4
        exp_batches = math.ceil((20 - lag - nr_nan) * 2 / batch_size)
        self.assertEqual(exp_batches, len(gen))
        # Then check first, middle and last batches
        # First batch
        batch = 0
        inputs, targets = gen[batch]
        x1 = self.ds.timeseries.sel(basin=self.basin_1).to_array().values.transpose()[0:timesteps, 0:2]
        y1 = self.ds.timeseries.sel(basin=self.basin_1).to_array().values.transpose()[timesteps, 2]
        x2 = inputs[0]
        y2 = targets[0]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # Middle batch for both basins
        batch = 1
        inputs, targets = gen[batch]
        # Last sample of basin 1.
        x1 = self.ds.timeseries.sel(basin=self.basin_1).to_array().values.transpose()[-(timesteps + offset):-1, 0:2]
        y1 = self.ds.timeseries.sel(basin=self.basin_1).to_array().values.transpose()[-1, 2]
        x2 = inputs[5]
        y2 = targets[5]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # First sample of basin 2
        batch = 2
        inputs, targets = gen[batch]
        x1 = self.ds.timeseries.sel(basin=self.basin_2).to_array().values.transpose()[0:timesteps, 0:2]
        y1 = self.ds.timeseries.sel(basin=self.basin_2).to_array().values.transpose()[timesteps, 2]
        x2 = inputs[0]
        y2 = targets[0]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # Last batch
        batch = 3
        inputs, targets = gen[batch]
        x1 = self.ds.timeseries.sel(basin=self.basin_2).to_array().values.transpose()[-(timesteps + offset):-1, 0:2]
        y1 = self.ds.timeseries.sel(basin=self.basin_2).to_array().values.transpose()[-1, 2]
        x2 = inputs[5]
        y2 = targets[5]
        # Last batch should contain 6 elements
        self.assertEqual(6, len(inputs))
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)

    def test_timeseries_generation_2d(self):
        """Gridded generation without NaN removal."""
        batch_size = 6
        timesteps = 4
        offset = 1
        feature_cols = ["temp", "prcp"]
        target_col = "streamflow"
        lag = timesteps + offset - 1
        shape = (timesteps, len(self.ds_2d.timeseries.indexes["y"]),
                 len(self.ds_2d.timeseries.indexes["x"]), len(feature_cols))
        gen = generator.CustomTimeseriesGenerator(self.ds_2d.timeseries, batch_size, timesteps, offset, feature_cols,
                                                  target_col, False, False, shape)
        # First, check the number of batches
        # Subtract time lag from 20 timesteps for only one basin
        exp_batches = math.ceil((20 - lag) / batch_size)
        self.assertEqual(exp_batches, len(gen))
        # Then check first, middle and last batches
        # First batch
        batch = 0
        inputs, targets = gen[batch]
        x1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[feature_cols].to_array().values, 0, -1)[
            batch * batch_size:batch * batch_size + timesteps]
        y1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[[target_col]].to_array().values, 0, -1)[
            batch * batch_size + timesteps]
        x2 = inputs[0]
        y2 = targets[0]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # Middle batch
        batch = 1
        i = 2
        inputs, targets = gen[batch]
        x1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[feature_cols].to_array().values, 0, -1)[
            i + batch * batch_size:i + batch * batch_size + timesteps]
        # Index formula now matches the x1 slice above (was `i + batch_size +
        # timesteps`, which only coincided because batch == 1 here).
        y1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[[target_col]].to_array().values, 0, -1)[
            i + batch * batch_size + timesteps]
        x2 = inputs[i]
        y2 = targets[i]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # Last batch
        batch = 2
        i = 3
        inputs, targets = gen[batch]
        x1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[feature_cols].to_array().values, 0, -1)[
            -(timesteps + offset):-1]
        y1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[[target_col]].to_array().values, 0, -1)[-1]
        x2 = inputs[i]
        y2 = targets[i]
        # Last batch should contain only 4 elements
        self.assertEqual(4, len(inputs))
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)

    def test_timeseries_generation_2d_without_nan(self):
        """Gridded generation with NaN targets dropped from the samples."""
        batch_size = 6
        timesteps = 4
        offset = 1
        feature_cols = ["temp", "prcp"]
        target_col = "streamflow"
        lag = timesteps + offset - 1
        shape = (timesteps, len(self.ds_2d.timeseries.indexes["y"]),
                 len(self.ds_2d.timeseries.indexes["x"]), len(feature_cols))
        # Note that the generator does not consider create input/target pairs for targets with NaN values.
        # As a result the number of batches is lower
        gen = generator.CustomTimeseriesGenerator(self.ds_2d.timeseries, batch_size, timesteps, offset, feature_cols,
                                                  target_col, True, False, shape)
        # First, check the number of batches
        # Subtract time lag and 4 NaN values from 20 timesteps
        nr_nan = 4
        exp_batches = math.ceil((20 - lag - nr_nan) / batch_size)
        self.assertEqual(exp_batches, len(gen))
        # Then check first, middle and last batches
        # First batch
        batch = 0
        inputs, targets = gen[batch]
        x1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[feature_cols].to_array().values, 0, -1)[
            batch * batch_size:batch * batch_size + timesteps]
        y1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[[target_col]].to_array().values, 0, -1)[
            batch * batch_size + timesteps]
        x2 = inputs[0]
        y2 = targets[0]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # Last batch (due to removal of NaN, the second batch is also the last batch)
        batch = 1
        inputs, targets = gen[batch]
        # Due to NaN values, now the target at index position 13 is first target value of the second batch
        i_target = 13
        x1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[feature_cols].to_array().values, 0, -1)[
            i_target - timesteps:i_target]
        y1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[[target_col]].to_array().values, 0, -1)[i_target]
        x2 = inputs[0]
        y2 = targets[0]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # Check also last values of last batch
        x1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[feature_cols].to_array().values, 0, -1)[
            -(timesteps + offset):-1]
        y1 = np.moveaxis(self.ds_2d.timeseries.sel(basin=self.basin_1)[[target_col]].to_array().values, 0, -1)[-1]
        x2 = inputs[5]
        y2 = targets[5]
        # Last batch should contain 6 elements
        self.assertEqual(6, len(inputs))
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)

    def test_timeseries_generation_2d_with_joined_output(self):
        """Gridded generation with basin-less forcings (joined layout)."""
        batch_size = 6
        timesteps = 4
        offset = 1
        feature_cols = ["temp", "prcp"]
        target_col = "streamflow"
        lag = timesteps + offset - 1
        shape = (timesteps, len(self.ds_2d_joined.timeseries.indexes["y"]),
                 len(self.ds_2d_joined.timeseries.indexes["x"]), len(feature_cols))
        # Note that the generator does not consider create input/target pairs for targets with NaN values.
        # As a result the number of batches is lower
        gen = generator.CustomTimeseriesGenerator(self.ds_2d_joined.timeseries, batch_size, timesteps, offset,
                                                  feature_cols, target_col, True, True, shape)
        # First, check the number of batches
        # Subtract time lag and 4 NaN values from 20 timesteps
        nr_nan = 4
        exp_batches = math.ceil((20 - lag - nr_nan) / batch_size)
        self.assertEqual(exp_batches, len(gen))
        # Then check first, middle and last batches
        # First batch
        batch = 0
        inputs, targets = gen[batch]
        x1 = np.moveaxis(self.ds_2d_joined.timeseries.sel(basin=self.basin_1)[feature_cols].to_array().values, 0, -1)[
            batch * batch_size:batch * batch_size + timesteps]
        y1 = np.moveaxis(self.ds_2d_joined.timeseries.sel(basin=self.basin_1)[[target_col]].to_array().values, 0, -1)[
            batch * batch_size + timesteps]
        x2 = inputs[0]
        y2 = targets[0]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # Last batch (due to removal of NaN, the second batch is also the last batch)
        batch = 1
        inputs, targets = gen[batch]
        # Due to NaN values, now the target at index position 13 is first target value of the second batch
        i_target = 13
        x1 = np.moveaxis(self.ds_2d_joined.timeseries.sel(basin=self.basin_1)[feature_cols].to_array().values, 0, -1)[
            i_target - timesteps:i_target]
        y1 = np.moveaxis(self.ds_2d_joined.timeseries.sel(basin=self.basin_1)[[target_col]].to_array().values, 0, -1)[i_target]
        x2 = inputs[0]
        y2 = targets[0]
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
        # Check also last values of last batch
        x1 = np.moveaxis(self.ds_2d_joined.timeseries.sel(basin=self.basin_1)[feature_cols].to_array().values, 0, -1)[
            -(timesteps + offset):-1]
        y1 = np.moveaxis(self.ds_2d_joined.timeseries.sel(basin=self.basin_1)[[target_col]].to_array().values, 0, -1)[-1]
        x2 = inputs[5]
        y2 = targets[5]
        # Last batch should contain 6 elements
        self.assertEqual(6, len(inputs))
        np.testing.assert_array_equal(x1, x2)
        np.testing.assert_array_equal(y1, y2)
| 42.957746 | 127 | 0.626612 | 2,530 | 18,300 | 4.353755 | 0.066798 | 0.027236 | 0.055561 | 0.067907 | 0.918112 | 0.882796 | 0.857739 | 0.845665 | 0.839673 | 0.825057 | 0 | 0.042862 | 0.251639 | 18,300 | 425 | 128 | 43.058824 | 0.761446 | 0.11235 | 0 | 0.705502 | 0 | 0 | 0.020562 | 0 | 0 | 0 | 0 | 0 | 0.145631 | 1 | 0.042071 | false | 0 | 0.022654 | 0 | 0.084142 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9744651c561d6b713c8b34352e29e2fdaef7bb94 | 52,955 | py | Python | data.py | kovrov/wakebreaker | cc9b38351dad5b71d0be913a31202d031e3ff65b | [
"BSD-3-Clause"
] | null | null | null | data.py | kovrov/wakebreaker | cc9b38351dad5b71d0be913a31202d031e3ff65b | [
"BSD-3-Clause"
] | null | null | null | data.py | kovrov/wakebreaker | cc9b38351dad5b71d0be913a31202d031e3ff65b | [
"BSD-3-Clause"
] | null | null | null | import pyglet
def loadBoat1():
    """Load geometry and texture for the first boat model.

    Returns a dict with:
      texture  -- pyglet texture loaded from 'boat1tex.png'
      vertices -- tuple of 3-float vertex positions, three per triangle
      indices  -- triangle index buffer; vertices are already unrolled
                  per-face, so this is simply the identity sequence
      uvmap    -- tuple of 2-float texture coordinates, parallel to vertices
    """
    vertices = (
        (0, -9.3916, 0.726013), (0, -0.658051, -0.372787), (2.15999, -5.97165, 0.726013),
        (1.5168, -1.10376, -0.164337), (2.71582, -3.53603, 1.10835), (2.15999, -5.97165, 0.726013),
        (2.15999, -5.97165, 0.726013), (0, -5.10858, 0.998764), (0, -9.3916, 0.726013),
        (0, -0.658051, -0.372787), (0, -9.3916, 0.726013), (-2.15999, -5.97165, 0.726013),
        (-1.5168, -1.10376, -0.164337), (0, -0.658051, -0.372787), (-2.15999, -5.97165, 0.726013),
        (-1.5168, -1.10376, -0.164337), (-2.15999, -5.97165, 0.726013), (-2.71582, -3.53603, 1.10835),
        (0, -9.3916, 0.726013), (0, -5.10858, 0.998764), (-2.15999, -5.97165, 0.726013),
        (2.15999, -5.97165, 0.726013), (0, -0.658051, -0.372787), (1.5168, -1.10376, -0.164337),
        (2.71582, -3.53603, 1.10835), (1.35791, -3.88701, 1.05356), (2.15999, -5.97165, 0.726013),
        (0, -5.10858, 0.998764), (-1.35791, -3.88701, 1.05356), (-2.15999, -5.97165, 0.726013),
        (1.5168, -1.10376, -0.164337), (0, -0.658051, -0.372787), (1.29581, 4.017, 0.621231),
        (-2.71582, -3.53603, 1.10835), (-2.07408, 1.72919, 0.862762), (-1.5168, -1.10376, -0.164337),
        (-1.29581, 4.017, 0.621231), (0, -0.658051, -0.372787), (-1.5168, -1.10376, -0.164337),
        (2.07408, 1.72919, 0.862762), (1.29581, 4.017, 0.621231), (0, 0.152588, 1.80235),
        (-2.07408, 1.72919, 0.862762), (0, 0.152588, 1.80235), (-1.29581, 4.017, 0.621231),
        (0, 4.017, -0.513504), (0, -0.658051, -0.372787), (-1.29581, 4.017, 0.621231),
        (-1.29581, 4.017, 0.621231), (1.29581, 4.017, 0.621231), (0, 4.017, -0.513504),
        (0, -0.658051, -0.372787), (0, 4.017, -0.513504), (1.29581, 4.017, 0.621231),
        (-1.29581, 4.017, 0.621231), (0, 0.152588, 1.80235), (0, 4.017, 1.79726),
        (0, 0.152588, 1.80235), (1.29581, 4.017, 0.621231), (0, 4.017, 1.79726),
        (-1.29581, 4.017, 0.621231), (0, 4.017, 1.79726), (1.29581, 4.017, 0.621231),
        (-1.35791, -3.88701, 1.05356), (-1.58604, -1.8672, 1.32179), (-2.71582, -3.53603, 1.10835),
        (2.07408, 1.72919, 0.862762), (1.58604, -1.8672, 1.32179), (2.71582, -3.53603, 1.10835),
        (2.71582, -3.53603, 1.10835), (1.5168, -1.10376, -0.164337), (2.07408, 1.72919, 0.862762),
        (3.41731, 1.87068, 0.76561), (1.5168, -1.10376, -0.164337), (2.77556, 4.15849, 0.520035),
        (1.29581, 4.017, 0.621231), (2.77556, 4.15849, 0.520035), (1.5168, -1.10376, -0.164337),
        (3.41731, 1.87068, 0.76561), (2.77556, 4.15849, 0.520035), (2.07408, 1.72919, 0.862762),
        (-2.07408, 1.72919, 0.862762), (-3.41731, 1.87068, 0.76561), (-1.5168, -1.10376, -0.164337),
        (-1.29581, 4.017, 0.621231), (-2.77556, 4.15849, 0.520035), (-2.07408, 1.72919, 0.862762),
        (-3.41731, 1.87068, 0.76561), (-2.77556, 4.15849, 0.520035), (-1.5168, -1.10376, -0.164337),
        (1.35791, -3.88701, 1.05356), (0, -3.99136, 1.52379), (0, -5.10858, 0.998764),
        (0.678955, -1.9857, 2.14563), (0, 0.152588, 1.80235), (-0.678955, -1.9857, 2.14563),
        (-1.35791, -3.88701, 1.05356), (0, -5.10858, 0.998764), (0, -3.99136, 1.52379),
        (-0.678955, -1.9857, 2.14563), (0, -3.99136, 1.52379), (0.678955, -1.9857, 2.14563),
        (0, 0.152588, 1.80235), (0.678955, -1.9857, 2.14563), (2.07408, 1.72919, 0.862762),
        (1.58604, -1.8672, 1.32179), (2.07408, 1.72919, 0.862762), (0.678955, -1.9857, 2.14563),
        (0, 0.152588, 1.80235), (-2.07408, 1.72919, 0.862762), (-0.678955, -1.9857, 2.14563),
        (-0.678955, -1.9857, 2.14563), (-2.07408, 1.72919, 0.862762), (-1.58604, -1.8672, 1.32179),
        (1.35791, -3.88701, 1.05356), (2.71582, -3.53603, 1.10835), (1.58604, -1.8672, 1.32179),
        (-2.07408, 1.72919, 0.862762), (-2.71582, -3.53603, 1.10835), (-1.58604, -1.8672, 1.32179),
        (-1.29581, 4.017, 0.621231), (-1.5168, -1.10376, -0.164337), (-2.77556, 4.15849, 0.520035),
        (2.07408, 1.72919, 0.862762), (1.5168, -1.10376, -0.164337), (3.41731, 1.87068, 0.76561),
        (-2.71582, -3.53603, 1.10835), (-2.15999, -5.97165, 0.726013), (-1.35791, -3.88701, 1.05356),
        (0, -5.10858, 0.998764), (2.15999, -5.97165, 0.726013), (1.35791, -3.88701, 1.05356),
        (1.29581, 4.017, 0.621231), (2.07408, 1.72919, 0.862762), (2.77556, 4.15849, 0.520035),
        (-3.41731, 1.87068, 0.76561), (-2.07408, 1.72919, 0.862762), (-2.77556, 4.15849, 0.520035),
        (0, -3.99136, 1.52379), (-0.678955, -1.9857, 2.14563), (-1.35791, -3.88701, 1.05356),
        (1.58604, -1.8672, 1.32179), (0.678955, -1.9857, 2.14563), (1.35791, -3.88701, 1.05356),
        (0, -3.99136, 1.52379), (1.35791, -3.88701, 1.05356), (0.678955, -1.9857, 2.14563),
        (-1.58604, -1.8672, 1.32179), (-1.35791, -3.88701, 1.05356), (-0.678955, -1.9857, 2.14563))
    uvmap = (
        (0.0381012, -0.261093), (0.629089, -0.261734), (0.26564, -0.406982),
        (0.598373, -0.370407), (0.419739, -0.454941), (0.26564, -0.406982),
        (0.751053, -0.829849), (0.670044, -0.688614), (0.966003, -0.648087),
        (0.629089, -0.261734), (0.0381012, -0.261093), (0.266113, -0.11615),
        (0.596466, -0.154099), (0.629089, -0.261734), (0.266113, -0.11615),
        (0.596466, -0.154099), (0.266113, -0.11615), (0.420105, -0.0706634),
        (0.966003, -0.648087), (0.670044, -0.688614), (0.71019, -0.53093),
        (0.26564, -0.406982), (0.629089, -0.261734), (0.598373, -0.370407),
        (0.586777, -0.888382), (0.599594, -0.79364), (0.751053, -0.829849),
        (0.670044, -0.688614), (0.574402, -0.606201), (0.71019, -0.53093),
        (0.598373, -0.370407), (0.629089, -0.261734), (0.937607, -0.357208),
        (0.420105, -0.0706634), (0.734055, -0.0180816), (0.596466, -0.154099),
        (0.929169, -0.153946), (0.629089, -0.261734), (0.596466, -0.154099),
        (0.244492, -0.895523), (0.089508, -0.878281), (0.32309, -0.734955),
        (0.2061, -0.600281), (0.32309, -0.734955), (0.0610046, -0.656113),
        (0.931931, -0.254898), (0.629089, -0.261734), (0.929169, -0.153946),
        (0.0371246, -0.443939), (0.20549, -0.443939), (0.121307, -0.517654),
        (0.629089, -0.261734), (0.931931, -0.254898), (0.937607, -0.357208),
        (0.0610046, -0.656113), (0.32309, -0.734955), (0.0757598, -0.767059),
        (0.32309, -0.734955), (0.089508, -0.878281), (0.0757598, -0.767059),
        (0.223709, -0.52034), (0.307892, -0.443939), (0.392059, -0.52034),
        (0.574402, -0.606201), (0.440201, -0.606033), (0.537262, -0.518631),
        (0.244492, -0.895523), (0.469864, -0.829056), (0.586777, -0.888382),
        (0.419739, -0.454941), (0.598373, -0.370407), (0.74202, -0.507355),
        (0.80455, -0.490738), (0.598373, -0.370407), (0.949036, -0.447693),
        (0.937607, -0.357208), (0.949036, -0.447693), (0.598373, -0.370407),
        (0.253433, -0.982468), (0.0995025, -0.973648), (0.244492, -0.895523),
        (0.734055, -0.0180816), (0.79274, -0.0307617), (0.596466, -0.154099),
        (0.0610046, -0.656113), (0.0473632, -0.561722), (0.2061, -0.600281),
        (0.79274, -0.0307617), (0.933807, -0.0664215), (0.596466, -0.154099),
        (0.599594, -0.79364), (0.587357, -0.699814), (0.670044, -0.688614),
        (0.466156, -0.753265), (0.32309, -0.734955), (0.45636, -0.68013),
        (0.574402, -0.606201), (0.670044, -0.688614), (0.587357, -0.699814),
        (0.45636, -0.68013), (0.587357, -0.699814), (0.466156, -0.753265),
        (0.32309, -0.734955), (0.466156, -0.753265), (0.244492, -0.895523),
        (0.469864, -0.829056), (0.244492, -0.895523), (0.466156, -0.753265),
        (0.32309, -0.734955), (0.2061, -0.600281), (0.45636, -0.68013),
        (0.45636, -0.68013), (0.2061, -0.600281), (0.440201, -0.606033),
        (0.599594, -0.79364), (0.586777, -0.888382), (0.469864, -0.829056),
        (0.2061, -0.600281), (0.537262, -0.518631), (0.440201, -0.606033),
        (0.929169, -0.153946), (0.596466, -0.154099), (0.933807, -0.0664215),
        (0.74202, -0.507355), (0.598373, -0.370407), (0.80455, -0.490738),
        (0.537262, -0.518631), (0.71019, -0.53093), (0.574402, -0.606201),
        (0.670044, -0.688614), (0.751053, -0.829849), (0.599594, -0.79364),
        (0.089508, -0.878281), (0.244492, -0.895523), (0.0995025, -0.973648),
        (0.192841, -0.514603), (0.2061, -0.600281), (0.0473632, -0.561722),
        (0.587357, -0.699814), (0.45636, -0.68013), (0.574402, -0.606201),
        (0.469864, -0.829056), (0.466156, -0.753265), (0.599594, -0.79364),
        (0.587357, -0.699814), (0.599594, -0.79364), (0.466156, -0.753265),
        (0.440201, -0.606033), (0.574402, -0.606201), (0.45636, -0.68013))
    return {
        'texture': pyglet.image.load('boat1tex.png').get_texture(),
        'vertices': vertices,
        # Vertices are stored unrolled (three per triangle), so the index
        # buffer is simply the identity sequence 0..len(vertices)-1.
        'indices': tuple(range(len(vertices))),
        'uvmap': uvmap}
def loadBoat2():
    """Load geometry and texture for the second boat model.

    Returns a dict with:
      texture  -- pyglet texture loaded from 'boat2tex.png'
      vertices -- tuple of 3-float vertex positions, three per triangle
      indices  -- triangle index buffer; vertices are already unrolled
                  per-face, so this is simply the identity sequence
      uvmap    -- tuple of 2-float texture coordinates, parallel to vertices
    """
    vertices = (
        (0, -11.7114, 0.0783996), (0, -1.77814, -1.28052), (2.15999, -5.97165, 0.0783996),
        (2.62422, -1.10376, -0.811966), (1.00511, -3.53603, 0.460724), (2.15999, -5.97165, 0.0783996),
        (0, -3.6402, 0.996384), (0, -5.97916, 0.351135), (1.00511, -3.53603, 0.460724),
        (3.58784, 3.77087, 0.671005), (3.04967, 0.274628, 0.671005), (3.61475, 2.959, -0.435928),
        (2.62422, -1.10376, -0.811966), (1.96803, -0.236206, 0.873077), (1.00511, -3.53603, 0.460724),
        (3.04967, 0.274628, 0.671005), (1.42467, 3.89023, 0.751083), (1.96803, -0.236206, 0.873077),
        (3.04967, 0.274628, 0.671005), (2.62422, -1.10376, -0.811966), (3.61475, 0.523361, -0.435928),
        (2.26152, 4.90634, 0.289413), (2.27435, 4.92723, -0.41684), (1.42467, 3.89023, 0.751083),
        (3.58784, 3.77087, 0.671005), (2.70465, 4.90634, 0.309814), (1.42467, 3.89023, 0.751083),
        (0, -3.6402, 0.996384), (1.28694, 0.56517, 1.51672), (-1.33673, 0.56517, 1.51672),
        (2.15999, -5.97165, 0.0783996), (0, -5.97916, 0.351135), (0, -11.7114, 0.0783996),
        (0, -1.77814, -1.28052), (0, -11.7114, 0.0783996), (-2.15999, -5.97165, 0.0783996),
        (-2.62422, -1.10376, -0.811966), (0, -1.77814, -1.28052), (-2.15999, -5.97165, 0.0783996),
        (-2.62422, -1.10376, -0.811966), (-2.15999, -5.97165, 0.0783996), (-1.00511, -3.53603, 0.460724),
        (0, -3.6402, 0.996384), (-1.00511, -3.53603, 0.460724), (0, -5.97916, 0.351135),
        (-3.61475, 0.523361, -0.435928), (-3.04967, 0.274628, 0.671005), (-3.61475, 2.959, -0.435928),
        (-1.33673, 0.56517, 1.51672), (-1.96803, -0.236206, 0.873077), (-1.00511, -3.53603, 0.460724),
        (-3.58784, 3.77087, 0.671005), (-3.04967, 0.274628, 0.671005), (-1.42467, 3.89023, 0.751083),
        (-1.43999, 3.79219, -0.811966), (-2.27435, 4.92723, -0.41684), (-1.42467, 3.89023, 0.751083),
        (-1.42467, 3.89023, 0.751083), (-2.26152, 4.90634, 0.289413), (-3.58784, 3.77087, 0.671005),
        (-2.62422, -1.10376, -0.811966), (-1.96803, -0.236206, 0.873077), (-3.04967, 0.274628, 0.671005),
        (0, -11.7114, 0.0783996), (0, -5.97916, 0.351135), (-2.15999, -5.97165, 0.0783996),
        (1.00511, -3.53603, 0.460724), (1.28694, 0.56517, 1.51672), (0, -3.6402, 0.996384),
        (-1.00511, -3.53603, 0.460724), (0, -3.6402, 0.996384), (-1.33673, 0.56517, 1.51672),
        (-1.42467, 3.89023, 0.751083), (-1.33673, 0.56517, 1.51672), (0, 4.0096, 1.76315),
        (0, 4.0096, 1.76315), (1.42467, 3.89023, 0.751083), (0, 7.38785, 1.73131),
        (2.15999, -5.97165, 0.0783996), (0, -1.77814, -1.28052), (2.62422, -1.10376, -0.811966),
        (2.15999, -5.97165, 0.0783996), (1.00511, -3.53603, 0.460724), (0, -5.97916, 0.351135),
        (3.61475, 0.523361, -0.435928), (3.61475, 2.959, -0.435928), (3.04967, 0.274628, 0.671005),
        (3.58784, 3.77087, 0.671005), (1.42467, 3.89023, 0.751083), (3.04967, 0.274628, 0.671005),
        (3.58784, 3.77087, 0.671005), (3.61475, 2.959, -0.435928), (2.70465, 4.90634, 0.309814),
        (1.43999, 3.79219, -0.811966), (1.42467, 3.89023, 0.751083), (2.27435, 4.92723, -0.41684),
        (2.26152, 4.90634, 0.289413), (1.42467, 3.89023, 0.751083), (2.70465, 4.90634, 0.309814),
        (-2.15999, -5.97165, 0.0783996), (0, -5.97916, 0.351135), (-1.00511, -3.53603, 0.460724),
        (-3.58784, 3.77087, 0.671005), (-3.61475, 2.959, -0.435928), (-3.04967, 0.274628, 0.671005),
        (-3.04967, 0.274628, 0.671005), (-3.61475, 0.523361, -0.435928), (-2.62422, -1.10376, -0.811966),
        (-1.43999, 3.79219, -0.811966), (-3.61475, 2.959, -0.435928), (-2.27435, 4.92723, -0.41684),
        (-2.26152, 4.90634, 0.289413), (-1.42467, 3.89023, 0.751083), (-2.27435, 4.92723, -0.41684),
        (-2.70465, 4.90634, 0.309814), (-3.58784, 3.77087, 0.671005), (-2.26152, 4.90634, 0.289413),
        (-1.33673, 0.56517, 1.51672), (1.28694, 0.56517, 1.51672), (0, 4.0096, 1.76315),
        (1.42467, 3.89023, 0.751083), (0, 4.0096, 1.76315), (1.28694, 0.56517, 1.51672),
        (1.43999, 3.79219, -0.811966), (0, -1.77814, -1.28052), (-1.43999, 3.79219, -0.811966),
        (0, -1.77814, -1.28052), (-2.62422, -1.10376, -0.811966), (-1.43999, 3.79219, -0.811966),
        (3.61475, 2.959, -0.435928), (3.61475, 0.523361, -0.435928), (1.43999, 3.79219, -0.811966),
        (3.61475, 0.523361, -0.435928), (2.62422, -1.10376, -0.811966), (1.43999, 3.79219, -0.811966),
        (-3.61475, 2.959, -0.435928), (-1.43999, 3.79219, -0.811966), (-3.61475, 0.523361, -0.435928),
        (-3.61475, 0.523361, -0.435928), (-1.43999, 3.79219, -0.811966), (-2.62422, -1.10376, -0.811966),
        (0, -1.77814, -1.28052), (1.43999, 3.79219, -0.811966), (2.62422, -1.10376, -0.811966),
        (2.62422, -1.10376, -0.811966), (3.04967, 0.274628, 0.671005), (1.96803, -0.236206, 0.873077),
        (1.28694, 0.56517, 1.51672), (1.96803, -0.236206, 0.873077), (1.42467, 3.89023, 0.751083),
        (-1.33673, 0.56517, 1.51672), (-1.42467, 3.89023, 0.751083), (-1.96803, -0.236206, 0.873077),
        (-3.04967, 0.274628, 0.671005), (-1.96803, -0.236206, 0.873077), (-1.42467, 3.89023, 0.751083),
        (-2.62422, -1.10376, -0.811966), (-1.00511, -3.53603, 0.460724), (-1.96803, -0.236206, 0.873077),
        (1.28694, 0.56517, 1.51672), (1.00511, -3.53603, 0.460724), (1.96803, -0.236206, 0.873077),
        (-2.26152, 4.90634, 0.289413), (-2.27435, 4.92723, -0.41684), (-2.70465, 4.90634, 0.309814),
        (-3.58784, 3.77087, 0.671005), (-2.70465, 4.90634, 0.309814), (-3.61475, 2.959, -0.435928),
        (-2.27435, 4.92723, -0.41684), (-3.61475, 2.959, -0.435928), (-2.70465, 4.90634, 0.309814),
        (2.26152, 4.90634, 0.289413), (2.70465, 4.90634, 0.309814), (2.27435, 4.92723, -0.41684),
        (1.43999, 3.79219, -0.811966), (2.27435, 4.92723, -0.41684), (3.61475, 2.959, -0.435928),
        (2.70465, 4.90634, 0.309814), (3.61475, 2.959, -0.435928), (2.27435, 4.92723, -0.41684),
        (1.42467, 3.89023, 0.751083), (1.43999, 3.79219, -0.811966), (0, 7.38785, 1.73131),
        (0, 4.0096, 1.76315), (0, 7.38785, 1.73131), (-1.42467, 3.89023, 0.751083),
        (1.43999, 3.79219, -0.811966), (-1.43999, 3.79219, -0.811966), (0, 7.38785, 1.73131),
        (-1.42467, 3.89023, 0.751083), (0, 7.38785, 1.73131), (-1.43999, 3.79219, -0.811966))
    uvmap = (
        (0.525665, -1), (0.506287, -0.403488), (0.217407, -0.684235),
        (0.14389, -0.384583), (0.371979, -0.561615), (0.217407, -0.684235),
        (0.510803, -0.584961), (0.514938, -0.692169), (0.371979, -0.561615),
        (0.00263977, -0.166748), (0.0833587, -0.359756), (0, -0.173569),
        (0.14389, -0.384583), (0.233337, -0.394058), (0.371979, -0.561615),
        (0.0833587, -0.359756), (0.300293, -0.161026), (0.233337, -0.394058),
        (0.0833587, -0.359756), (0.14389, -0.384583), (0.00460815, -0.308365),
        (0.182938, -0.0896148), (0.180832, -0.0641479), (0.300293, -0.161026),
        (0.00263977, -0.166748), (0.121948, -0.0906982), (0.300293, -0.161026),
        (0.510803, -0.584961), (0.322433, -0.376129), (0.68367, -0.37384),
        (0.217407, -0.684235), (0.514938, -0.692169), (0.525665, -1),
        (0.506287, -0.403488), (0.525665, -1), (0.81221, -0.680481),
        (0.866531, -0.38002), (0.506287, -0.403488), (0.81221, -0.680481),
        (0.866531, -0.38002), (0.81221, -0.680481), (0.648758, -0.559875),
        (0.510803, -0.584961), (0.648758, -0.559875), (0.514938, -0.692169),
        (1, -0.302063), (0.923141, -0.354462), (0.995377, -0.167282),
        (0.68367, -0.37384), (0.775269, -0.390625), (0.648758, -0.559875),
        (0.990631, -0.160507), (0.923141, -0.354462), (0.692596, -0.158539),
        (0.694214, -0.110107), (0.807114, -0.0601806), (0.692596, -0.158539),
        (0.692596, -0.158539), (0.805695, -0.0856781), (0.990631, -0.160507),
        (0.866531, -0.38002), (0.775269, -0.390625), (0.923141, -0.354462),
        (0.525665, -1), (0.514938, -0.692169), (0.81221, -0.680481),
        (0.371979, -0.561615), (0.322433, -0.376129), (0.510803, -0.584961),
        (0.648758, -0.559875), (0.510803, -0.584961), (0.68367, -0.37384),
        (0.692596, -0.158539), (0.68367, -0.37384), (0.496658, -0.188034),
        (0.496658, -0.188034), (0.300293, -0.161026), (0.490265, 0),
        (0.217407, -0.684235), (0.506287, -0.403488), (0.14389, -0.384583),
        (0.217407, -0.684235), (0.371979, -0.561615), (0.514938, -0.692169),
        (0.00460815, -0.308365), (0, -0.173569), (0.0833587, -0.359756),
        (0.00263977, -0.166748), (0.300293, -0.161026), (0.0833587, -0.359756),
        (0.00263977, -0.166748), (0, -0.173569), (0.121948, -0.0906982),
        (0.297684, -0.112625), (0.300293, -0.161026), (0.180832, -0.0641479),
        (0.182938, -0.0896148), (0.300293, -0.161026), (0.121948, -0.0906982),
        (0.81221, -0.680481), (0.514938, -0.692169), (0.648758, -0.559875),
        (0.990631, -0.160507), (0.995377, -0.167282), (0.923141, -0.354462),
        (0.923141, -0.354462), (1, -0.302063), (0.866531, -0.38002),
        (0.694214, -0.110107), (0.995377, -0.167282), (0.807114, -0.0601806),
        (0.805695, -0.0856781), (0.692596, -0.158539), (0.807114, -0.0601806),
        (0.866714, -0.0859985), (0.990631, -0.160507), (0.805695, -0.0856781),
        (0.68367, -0.37384), (0.322433, -0.376129), (0.496658, -0.188034),
        (0.300293, -0.161026), (0.496658, -0.188034), (0.322433, -0.376129),
        (0.297684, -0.112625), (0.506287, -0.403488), (0.694214, -0.110107),
        (0.506287, -0.403488), (0.866531, -0.38002), (0.694214, -0.110107),
        (0, -0.173569), (0.00460815, -0.308365), (0.297684, -0.112625),
        (0.00460815, -0.308365), (0.14389, -0.384583), (0.297684, -0.112625),
        (0.995377, -0.167282), (0.694214, -0.110107), (1, -0.302063),
        (1, -0.302063), (0.694214, -0.110107), (0.866531, -0.38002),
        (0.506287, -0.403488), (0.297684, -0.112625), (0.14389, -0.384583),
        (0.14389, -0.384583), (0.0833587, -0.359756), (0.233337, -0.394058),
        (0.322433, -0.376129), (0.233337, -0.394058), (0.300293, -0.161026),
        (0.68367, -0.37384), (0.692596, -0.158539), (0.775269, -0.390625),
        (0.923141, -0.354462), (0.775269, -0.390625), (0.692596, -0.158539),
        (0.866531, -0.38002), (0.648758, -0.559875), (0.775269, -0.390625),
        (0.322433, -0.376129), (0.371979, -0.561615), (0.233337, -0.394058),
        (0.805695, -0.0856781), (0.807114, -0.0601806), (0.866714, -0.0859985),
        (0.990631, -0.160507), (0.866714, -0.0859985), (0.995377, -0.167282),
        (0.807114, -0.0601806), (0.995377, -0.167282), (0.866714, -0.0859985),
        (0.182938, -0.0896148), (0.121948, -0.0906982), (0.180832, -0.0641479),
        (0.297684, -0.112625), (0.180832, -0.0641479), (0, -0.173569),
        (0.121948, -0.0906982), (0, -0.173569), (0.180832, -0.0641479),
        (0.300293, -0.161026), (0.297684, -0.112625), (0.490265, 0),
        (0.496658, -0.188034), (0.490265, 0), (0.692596, -0.158539),
        (0.297684, -0.112625), (0.694214, -0.110107), (0.490265, 0),
        (0.692596, -0.158539), (0.490265, 0), (0.694214, -0.110107))
    return {
        'texture': pyglet.image.load('boat2tex.png').get_texture(),
        'vertices': vertices,
        # Vertices are stored unrolled (three per triangle), so the index
        # buffer is simply the identity sequence 0..len(vertices)-1.
        'indices': tuple(range(len(vertices))),
        'uvmap': uvmap}
def loadIsland2():
    """Load the second island model.

    Returns a dict consumed by the renderer with keys:
    'texture' (pyglet texture), 'vertices' (tuple of (x, y, z) floats),
    'indices' (flat triangle list) and 'uvmap' (tuple of (u, v) floats,
    one per vertex).
    """
    return {
        'texture': pyglet.image.load('island2tex.png').get_texture(),
        'vertices': (
            (-1.06761, -0.780533, 0.00448608),
            (-1.17841, -0.955368, 0.462875),
            (-1.06761, -1.4328, 0.00448608),
            (-1.72369, -1.4328, 0.00448608),
            (-1.72369, -0.780533, 0.00448608),
            (-1.06761, -1.4328, 0.00448608),
            (-1.33263, -1.054, 1.45544),
            (-1.23141, -0.776794, 1.4323),
            (-1.61009, -0.955154, 1.42603),
            (-1.61292, -1.15643, 0.484634),
            (-1.72369, -1.4328, 0.00448608),
            (-1.29454, -1.2719, 0.497116),
            (-1.49678, -0.839905, 0.450378),
            (-1.72369, -0.780533, 0.00448608),
            (-1.61292, -1.15643, 0.484634),
            (-1.50888, -0.677948, 1.40286),
            (-1.23141, -0.776794, 1.4323),
            (-1.55685, -0.556656, 1.00879),
            (-1.23141, -0.776794, 1.4323),
            (-1.33263, -1.054, 1.45544),
            (-1.04933, -0.737442, 1.06262),
            (-1.23445, -1.24449, 1.10498),
            (-1.33263, -1.054, 1.45544),
            (-1.74197, -1.23012, 1.06915),
            (-1.61009, -0.955154, 1.42603),
            (-1.50888, -0.677948, 1.40286),
            (-1.74197, -1.23012, 1.06915),
            (-1.17841, -0.955368, 0.462875),
            (-1.06761, -0.780533, 0.00448608),
            (-1.49678, -0.839905, 0.450378),
            (-1.29454, -1.2719, 0.497116),
            (-1.06761, -1.4328, 0.00448608),
            (-1.17841, -0.955368, 0.462875),
            (-1.06761, -1.4328, 0.00448608),
            (-1.72369, -0.780533, 0.00448608),
            (-1.06761, -0.780533, 0.00448608),
            (-1.50888, -0.677948, 1.40286),
            (-1.61009, -0.955154, 1.42603),
            (-1.23141, -0.776794, 1.4323),
            (-1.06761, -1.4328, 0.00448608),
            (-1.29454, -1.2719, 0.497116),
            (-1.72369, -1.4328, 0.00448608),
            (-1.61292, -1.15643, 0.484634),
            (-1.72369, -0.780533, 0.00448608),
            (-1.72369, -1.4328, 0.00448608),
            (-1.04933, -0.737442, 1.06262),
            (-1.55685, -0.556656, 1.00879),
            (-1.23141, -0.776794, 1.4323),
            (-1.23445, -1.24449, 1.10498),
            (-1.04933, -0.737442, 1.06262),
            (-1.33263, -1.054, 1.45544),
            (-1.61009, -0.955154, 1.42603),
            (-1.74197, -1.23012, 1.06915),
            (-1.33263, -1.054, 1.45544),
            (-1.55685, -0.556656, 1.00879),
            (-1.74197, -1.23012, 1.06915),
            (-1.50888, -0.677948, 1.40286),
            (-1.72369, -0.780533, 0.00448608),
            (-1.49678, -0.839905, 0.450378),
            (-1.06761, -0.780533, 0.00448608),
            (-1.29454, -1.2719, 0.497116),
            (-1.74197, -1.23012, 1.06915),
            (-1.61292, -1.15643, 0.484634),
            (-1.23445, -1.24449, 1.10498),
            (-1.74197, -1.23012, 1.06915),
            (-1.29454, -1.2719, 0.497116),
            (-1.49678, -0.839905, 0.450378),
            (-1.04933, -0.737442, 1.06262),
            (-1.17841, -0.955368, 0.462875),
            (-1.55685, -0.556656, 1.00879),
            (-1.04933, -0.737442, 1.06262),
            (-1.49678, -0.839905, 0.450378),
            (-1.23445, -1.24449, 1.10498),
            (-1.29454, -1.2719, 0.497116),
            (-1.04933, -0.737442, 1.06262),
            (-1.29454, -1.2719, 0.497116),
            (-1.17841, -0.955368, 0.462875),
            (-1.04933, -0.737442, 1.06262),
            (-1.55685, -0.556656, 1.00879),
            (-1.49678, -0.839905, 0.450378),
            (-1.74197, -1.23012, 1.06915),
            (-1.49678, -0.839905, 0.450378),
            (-1.61292, -1.15643, 0.484634),
            (-1.74197, -1.23012, 1.06915),
            (0.327194, -0.388901, 0.00752258),
            (0.327194, 0.267166, 0.00752258),
            (0.301727, -0.201691, 0.479523),
            (-0.328873, 0.267166, 0.00752258),
            (0.327194, 0.267166, 0.00752258),
            (-0.328873, -0.388901, 0.00752258),
            (0.0621948, 0.128891, 2.25124),
            (-0.215256, 0.0274505, 2.2326),
            (0.163391, -0.149185, 2.25803),
            (-0.303421, 0.0799713, 0.479523),
            (0.139984, 0.241714, 0.479523),
            (-0.328873, 0.267166, 0.00752258),
            (-0.141678, -0.363434, 0.479523),
            (-0.303421, 0.0799713, 0.479523),
            (-0.328873, -0.388901, 0.00752258),
            (0.138596, 0.239395, 1.88083),
            (0.0780181, 0.108582, 1.30341),
            (-0.300415, 0.223694, 1.85136),
            (-0.114059, -0.25061, 2.2394),
            (-0.140274, -0.361115, 1.86212),
            (0.163391, -0.149185, 2.25803),
            (0.163391, -0.149185, 2.25803),
            (0.298737, -0.200607, 1.89159),
            (0.0621948, 0.128891, 2.25124),
            (0.138596, 0.239395, 1.88083),
            (-0.300415, 0.223694, 1.85136),
            (0.0621948, 0.128891, 2.25124),
            (-0.215256, 0.0274505, 2.2326),
            (-0.300415, 0.223694, 1.85136),
            (-0.114059, -0.25061, 2.2394),
            (0.301727, -0.201691, 0.479523),
            (-0.141678, -0.363434, 0.479523),
            (0.327194, -0.388901, 0.00752258),
            (0.139984, 0.241714, 0.479523),
            (0.301727, -0.201691, 0.479523),
            (0.327194, 0.267166, 0.00752258),
            (0.327194, -0.388901, 0.00752258),
            (-0.328873, -0.388901, 0.00752258),
            (0.327194, 0.267166, 0.00752258),
            (-0.114059, -0.25061, 2.2394),
            (0.163391, -0.149185, 2.25803),
            (-0.215256, 0.0274505, 2.2326),
            (0.327194, 0.267166, 0.00752258),
            (-0.328873, 0.267166, 0.00752258),
            (0.139984, 0.241714, 0.479523),
            (-0.328873, 0.267166, 0.00752258),
            (-0.328873, -0.388901, 0.00752258),
            (-0.303421, 0.0799713, 0.479523),
            (0.301727, -0.201691, 0.479523),
            (0.168594, -0.139725, 1.01736),
            (-0.141678, -0.363434, 0.479523),
            (0.0780181, 0.108582, 1.30341),
            (0.168594, -0.139725, 1.01736),
            (0.139984, 0.241714, 0.479523),
            (-0.170273, 0.0180053, 1.01736),
            (-0.141678, -0.363434, 0.479523),
            (-0.0796966, -0.230301, 1.30341),
            (0.0780181, 0.108582, 1.30341),
            (-0.170273, 0.0180053, 1.01736),
            (-0.300415, 0.223694, 1.85136),
            (-0.140274, -0.361115, 1.86212),
            (-0.0796966, -0.230301, 1.30341),
            (0.298737, -0.200607, 1.89159),
            (0.138596, 0.239395, 1.88083),
            (0.298737, -0.200607, 1.89159),
            (0.0780181, 0.108582, 1.30341),
            (-0.140274, -0.361115, 1.86212),
            (-0.300415, 0.223694, 1.85136),
            (-0.0796966, -0.230301, 1.30341),
            (0.298737, -0.200607, 1.89159),
            (0.163391, -0.149185, 2.25803),
            (-0.140274, -0.361115, 1.86212),
            (0.138596, 0.239395, 1.88083),
            (0.0621948, 0.128891, 2.25124),
            (0.298737, -0.200607, 1.89159),
            (-0.215256, 0.0274505, 2.2326),
            (0.0621948, 0.128891, 2.25124),
            (-0.300415, 0.223694, 1.85136),
            (-0.140274, -0.361115, 1.86212),
            (-0.114059, -0.25061, 2.2394),
            (-0.300415, 0.223694, 1.85136),
            (-0.0796966, -0.230301, 1.30341),
            (-0.300415, 0.223694, 1.85136),
            (-0.170273, 0.0180053, 1.01736),
            (0.298737, -0.200607, 1.89159),
            (0.168594, -0.139725, 1.01736),
            (0.0780181, 0.108582, 1.30341),
            (0.298737, -0.200607, 1.89159),
            (-0.0796966, -0.230301, 1.30341),
            (0.168594, -0.139725, 1.01736),
            (-0.303421, 0.0799713, 0.479523),
            (-0.141678, -0.363434, 0.479523),
            (-0.170273, 0.0180053, 1.01736),
            (0.168594, -0.139725, 1.01736),
            (-0.0796966, -0.230301, 1.30341),
            (-0.141678, -0.363434, 0.479523),
            (-0.303421, 0.0799713, 0.479523),
            (-0.170273, 0.0180053, 1.01736),
            (0.139984, 0.241714, 0.479523),
            (0.0780181, 0.108582, 1.30341),
            (0.139984, 0.241714, 0.479523),
            (-0.170273, 0.0180053, 1.01736),
            (0.301727, -0.201691, 0.479523),
            (0.139984, 0.241714, 0.479523),
            (0.168594, -0.139725, 1.01736),
            (-0.328873, -0.388901, 0.00752258),
            (0.327194, -0.388901, 0.00752258),
            (-0.141678, -0.363434, 0.479523)),
        # The exporter emits an unindexed triangle soup (each vertex is
        # referenced exactly once, in order), so the index buffer is
        # simply 0..191.
        'indices': tuple(range(192)),
        'uvmap': (
            (0.383682, 0),
            (0.280838, -0.2034),
            (0, 0),
            (0, 0),
            (0.383682, 0),
            (0, 0),
            (0.222824, -0.64386),
            (0.38588, -0.633575),
            (0.28096, -0.630798),
            (0.162567, -0.213058),
            (0, 0),
            (0.0946502, -0.218597),
            (0.348755, -0.197861),
            (0.383682, 0),
            (0.162567, -0.213058),
            (0.444031, -0.620514),
            (0.38588, -0.633575),
            (0.515381, -0.445648),
            (0.38588, -0.633575),
            (0.222824, -0.64386),
            (0.409027, -0.469528),
            (0.110764, -0.488327),
            (0.222824, -0.64386),
            (0.119217, -0.472427),
            (0.28096, -0.630798),
            (0.444031, -0.620514),
            (0.119217, -0.472427),
            (0.280838, -0.2034),
            (0.383682, 0),
            (0.348755, -0.197861),
            (0.0946502, -0.218597),
            (0, 0),
            (0.280838, -0.2034),
            (0, 0),
            (0.383682, 0),
            (0.383682, 0),
            (0.444031, -0.620514),
            (0.28096, -0.630798),
            (0.38588, -0.633575),
            (0, 0),
            (0.0946502, -0.218597),
            (0, 0),
            (0.162567, -0.213058),
            (0.383682, 0),
            (0, 0),
            (0.409027, -0.469528),
            (0.515381, -0.445648),
            (0.38588, -0.633575),
            (0.110764, -0.488327),
            (0.409027, -0.469528),
            (0.222824, -0.64386),
            (0.28096, -0.630798),
            (0.119217, -0.472427),
            (0.222824, -0.64386),
            (0.515381, -0.445648),
            (0.119217, -0.472427),
            (0.444031, -0.620514),
            (0.383682, 0),
            (0.348755, -0.197861),
            (0.383682, 0),
            (0.0946502, -0.218597),
            (0.119217, -0.472427),
            (0.162567, -0.213058),
            (0.110764, -0.488327),
            (0.119217, -0.472427),
            (0.0946502, -0.218597),
            (0.348755, -0.197861),
            (0.409027, -0.469528),
            (0.280838, -0.2034),
            (0.515381, -0.445648),
            (0.409027, -0.469528),
            (0.348755, -0.197861),
            (0.110764, -0.488327),
            (0.0946502, -0.218597),
            (0.409027, -0.469528),
            (0.0946502, -0.218597),
            (0.280838, -0.2034),
            (0.409027, -0.469528),
            (0.515381, -0.445648),
            (0.348755, -0.197861),
            (0.119217, -0.472427),
            (0.348755, -0.197861),
            (0.162567, -0.213058),
            (0.119217, -0.472427),
            (0.614059, -0.00134277),
            (1, -0.00134277),
            (0.724182, -0.210785),
            (1, -0.00134277),
            (1, -0.00134277),
            (0.614059, -0.00134277),
            (0.918655, -0.996979),
            (0.858978, -0.988708),
            (0.755066, -1),
            (0.889877, -0.210785),
            (0.985016, -0.210785),
            (1, -0.00134277),
            (0.629044, -0.210785),
            (0.889877, -0.210785),
            (0.614059, -0.00134277),
            (0.983658, -0.832611),
            (0.906708, -0.576385),
            (0.974411, -0.819534),
            (0.695404, -0.99173),
            (0.630402, -0.82431),
            (0.755066, -1),
            (0.755066, -1),
            (0.724823, -0.837387),
            (0.918655, -0.996979),
            (0.983658, -0.832611),
            (0.974411, -0.819534),
            (0.918655, -0.996979),
            (0.858978, -0.988708),
            (0.974411, -0.819534),
            (0.695404, -0.99173),
            (0.724182, -0.210785),
            (0.629044, -0.210785),
            (0.614059, -0.00134277),
            (0.985016, -0.210785),
            (0.724182, -0.210785),
            (1, -0.00134277),
            (0.614059, -0.00134277),
            (0.614059, -0.00134277),
            (1, -0.00134277),
            (0.695404, -0.99173),
            (0.755066, -1),
            (0.858978, -0.988708),
            (1, -0.00134277),
            (1, -0.00134277),
            (0.985016, -0.210785),
            (1, -0.00134277),
            (0.614059, -0.00134277),
            (0.889877, -0.210785),
            (0.724182, -0.210785),
            (0.760635, -0.449448),
            (0.629044, -0.210785),
            (0.906708, -0.576385),
            (0.760635, -0.449448),
            (0.985016, -0.210785),
            (0.853424, -0.449448),
            (0.629044, -0.210785),
            (0.707352, -0.576385),
            (0.906708, -0.576385),
            (0.853424, -0.449448),
            (0.974411, -0.819534),
            (0.630402, -0.82431),
            (0.707352, -0.576385),
            (0.724823, -0.837387),
            (0.983658, -0.832611),
            (0.724823, -0.837387),
            (0.906708, -0.576385),
            (0.630402, -0.82431),
            (0.974411, -0.819534),
            (0.707352, -0.576385),
            (0.724823, -0.837387),
            (0.755066, -1),
            (0.630402, -0.82431),
            (0.983658, -0.832611),
            (0.918655, -0.996979),
            (0.724823, -0.837387),
            (0.858978, -0.988708),
            (0.918655, -0.996979),
            (0.974411, -0.819534),
            (0.630402, -0.82431),
            (0.695404, -0.99173),
            (0.974411, -0.819534),
            (0.707352, -0.576385),
            (0.974411, -0.819534),
            (0.853424, -0.449448),
            (0.724823, -0.837387),
            (0.760635, -0.449448),
            (0.906708, -0.576385),
            (0.724823, -0.837387),
            (0.707352, -0.576385),
            (0.760635, -0.449448),
            (0.889877, -0.210785),
            (0.629044, -0.210785),
            (0.853424, -0.449448),
            (0.760635, -0.449448),
            (0.707352, -0.576385),
            (0.629044, -0.210785),
            (0.889877, -0.210785),
            (0.853424, -0.449448),
            (0.985016, -0.210785),
            (0.906708, -0.576385),
            (0.985016, -0.210785),
            (0.853424, -0.449448),
            (0.724182, -0.210785),
            (0.985016, -0.210785),
            (0.760635, -0.449448),
            (0.614059, -0.00134277),
            (0.614059, -0.00134277),
            (0.629044, -0.210785))}
def loadIsland1():
    """Load the first island model.

    Returns a dict consumed by the renderer with keys:
    'texture' (pyglet texture), 'vertices' (tuple of (x, y, z) floats),
    'indices' (flat triangle list) and 'uvmap' (tuple of (u, v) floats,
    one per vertex).
    """
    return {
        'texture': pyglet.image.load('island1tex.png').get_texture(),
        'vertices': (
            (-0.419022, 2.4046, 1.98764),
            (-0.639664, 1.24741, 1.79893),
            (-0.461929, 2.09698, 2.75305),
            (-0.357971, -0.584793, 2.88612),
            (0.961838, -1, 2.68634),
            (-0.461929, 2.09698, 2.75305),
            (0.961838, -1, 2.68634),
            (0.923874, 2.09698, 2.60301),
            (-0.461929, 2.09698, 2.75305),
            (-0.961838, -0.584793, 2.3443),
            (0, -1.53358, 2.38168),
            (-0.357971, -0.584793, 2.88612),
            (0.961838, -1, 2.68634),
            (-0.357971, -0.584793, 2.88612),
            (0, -1.53358, 2.38168),
            (1.32661, 0.0526428, -0.0107574),
            (1.16219, -1.20143, -0.0107574),
            (-1.16055, -0.0850982, -0.0107574),
            (-0.739105, 2.50594, -0.00807189),
            (0.831543, 2.38768, -0.00807189),
            (-1.12611, 1.30867, -0.00807189),
            (0.669525, 0.656982, 1.76611),
            (0.476822, -0.191589, 1.33751),
            (-0.639664, 1.24741, 1.79893),
            (-0.439804, 0.0430908, 1.34462),
            (-0.722473, -1.08788, 1.83481),
            (-0.639664, 1.24741, 1.79893),
            (0.476822, -0.191589, 1.33751),
            (0.699387, -0.355774, 0.974243),
            (-0.439804, 0.0430908, 1.34462),
            (-0.458038, -0.867325, 0.990585),
            (0.458038, -0.867325, 0.990585),
            (-0.722473, -1.08788, 1.83481),
            (-0.458038, -0.867325, 0.990585),
            (-0.439804, 0.0430908, 1.34462),
            (-0.717773, -0.284958, 0.394806),
            (-0.458038, -0.867325, 0.990585),
            (-0.670227, -1.06004, 0.419617),
            (0.458038, -0.867325, 0.990585),
            (-0.717773, -0.284958, 0.394806),
            (-1.16055, -0.0850982, -0.0107574),
            (-0.670227, -1.06004, 0.419617),
            (0.669525, 0.656982, 1.76611),
            (0.961838, -1, 2.68634),
            (0.476822, -0.191589, 1.33751),
            (0.609802, 1.23466, 1.76611),
            (0.549988, 1.51712, 1.46841),
            (0.419022, 2.49605, 1.98764),
            (-0.288681, 2.24104, 1.08629),
            (-0.325546, 1.63148, 1.08629),
            (-0.419022, 2.4046, 1.98764),
            (0.831543, 2.38768, -0.00807189),
            (-0.739105, 2.50594, -0.00807189),
            (0.228531, 2.12608, 1.08629),
            (-0.373871, 1.71059, 0.521423),
            (0.343185, 1.6566, 0.525848),
            (-0.325546, 1.63148, 1.08629),
            (1.03369, 1.14606, -0.00807189),
            (0.831543, 2.38768, -0.00807189),
            (0.343185, 1.6566, 0.525848),
            (-0.373871, 1.71059, 0.521423),
            (-1.12611, 1.30867, -0.00807189),
            (0.343185, 1.6566, 0.525848),
            (0, 2.58858, 2.60301),
            (-0.419022, 2.4046, 1.98764),
            (-0.461929, 2.09698, 2.75305),
            (0.609802, 1.23466, 1.76611),
            (0.923874, 2.09698, 2.60301),
            (0.669525, 0.656982, 1.76611),
            (0, 2.58858, 2.60301),
            (0.923874, 2.09698, 2.60301),
            (0.419022, 2.49605, 1.98764),
            (0.419022, 2.49605, 1.98764),
            (0.923874, 2.09698, 2.60301),
            (0.609802, 1.23466, 1.76611),
            (-0.722473, -1.08788, 1.83481),
            (0.722473, -1.14145, 1.87538),
            (0, -1.53358, 2.38168),
            (-0.461929, 2.09698, 2.75305),
            (-0.639664, 1.24741, 1.79893),
            (-0.357971, -0.584793, 2.88612),
            (-0.461929, 2.09698, 2.75305),
            (0.923874, 2.09698, 2.60301),
            (0, 2.58858, 2.60301),
            (-1.08363, -1.33919, -0.0107574),
            (-1.16055, -0.0850982, -0.0107574),
            (1.16219, -1.20143, -0.0107574),
            (0.669525, 0.656982, 1.76611),
            (-0.639664, 1.24741, 1.79893),
            (0.609802, 1.23466, 1.76611),
            (1.03369, 1.14606, -0.00807189),
            (-1.12611, 1.30867, -0.00807189),
            (0.831543, 2.38768, -0.00807189),
            (-0.439804, 0.0430908, 1.34462),
            (-0.639664, 1.24741, 1.79893),
            (0.476822, -0.191589, 1.33751),
            (0.722473, -1.14145, 1.87538),
            (-0.722473, -1.08788, 1.83481),
            (0.458038, -0.867325, 0.990585),
            (-0.722473, -1.08788, 1.83481),
            (-0.439804, 0.0430908, 1.34462),
            (-0.458038, -0.867325, 0.990585),
            (-0.670227, -1.06004, 0.419617),
            (-0.458038, -0.867325, 0.990585),
            (-0.717773, -0.284958, 0.394806),
            (-1.16055, -0.0850982, -0.0107574),
            (-0.717773, -0.284958, 0.394806),
            (1.32661, 0.0526428, -0.0107574),
            (1.16219, -1.20143, -0.0107574),
            (1.32661, 0.0526428, -0.0107574),
            (0.699387, -0.355774, 0.974243),
            (-1.08363, -1.33919, -0.0107574),
            (-0.670227, -1.06004, 0.419617),
            (-1.16055, -0.0850982, -0.0107574),
            (-0.419022, 2.4046, 1.98764),
            (0.419022, 2.49605, 1.98764),
            (0.228531, 2.12608, 1.08629),
            (0.609802, 1.23466, 1.76611),
            (-0.639664, 1.24741, 1.79893),
            (0.549988, 1.51712, 1.46841),
            (0.228531, 2.12608, 1.08629),
            (-0.288681, 2.24104, 1.08629),
            (-0.419022, 2.4046, 1.98764),
            (-0.325546, 1.63148, 1.08629),
            (-0.288681, 2.24104, 1.08629),
            (-0.373871, 1.71059, 0.521423),
            (-1.12611, 1.30867, -0.00807189),
            (-0.373871, 1.71059, 0.521423),
            (-0.739105, 2.50594, -0.00807189),
            (1.03369, 1.14606, -0.00807189),
            (0.343185, 1.6566, 0.525848),
            (-1.12611, 1.30867, -0.00807189),
            (0.419022, 2.49605, 1.98764),
            (-0.419022, 2.4046, 1.98764),
            (0, 2.58858, 2.60301),
            (0, -1.53358, 2.38168),
            (-0.961838, -0.584793, 2.3443),
            (-0.722473, -1.08788, 1.83481),
            (-0.639664, 1.24741, 1.79893),
            (-0.722473, -1.08788, 1.83481),
            (-0.961838, -0.584793, 2.3443),
            (0, -1.53358, 2.38168),
            (0.722473, -1.14145, 1.87538),
            (0.961838, -1, 2.68634),
            (0.923874, 2.09698, 2.60301),
            (0.961838, -1, 2.68634),
            (0.669525, 0.656982, 1.76611),
            (0.961838, -1, 2.68634),
            (0.722473, -1.14145, 1.87538),
            (0.476822, -0.191589, 1.33751),
            (0.458038, -0.867325, 0.990585),
            (-0.670227, -1.06004, 0.419617),
            (1.16219, -1.20143, -0.0107574),
            (0.458038, -0.867325, 0.990585),
            (1.16219, -1.20143, -0.0107574),
            (0.699387, -0.355774, 0.974243),
            (-1.08363, -1.33919, -0.0107574),
            (1.16219, -1.20143, -0.0107574),
            (-0.670227, -1.06004, 0.419617),
            (-0.373871, 1.71059, 0.521423),
            (-0.288681, 2.24104, 1.08629),
            (-0.739105, 2.50594, -0.00807189),
            (0.343185, 1.6566, 0.525848),
            (0.831543, 2.38768, -0.00807189),
            (0.228531, 2.12608, 1.08629),
            (-0.288681, 2.24104, 1.08629),
            (0.228531, 2.12608, 1.08629),
            (-0.739105, 2.50594, -0.00807189),
            (0.699387, -0.355774, 0.974243),
            (-0.717773, -0.284958, 0.394806),
            (-0.439804, 0.0430908, 1.34462),
            (-0.325546, 1.63148, 1.08629),
            (0.343185, 1.6566, 0.525848),
            (0.549988, 1.51712, 1.46841),
            (0.343185, 1.6566, 0.525848),
            (0.228531, 2.12608, 1.08629),
            (0.549988, 1.51712, 1.46841),
            (0.549988, 1.51712, 1.46841),
            (-0.639664, 1.24741, 1.79893),
            (-0.325546, 1.63148, 1.08629),
            (-0.639664, 1.24741, 1.79893),
            (-0.419022, 2.4046, 1.98764),
            (-0.325546, 1.63148, 1.08629),
            (0.549988, 1.51712, 1.46841),
            (0.228531, 2.12608, 1.08629),
            (0.419022, 2.49605, 1.98764),
            (-0.961838, -0.584793, 2.3443),
            (-0.357971, -0.584793, 2.88612),
            (-0.639664, 1.24741, 1.79893),
            (-0.717773, -0.284958, 0.394806),
            (0.699387, -0.355774, 0.974243),
            (1.32661, 0.0526428, -0.0107574),
            (0.722473, -1.14145, 1.87538),
            (0.458038, -0.867325, 0.990585),
            (0.476822, -0.191589, 1.33751),
            (0.699387, -0.355774, 0.974243),
            (0.476822, -0.191589, 1.33751),
            (0.458038, -0.867325, 0.990585)),
        # The exporter emits an unindexed triangle soup (each vertex is
        # referenced exactly once, in order), so the index buffer is
        # simply 0..197.
        'indices': tuple(range(198)),
        'uvmap': (
            (0.955368, -0.689835),
            (0.674637, -0.624695),
            (0.880737, -0.954056),
            (0.230164, -1),
            (0.12944, -0.93103),
            (0.880737, -0.954056),
            (0.12944, -0.93103),
            (0.880737, -0.902267),
            (0.880737, -0.954056),
            (0.230164, -0.812958),
            (0, -0.825867),
            (0.230164, -1),
            (0.12944, -0.93103),
            (0.230164, -1),
            (0, -0.825867),
            (0.384796, 0),
            (0.0805664, 0),
            (0.351379, 0),
            (0.97995, -0.000915527),
            (0.951263, -0.000915527),
            (0.689499, -0.000915527),
            (0.531403, -0.613373),
            (0.325546, -0.465424),
            (0.674637, -0.624695),
            (0.382477, -0.467865),
            (0.108109, -0.637085),
            (0.674637, -0.624695),
            (0.325546, -0.465424),
            (0.285721, -0.340012),
            (0.382477, -0.467865),
            (0.161621, -0.345657),
            (0.161621, -0.345657),
            (0.108109, -0.637085),
            (0.161621, -0.345657),
            (0.382477, -0.467865),
            (0.302902, -0.139999),
            (0.161621, -0.345657),
            (0.114868, -0.14856),
            (0.161621, -0.345657),
            (0.302902, -0.139999),
            (0.351379, 0),
            (0.114868, -0.14856),
            (0.531403, -0.613373),
            (0.12944, -0.93103),
            (0.325546, -0.465424),
            (0.671539, -0.613373),
            (0.740067, -0.510605),
            (0.977554, -0.689835),
            (0.91568, -0.378693),
            (0.767807, -0.378693),
            (0.955368, -0.689835),
            (0.951263, -0.000915527),
            (0.97995, -0.000915527),
            (0.887802, -0.378693),
            (0.787003, -0.183701),
            (0.773911, -0.185226),
            (0.767807, -0.378693),
            (0.650055, -0.000915527),
            (0.951263, -0.000915527),
            (0.773911, -0.185226),
            (0.787003, -0.183701),
            (0.689499, -0.000915527),
            (0.773911, -0.185226),
            (1, -0.902267),
            (0.955368, -0.689835),
            (0.880737, -0.954056),
            (0.671539, -0.613373),
            (0.880737, -0.902267),
            (0.531403, -0.613373),
            (1, -0.902267),
            (0.880737, -0.902267),
            (0.977554, -0.689835),
            (0.977554, -0.689835),
            (0.880737, -0.902267),
            (0.671539, -0.613373),
            (0.108109, -0.637085),
            (0.0951232, -0.651093),
            (0, -0.825867),
            (0.880737, -0.954056),
            (0.674637, -0.624695),
            (0.230164, -1),
            (0.880737, -0.954056),
            (0.880737, -0.902267),
            (1, -0.902267),
            (0.0471496, 0),
            (0.351379, 0),
            (0.0805664, 0),
            (0.531403, -0.613373),
            (0.674637, -0.624695),
            (0.671539, -0.613373),
            (0.650055, -0.000915527),
            (0.689499, -0.000915527),
            (0.951263, -0.000915527),
            (0.382477, -0.467865),
            (0.674637, -0.624695),
            (0.325546, -0.465424),
            (0.0951232, -0.651093),
            (0.108109, -0.637085),
            (0.161621, -0.345657),
            (0.108109, -0.637085),
            (0.382477, -0.467865),
            (0.161621, -0.345657),
            (0.114868, -0.14856),
            (0.161621, -0.345657),
            (0.302902, -0.139999),
            (0.351379, 0),
            (0.302902, -0.139999),
            (0.384796, 0),
            (0.0805664, 0),
            (0.384796, 0),
            (0.285721, -0.340012),
            (0.0471496, 0),
            (0.114868, -0.14856),
            (0.351379, 0),
            (0.955368, -0.689835),
            (0.977554, -0.689835),
            (0.887802, -0.378693),
            (0.671539, -0.613373),
            (0.674637, -0.624695),
            (0.740067, -0.510605),
            (0.887802, -0.378693),
            (0.91568, -0.378693),
            (0.955368, -0.689835),
            (0.767807, -0.378693),
            (0.91568, -0.378693),
            (0.787003, -0.183701),
            (0.689499, -0.000915527),
            (0.787003, -0.183701),
            (0.97995, -0.000915527),
            (0.650055, -0.000915527),
            (0.773911, -0.185226),
            (0.689499, -0.000915527),
            (0.977554, -0.689835),
            (0.955368, -0.689835),
            (1, -0.902267),
            (0, -0.825867),
            (0.230164, -0.812958),
            (0.108109, -0.637085),
            (0.674637, -0.624695),
            (0.108109, -0.637085),
            (0.230164, -0.812958),
            (0, -0.825867),
            (0.0951232, -0.651093),
            (0.12944, -0.93103),
            (0.880737, -0.902267),
            (0.12944, -0.93103),
            (0.531403, -0.613373),
            (0.12944, -0.93103),
            (0.0951232, -0.651093),
            (0.325546, -0.465424),
            (0.161621, -0.345657),
            (0.114868, -0.14856),
            (0.0805664, 0),
            (0.161621, -0.345657),
            (0.0805664, 0),
            (0.285721, -0.340012),
            (0.0471496, 0),
            (0.0805664, 0),
            (0.114868, -0.14856),
            (0.787003, -0.183701),
            (0.91568, -0.378693),
            (0.97995, -0.000915527),
            (0.773911, -0.185226),
            (0.951263, -0.000915527),
            (0.887802, -0.378693),
            (0.91568, -0.378693),
            (0.887802, -0.378693),
            (0.97995, -0.000915527),
            (0.285721, -0.340012),
            (0.302902, -0.139999),
            (0.382477, -0.467865),
            (0.767807, -0.378693),
            (0.773911, -0.185226),
            (0.740067, -0.510605),
            (0.773911, -0.185226),
            (0.887802, -0.378693),
            (0.740067, -0.510605),
            (0.740067, -0.510605),
            (0.674637, -0.624695),
            (0.767807, -0.378693),
            (0.674637, -0.624695),
            (0.955368, -0.689835),
            (0.767807, -0.378693),
            (0.740067, -0.510605),
            (0.887802, -0.378693),
            (0.977554, -0.689835),
            (0.230164, -0.812958),
            (0.230164, -1),
            (0.674637, -0.624695),
            (0.302902, -0.139999),
            (0.285721, -0.340012),
            (0.384796, 0),
            (0.0951232, -0.651093),
            (0.161621, -0.345657),
            (0.325546, -0.465424),
            (0.285721, -0.340012),
            (0.325546, -0.465424),
            (0.161621, -0.345657))}
def loadCheckpoint():
    """Load the checkpoint marker model (an octahedron-like mesh).

    Returns the same renderer dict as the other load* functions, except
    'texture' is None: checkpoints are drawn untextured.
    """
    return {
        'texture': None,  # no texture file; rendered with plain colour
        'vertices': (
            (-0.00740051, -0.00157165, 1.41418),
            (1.17003, -1.13281, 0.00520324),
            (1.17186, 1.12926, 0.00704956),
            (1.17186, 1.12926, 0.00704956),
            (1.17003, -1.13281, 0.00520324),
            (0.00740051, 0.00157165, -1.41418),
            (-0.00740051, -0.00157165, 1.41418),
            (-1.17186, -1.12926, -0.00704956),
            (1.17003, -1.13281, 0.00520324),
            (0.00740051, 0.00157165, -1.41418),
            (-1.17003, 1.13281, -0.00520324),
            (1.17186, 1.12926, 0.00704956),
            (-1.17186, -1.12926, -0.00704956),
            (-1.17003, 1.13281, -0.00520324),
            (0.00740051, 0.00157165, -1.41418),
            (-0.00740051, -0.00157165, 1.41418),
            (-1.17003, 1.13281, -0.00520324),
            (-1.17186, -1.12926, -0.00704956),
            (0.00740051, 0.00157165, -1.41418),
            (1.17003, -1.13281, 0.00520324),
            (-1.17186, -1.12926, -0.00704956),
            (-0.00740051, -0.00157165, 1.41418),
            (1.17186, 1.12926, 0.00704956),
            (-1.17003, 1.13281, -0.00520324)),
        # Unindexed triangle soup: one index per vertex, in order.
        'indices': tuple(range(24)),
        'uvmap': (
            (0.015625, -0.960938),
            (0.960938, -0.96875),
            (0.476563, -0.476563),
            (0.476563, -0.476563),
            (0.960938, -0.96875),
            (0.96875, -0.0234375),
            (0.015625, -0.960938),
            (0.507813, -0.507813),
            (0.960938, -0.96875),
            (0.96875, -0.0234375),
            (0.0234375, -0.015625),
            (0.476563, -0.476563),
            (0.507813, -0.507813),
            (0.0234375, -0.015625),
            (0.96875, -0.0234375),
            (0.015625, -0.960938),
            (0.0234375, -0.015625),
            (0.507813, -0.507813),
            (0.96875, -0.0234375),
            (0.960938, -0.96875),
            (0.507813, -0.507813),
            (0.015625, -0.960938),
            (0.476563, -0.476563),
            (0.0234375, -0.015625))}
def loadTest():
    """Load a unit test cube (side 1, centred on the origin).

    Same renderer dict as the other load* functions; every face uses the
    full texture, so the six per-face UV sets are identical.
    """
    # UVs for one quad drawn as two triangles; vertex order matches the
    # per-face vertex windings below.
    face_uv = (
        (0.0, 0.0), (1.0, 0.0), (1.0, 1.0),
        (1.0, 1.0), (0.0, 1.0), (0.0, 0.0))
    return {
        'texture': pyglet.image.load('watertex.png').get_texture(),
        'vertices': (
            # front
            (-0.5, -0.5, 0.5), ( 0.5, -0.5, 0.5), ( 0.5, 0.5, 0.5),
            ( 0.5, 0.5, 0.5), (-0.5, 0.5, 0.5), (-0.5, -0.5, 0.5),
            # back
            (-0.5, -0.5, -0.5), (-0.5, 0.5, -0.5), ( 0.5, 0.5, -0.5),
            ( 0.5, 0.5, -0.5), ( 0.5, -0.5, -0.5), (-0.5, -0.5, -0.5),
            # top
            (-0.5, 0.5, -0.5), (-0.5, 0.5, 0.5), ( 0.5, 0.5, 0.5),
            ( 0.5, 0.5, 0.5), ( 0.5, 0.5, -0.5), (-0.5, 0.5, -0.5),
            # bottom
            (-0.5, -0.5, -0.5), ( 0.5, -0.5, -0.5), ( 0.5, -0.5, 0.5),
            ( 0.5, -0.5, 0.5), (-0.5, -0.5, 0.5), (-0.5, -0.5, -0.5),
            # right
            ( 0.5, -0.5, -0.5), ( 0.5, 0.5, -0.5), ( 0.5, 0.5, 0.5),
            ( 0.5, 0.5, 0.5), ( 0.5, -0.5, 0.5), ( 0.5, -0.5, -0.5),
            # left
            (-0.5, -0.5, -0.5), (-0.5, -0.5, 0.5), (-0.5, 0.5, 0.5),
            (-0.5, 0.5, 0.5), (-0.5, 0.5, -0.5), (-0.5, -0.5, -0.5)),
        # Unindexed triangle soup: 6 faces * 2 triangles * 3 vertices.
        'indices': tuple(range(36)),
        # Six identical faces, front/back/top/bottom/right/left order.
        'uvmap': face_uv * 6}
| 28.531789 | 64 | 0.511472 | 8,636 | 52,955 | 3.135711 | 0.087772 | 0.010487 | 0.0113 | 0.015066 | 0.970162 | 0.765399 | 0.743796 | 0.57873 | 0.548523 | 0.407939 | 0 | 0.65191 | 0.231763 | 52,955 | 1,855 | 65 | 28.54717 | 0.013741 | 0.002039 | 0 | 0.969962 | 0 | 0 | 0.004434 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.003277 | true | 0 | 0.000546 | 0.003277 | 0.0071 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
97472731f389b1564abb61d6a94ab743d11a5e7b | 11,723 | py | Python | Day5/run_tests.py | lukasHD/adventofcode2019 | 409a2008de43ee70258e02de5a9aad27aa18c67a | [
"MIT"
] | null | null | null | Day5/run_tests.py | lukasHD/adventofcode2019 | 409a2008de43ee70258e02de5a9aad27aa18c67a | [
"MIT"
] | null | null | null | Day5/run_tests.py | lukasHD/adventofcode2019 | 409a2008de43ee70258e02de5a9aad27aa18c67a | [
"MIT"
] | null | null | null | import day5
import unittest
import io
import re
from unittest.mock import patch
# run tests like this: `python -m unittest run_tests.py -v`
class TestRunner(unittest.TestCase):
def test_day2_1_1(self):
self.assertEqual(day5.runIntcode([1,9,10,3,2,3,11,0,99,30,40,50]), [3500,9,10,70,2,3,11,0,99,30,40,50])
def test_day2_1_2(self):
self.assertEqual(day5.runIntcode([1,0,0,0,99]), [2,0,0,0,99])
def test_day2_1_3(self):
self.assertEqual(day5.runIntcode([2,3,0,3,99]), [2,3,0,6,99])
def test_day2_1_4(self):
self.assertEqual(day5.runIntcode([2,4,4,5,99,0]), [2,4,4,5,99,9801])
def test_day2_1_5(self):
self.assertEqual(day5.runIntcode([1,1,1,4,99,5,6,0,99]), [30,1,1,4,2,5,6,0,99])
def test_day5_1_1(self):
self.assertEqual(day5.runCode([1,9,10,3,2,3,11,0,99,30,40,50]), [3500,9,10,70,2,3,11,0,99,30,40,50])
def test_day5_1_2(self):
self.assertEqual(day5.runCode([1,0,0,0,99]), [2,0,0,0,99])
def test_day5_1_3(self):
self.assertEqual(day5.runCode([2,3,0,3,99]), [2,3,0,6,99])
def test_day5_1_4(self):
self.assertEqual(day5.runCode([2,4,4,5,99,0]), [2,4,4,5,99,9801])
def test_day5_1_5(self):
self.assertEqual(day5.runCode([1,1,1,4,99,5,6,0,99]), [30,1,1,4,2,5,6,0,99])
def test_day5_1_6(self):
self.assertEqual(day5.runCode([1002,4,3,4,33]), [1002,4,3,4,99])
def test_day5_1_7(self):
self.assertEqual(day5.runCode([1101,100,-1,4,0]), [1101,100,-1,4,99])
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_inp_1(self, input, mock_stdout):
input.return_value = input_data = "123"
day5.runCode([3,0,4,0,99])
#print(mock_stdout.getvalue())
regex = r"Output is: (\d+)"
matches = re.findall(regex, mock_stdout.getvalue(), re.MULTILINE)
self.assertEqual(matches[-1], input_data)
def getLastOutput(self, string, nothing):
regex = r"Output is: (\d+)"
matches = re.findall(regex, string, re.MULTILINE)
return matches[-1]
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_inp_2(self, input, mock_stdout):
input.return_value = input_data = "-456"
day5.runCode([3,0,4,0,99])
regex = r"Output is: (-*\d+)"
matches = re.findall(regex, mock_stdout.getvalue(), re.MULTILINE)
self.assertEqual(matches[-1], input_data)
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_eql_1(self, input, mock_stdout):
input.return_value = "8"
expected = "1"
day5.runCode([3,9,8,9,10,9,4,9,99,-1,8])
regex = r"Output is: (-*\d+)"
matches = re.findall(regex, mock_stdout.getvalue(), re.MULTILINE)
self.assertEqual(matches[-1], expected)
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_eql_2(self, input, mock_stdout):
input.return_value = "7"
expected = "0"
day5.runCode([3,9,8,9,10,9,4,9,99,-1,8])
regex = r"Output is: (-*\d+)"
matches = re.findall(regex, mock_stdout.getvalue(), re.MULTILINE)
self.assertEqual(matches[-1], expected)
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_eql_3(self, input, mock_stdout):
input.return_value = "-9"
expected = "0"
day5.runCode([3,9,8,9,10,9,4,9,99,-1,8])
regex = r"Output is: (-*\d+)"
matches = re.findall(regex, mock_stdout.getvalue(), re.MULTILINE)
self.assertEqual(matches[-1], expected)
# immediate mode
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_eql_4(self, input, mock_stdout):
input.return_value = "8"
expected = "1"
day5.runCode([3,3,1108,-1,8,3,4,3,99])
regex = r"Output is: (-*\d+)"
matches = re.findall(regex, mock_stdout.getvalue(), re.MULTILINE)
self.assertEqual(matches[-1], expected)
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_eql_5(self, input, mock_stdout):
input.return_value = "7"
expected = "0"
day5.runCode([3,3,1108,-1,8,3,4,3,99])
regex = r"Output is: (-*\d+)"
matches = re.findall(regex, mock_stdout.getvalue(), re.MULTILINE)
self.assertEqual(matches[-1], expected)
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_eql_6(self, input, mock_stdout):
input.return_value = "-9"
expected = "0"
day5.runCode([3,3,1108,-1,8,3,4,3,99])
regex = r"Output is: (-*\d+)"
matches = re.findall(regex, mock_stdout.getvalue(), re.MULTILINE)
self.assertEqual(matches[-1], expected)
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_les_1(self, input, mock_stdout):
input.return_value = "7"
expected = "1"
day5.runCode([3,9,7,9,10,9,4,9,99,-1,8])
regex = r"Output is: (-*\d+)"
matches = re.findall(regex, mock_stdout.getvalue(), re.MULTILINE)
self.assertEqual(matches[-1], expected)
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_les_2(self, input, mock_stdout):
input.return_value = "8"
expected = "0"
day5.runCode([3,9,7,9,10,9,4,9,99,-1,8])
regex = r"Output is: (-*\d+)"
matches = re.findall(regex, mock_stdout.getvalue(), re.MULTILINE)
self.assertEqual(matches[-1], expected)
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_les_3(self, input, mock_stdout):
    """Less-than opcode, position mode: 9 is not < 8, so the program prints 0."""
    input.return_value = "9"
    day5.runCode([3, 9, 7, 9, 10, 9, 4, 9, 99, -1, 8])
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "0")
# immediate mode
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_les_4(self, input, mock_stdout):
    """Less-than opcode, immediate mode: 7 < 8, so the program prints 1."""
    input.return_value = "7"
    day5.runCode([3, 3, 1107, -1, 8, 3, 4, 3, 99])
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "1")
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_les_5(self, input, mock_stdout):
    """Less-than opcode, immediate mode: 8 is not < 8, so the program prints 0."""
    input.return_value = "8"
    day5.runCode([3, 3, 1107, -1, 8, 3, 4, 3, 99])
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "0")
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_les_6(self, input, mock_stdout):
    """Less-than opcode, immediate mode: 9 is not < 8, so the program prints 0."""
    input.return_value = "9"
    day5.runCode([3, 3, 1107, -1, 8, 3, 4, 3, 99])
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "0")
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_jmp_1(self, input, mock_stdout):
    """Jump opcodes, position mode: zero input makes the program print 0."""
    input.return_value = "0"
    day5.runCode([3, 12, 6, 12, 15, 1, 13, 14, 13, 4, 13, 99, -1, 0, 1, 9])
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "0")
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_jmp_2(self, input, mock_stdout):
    """Jump opcodes, position mode: non-zero input (-100) makes the program print 1."""
    input.return_value = "-100"
    day5.runCode([3, 12, 6, 12, 15, 1, 13, 14, 13, 4, 13, 99, -1, 0, 1, 9])
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "1")
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_jmp_3(self, input, mock_stdout):
    """Jump opcodes, position mode: non-zero input (123) makes the program print 1."""
    input.return_value = "123"
    day5.runCode([3, 12, 6, 12, 15, 1, 13, 14, 13, 4, 13, 99, -1, 0, 1, 9])
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "1")
# immediate mode
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_jmp_4(self, input, mock_stdout):
    """Jump opcodes, immediate mode: zero input makes the program print 0."""
    input.return_value = "0"
    day5.runCode([3, 3, 1105, -1, 9, 1101, 0, 0, 12, 4, 12, 99, 1])
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "0")
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_jmp_5(self, input, mock_stdout):
    """Jump opcodes, immediate mode: non-zero input (-100) makes the program print 1."""
    input.return_value = "-100"
    day5.runCode([3, 3, 1105, -1, 9, 1101, 0, 0, 12, 4, 12, 99, 1])
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "1")
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_jmp_6(self, input, mock_stdout):
    """Jump opcodes, immediate mode: non-zero input (123) makes the program print 1."""
    input.return_value = "123"
    day5.runCode([3, 3, 1105, -1, 9, 1101, 0, 0, 12, 4, 12, 99, 1])
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "1")
# test complex
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_run_1(self, input, mock_stdout):
    """Larger compare program: input below 8 makes it print 999."""
    input.return_value = "7"
    program = [3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31,
               1106, 0, 36, 98, 0, 0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104,
               999, 1105, 1, 46, 1101, 1000, 1, 20, 4, 20, 1105, 1, 46, 98, 99]
    day5.runCode(program)
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "999")
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_run_2(self, input, mock_stdout):
    """Larger compare program: input equal to 8 makes it print 1000."""
    input.return_value = "8"
    program = [3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31,
               1106, 0, 36, 98, 0, 0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104,
               999, 1105, 1, 46, 1101, 1000, 1, 20, 4, 20, 1105, 1, 46, 98, 99]
    day5.runCode(program)
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "1000")
@patch('sys.stdout', new_callable=io.StringIO)
@patch.object(day5, "input", create=True)
def test_run_3(self, input, mock_stdout):
    """Larger compare program: input above 8 makes it print 1001."""
    input.return_value = "9"
    program = [3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31,
               1106, 0, 36, 98, 0, 0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104,
               999, 1105, 1, 46, 1101, 1000, 1, 20, 4, 20, 1105, 1, 46, 98, 99]
    day5.runCode(program)
    outputs = re.findall(r"Output is: (-*\d+)", mock_stdout.getvalue(), re.MULTILINE)
    self.assertEqual(outputs[-1], "1001")
if __name__ == '__main__':
unittest.main() | 36.981073 | 176 | 0.612812 | 1,816 | 11,723 | 3.852974 | 0.061674 | 0.067172 | 0.061741 | 0.048021 | 0.951551 | 0.93926 | 0.890953 | 0.88638 | 0.884379 | 0.866657 | 0 | 0.115352 | 0.204299 | 11,723 | 317 | 177 | 36.981073 | 0.634756 | 0.011857 | 0 | 0.738589 | 0 | 0 | 0.073415 | 0 | 0 | 0 | 0 | 0 | 0.145228 | 1 | 0.149378 | false | 0 | 0.020747 | 0 | 0.178423 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9756b8eac16e0f1be55cc935e135695a49b24738 | 5,350 | bzl | Python | pycross/private/target_environment.bzl | jvolkman/rules_python_cross | 7d6426a9c929c8a789d0edb502f29474ecf8a63c | [
"Apache-2.0"
] | 4 | 2022-03-29T13:58:58.000Z | 2022-03-31T11:10:28.000Z | pycross/private/target_environment.bzl | jvolkman/rules_python_cross | 7d6426a9c929c8a789d0edb502f29474ecf8a63c | [
"Apache-2.0"
] | null | null | null | pycross/private/target_environment.bzl | jvolkman/rules_python_cross | 7d6426a9c929c8a789d0edb502f29474ecf8a63c | [
"Apache-2.0"
] | null | null | null | """Implementation of the pycross_target_environment rule."""
load(":providers.bzl", "PycrossTargetEnvironmentInfo")
def fully_qualified_label(label):
    """Render a Label as a fully qualified "@repo//package:name" string."""
    return "@" + label.workspace_name + "//" + label.package + ":" + label.name
def _target_python_impl(ctx):
    """Write a JSON description of the target Python environment.

    Invokes the generator tool with the configured implementation, version,
    abi/platform tags, constraint labels and environment-marker overrides,
    producing "<name>.json".
    """
    out = ctx.actions.declare_file(ctx.attr.name + ".json")

    argv = [
        "--name", ctx.attr.name,
        "--output", out.path,
        "--implementation", ctx.attr.implementation,
        "--version", ctx.attr.version,
    ]
    for abi in ctx.attr.abis:
        argv += ["--abi", abi]
    for platform in ctx.attr.platforms:
        argv += ["--platform", platform]
    for constraint in ctx.attr.python_compatible_with:
        argv += ["--python-compatible-with", fully_qualified_label(constraint.label)]
    # NOTE(review): the attribute name "envornment_markers" is misspelled in the
    # rule definition; it is referenced as-is here so the two stay in sync.
    for key, val in ctx.attr.envornment_markers.items():
        argv += ["--environment-marker", "%s=%s" % (key, val)]

    ctx.actions.run(
        outputs = [out],
        executable = ctx.executable._tool,
        arguments = argv,
    )

    return [
        PycrossTargetEnvironmentInfo(
            python_compatible_with = ctx.attr.python_compatible_with,
            file = out,
        ),
        DefaultInfo(files = depset([out])),
    ]
# Rule: describes a target Python environment (implementation, version,
# PEP 425 abi/platform tags, selection constraints, marker overrides) and
# materializes it as a JSON file via the target_environment_generator tool.
pycross_target_environment = rule(
    implementation = _target_python_impl,
    attrs = {
        "implementation": attr.string(
            doc = (
                "The PEP 425 implementation abbreviation " +
                "(defaults to 'cp' for CPython)."
            ),
            mandatory = False,
            default = "cp",
        ),
        "version": attr.string(
            doc = "The python version.",
            mandatory = True,
        ),
        "abis": attr.string_list(
            doc = "A list of PEP 425 abi tags.",
            mandatory = False,
            default = [],
        ),
        "platforms": attr.string_list(
            doc = "A list of PEP 425 platform tags.",
            mandatory = False,
            default = [],
        ),
        "python_compatible_with": attr.label_list(
            doc = (
                "A list of constraints that, when satisfied, indicates this " +
                "target_platform should be selected."
            ),
            mandatory = True,
            allow_empty = False,
        ),
        # NOTE(review): attribute name is misspelled ("envornment_markers");
        # renaming it would break existing BUILD files — confirm before fixing.
        "envornment_markers": attr.string_dict(
            doc = "Environment marker overrides.",
            mandatory = False,
            default = {},
        ),
        "_tool": attr.label(
            default = Label("//pycross/private/tools:target_environment_generator"),
            # NOTE(review): cfg = "host" is deprecated in newer Bazel releases
            # in favor of cfg = "exec" — verify the supported Bazel range.
            cfg = "host",
            executable = True,
        ),
    }
)
def _macos_target_python_impl(ctx):
    """Write a JSON description of the (macOS) target Python environment.

    NOTE: this implementation is currently identical to _target_python_impl;
    both shell out to the generator tool and emit "<name>.json".
    """
    result = ctx.actions.declare_file(ctx.attr.name + ".json")

    tool_args = [
        "--name",
        ctx.attr.name,
        "--output",
        result.path,
        "--implementation",
        ctx.attr.implementation,
        "--version",
        ctx.attr.version,
    ]
    for abi in ctx.attr.abis:
        tool_args.append("--abi")
        tool_args.append(abi)
    for platform in ctx.attr.platforms:
        tool_args.append("--platform")
        tool_args.append(platform)
    for constraint in ctx.attr.python_compatible_with:
        tool_args.append("--python-compatible-with")
        tool_args.append(fully_qualified_label(constraint.label))
    # NOTE(review): "envornment_markers" is the (misspelled) attribute name
    # declared on the rule; referenced unchanged to match.
    for key, val in ctx.attr.envornment_markers.items():
        tool_args.append("--environment-marker")
        tool_args.append("%s=%s" % (key, val))

    ctx.actions.run(
        outputs = [result],
        executable = ctx.executable._tool,
        arguments = tool_args,
    )

    return [
        PycrossTargetEnvironmentInfo(
            python_compatible_with = ctx.attr.python_compatible_with,
            file = result,
        ),
        DefaultInfo(files = depset([result])),
    ]
# Rule: macOS variant of pycross_target_environment. The attribute schema is
# currently identical to the generic rule; only the name differs.
pycross_macos_environment = rule(
    implementation = _macos_target_python_impl,
    attrs = {
        "implementation": attr.string(
            doc = (
                "The PEP 425 implementation abbreviation " +
                "(defaults to 'cp' for CPython)."
            ),
            mandatory = False,
            default = "cp",
        ),
        "version": attr.string(
            doc = "The python version.",
            mandatory = True,
        ),
        "abis": attr.string_list(
            doc = "A list of PEP 425 abi tags.",
            mandatory = False,
            default = [],
        ),
        "platforms": attr.string_list(
            doc = "A list of PEP 425 platform tags.",
            mandatory = False,
            default = [],
        ),
        "python_compatible_with": attr.label_list(
            doc = (
                "A list of constraints that, when satisfied, indicates this " +
                "target_platform should be selected."
            ),
            mandatory = True,
            allow_empty = False,
        ),
        # NOTE(review): attribute name is misspelled ("envornment_markers");
        # renaming it would break existing BUILD files — confirm before fixing.
        "envornment_markers": attr.string_dict(
            doc = "Environment marker overrides.",
            mandatory = False,
            default = {},
        ),
        "_tool": attr.label(
            default = Label("//pycross/private/tools:target_environment_generator"),
            # NOTE(review): cfg = "host" is deprecated in newer Bazel releases
            # in favor of cfg = "exec" — verify the supported Bazel range.
            cfg = "host",
            executable = True,
        ),
    }
)
| 27.020202 | 84 | 0.514019 | 491 | 5,350 | 5.460285 | 0.189409 | 0.046997 | 0.074599 | 0.026856 | 0.906378 | 0.906378 | 0.906378 | 0.906378 | 0.906378 | 0.906378 | 0 | 0.005303 | 0.365607 | 5,350 | 197 | 85 | 27.15736 | 0.78462 | 0.010093 | 0 | 0.890173 | 0 | 0 | 0.205293 | 0.042344 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017341 | false | 0 | 0 | 0.00578 | 0.034682 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
97b20ff31cf70f272c13cb7575820833ed214244 | 94 | py | Python | tests/__init__.py | DeeUnderscore/perkeeppy | 010023948e76e00b91c8aecf7f975a332a30f0c9 | [
"MIT"
] | 4 | 2018-10-24T18:59:35.000Z | 2019-06-29T16:33:22.000Z | tests/__init__.py | DeeUnderscore/perkeeppy | 010023948e76e00b91c8aecf7f975a332a30f0c9 | [
"MIT"
] | 2 | 2019-05-26T22:59:16.000Z | 2019-05-28T22:51:21.000Z | tests/__init__.py | DeeUnderscore/perkeeppy | 010023948e76e00b91c8aecf7f975a332a30f0c9 | [
"MIT"
] | 2 | 2018-10-25T13:38:29.000Z | 2019-05-26T19:20:08.000Z | from unittest import TestLoader
def get_tests():
    """Discover and return the unittest suite under the tests/ directory."""
    loader = TestLoader()
    return loader.discover('tests/')
| 15.666667 | 42 | 0.734043 | 11 | 94 | 6.181818 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.148936 | 94 | 5 | 43 | 18.8 | 0.85 | 0 | 0 | 0 | 0 | 0 | 0.06383 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
97d1368098fdb6aeede4ac35339639430ec3cd70 | 131 | py | Python | asana/resources/user_task_lists.py | shubhamdipt/python-asana | 8e05328fe8605638128be9fce449fbd34a646e01 | [
"MIT"
] | null | null | null | asana/resources/user_task_lists.py | shubhamdipt/python-asana | 8e05328fe8605638128be9fce449fbd34a646e01 | [
"MIT"
] | null | null | null | asana/resources/user_task_lists.py | shubhamdipt/python-asana | 8e05328fe8605638128be9fce449fbd34a646e01 | [
"MIT"
] | null | null | null |
from .gen.user_task_lists import _UserTaskLists
class UserTaskLists(_UserTaskLists):
"""User Task Lists resource"""
pass
| 18.714286 | 47 | 0.755725 | 15 | 131 | 6.333333 | 0.666667 | 0.168421 | 0.273684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.160305 | 131 | 6 | 48 | 21.833333 | 0.863636 | 0.183206 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
c10210739b5fff26d874d58eeee1fd16a1a0f1e1 | 41,213 | py | Python | lyka.py | dariusgab/LYKAbot | b2f1a0a12fa89e166d11a0ec1035fd0f4e86c515 | [
"CC0-1.0"
] | null | null | null | lyka.py | dariusgab/LYKAbot | b2f1a0a12fa89e166d11a0ec1035fd0f4e86c515 | [
"CC0-1.0"
] | null | null | null | lyka.py | dariusgab/LYKAbot | b2f1a0a12fa89e166d11a0ec1035fd0f4e86c515 | [
"CC0-1.0"
] | null | null | null | import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b64decode("eJzsvQecFMeVP17VPUtYksggJGgkIYEQMGEngSS0iWURC6uZhYWVZTy7r1kaZmeWmVnYlcTZZ9mWnC3neM45nNOd0zme0/nOZ59zOp9zzvb5zun8r/eqe6aqpmcBGfuv+31OKnp7qqqrq16F960XqseY/99n5zB2i/hbXbuaMRCBsyJjI/V7zkZ4cG+xESu4t9mIHdxH2EgkuG9jI23B/Rw2Mie4n8tG5rKZy4Kf89jIPDZcWs8i7nx2qp1VnsI45y5jJ9sZWOwezniJsyPuPHZyAbtHVGghPWmz4iI2sZiNLGYcIuxRImEJgza6EWXPoZulDOYymMdgPgNR2gIG4uFFDBYzEJlFNpFhGRtfyEaWMVjOYAWDlQxWMVjNYA2DtQwuZ7COwRUMrmSwnsEGBg6DjQyuYnA1g2sYbGJwLYPrGGxmsIXB9Qy2MriBwTYG2xnsYBBlEGMQZ5Bg0MEgySDFIM0gwyDLYCeDXQxuZHATg5sZ7GZwC4NOBl0Muhn0MOhlsIdBH4O9DPoZ7GNwK4P9DAYYHGBwkMEgg9sY5BjkGQwxOMTgMINhBkcYHGUwwuB2Bg9hcAeDhzI4xuBhDAoMRhmMMQAGLoPjDMYZnGDgMTjJ4BSDIoMJBiUGZQaTDE4zqDCoMqgxmGJwhsFZBtMMZhjcyeAuBnczOMfgrxg8nMEjGPw1g0cyuIfBoxg8msFjGNzL4D4Gj2XwOAaPZ/AEBk9k8CQGT2bwFAb3M3gqg6cxeDqDZzB4JoNnMXg2g+cweC6D5zF4PoMXMPgbBi9k8CIGL2bwEgYvZfAyBi9n8AoGr2TwKgavZvAaBq9l8DoGr2fwBgZ/y+CNDN7E4M0M3sLgrQz+jsHfM3gbg7czeAeDdzJ4F4N/YPBuBu9h8F4G72PwfgYfYPCPDD7I4EMMPszgIww+yuCfGHyMwT8z+BcGH2fwrww+weCTDP6NwacYfJrBZxh8lsHnGHyewRcYfJHBlxh8mcFXGPw7g68y+A8GX2PwdQbfYPBNBt9i8G0G32HwXQbfY/B9Bj9g8EMGP2LwYwY/YfBTBj9j8HMGv2DwSwa/YvCfDH7N4L8Y/DeD3zD4LYPfMfg9gz8w+B8GfxRzgwPnYHGwxbzg0MZhDoe5HOZxmM+hncMCDgs5LOKwmMMSDpdxWMphGYflHFZwWMlhFYfVHNZwWMvhcg7rOFzB4UoO6zls4OBw2MjhKg5Xc7iGwyYO13K4jsNmDls4XM9hK4cbOGzjsJ3DDg5RDjEOcQ4JDh0ckhxSHNIcMhyyHHZy2MXhRg43cbiZw24Ot3Do5NDFoZtDD4deDns49HHYy6Gfwz4Ot3LYz2GAwwEOBzkMcriNQ45DnsMQh0McDnMY5nCEw1EOIxxu5/AQDndweCiHYxwexqHAYZTDGAfg4HI4zmGcwwkOHoeTHE5xKHKY4FDiUOYwyeE0hwqHKocahykOZzic5TDNYYbDnRzu4nA3h3Mc/orDwzk8gsNfc3gkh3s4PIrDozk8hsO9HO7j8FgOj+PweA5P4PBEDk/i8GQOT+FwP4encngah6dzeAaHZ3J4Fodnc3gOh+dyeB6H53N4AYe/4fBCDi/i8GIOL+HwUg4v4/ByDq/g8EoOr+Lwag6v4fBaDq/j8HoOb+DwtxzeyOFNHN7M4S0c3srh7zj8PYe3cXg7h3dweCeHd3H4Bw7v5vAeDu/l8D4O7+fwAQ7/yOGDHD7E4cMcPsLhoxz+icPHOPwzh3/h8HEO/8rhExw+yeHfOHyKw6c5fIbDZzl8jsPnOXyBwxc5fInDlzl8hcO/c/gqh//g8DUOX+fwDQ7f5PAtDt/m8B0O3+XwPQ7f5/ADDj/k8CMOP+bwEw4/5fAzDj/n8AsOv+TwKw7/yeHXHP6Lw39z+A2H33L4HYffc/gDh//h8EcOzAJugWWBbUHEgjYL5lgw14J5Fsy
3oN2CBRYstGCRBYstWGLBZRYstWCZBcstWGHBSgtWWbDagjUWrLXgcgvWWXCFBVdasN6CDRY4Fmy04CoLrrbgGgs2WXCtBddZsNmCLRZcb8FWC26wYJsF2y3YYUHUgpgFcQsSFnRYkLQgZUHagowFWQt2WrDLghstuMmCmy3YbcEtFnRa0GVBtwU9FvRasMeCPgv2WtBvwT4LbrVgvwUDFhyw4KAFgxbcZkHOgrwFQxYcsuCwBcMWHLHgqAUjFtxuwUMsuMOCh1pwzIKHWVCwYNSCMQvAAteC4xaMW3DCAs+CkxacsqBowYQFJQvKFkxacNqCigVVC2oWTFlwxoKzFkxbMGPBnRbcZcHdFpyz4K8seLgFj7Dgry14pAX3WPAoCx5twWMsuNeC+yx4rAWPs+DxFjzBgida8CQLnmzBUyy434KnWvA0C55uwTMseKYFz7Lg2RY8x4LnWvA8C55vwQss+BsLXmjBiyx4sQUvseClFrzMgpdb8AoLXmnBqyx4tQWvseC1FrzOgtdb8AYL/taCN1rwJgvebMFbLHirBX9nwd9b8DYL3m7BOyx4pwXvsuAfLHi3Be+x4L0WvM+C91vwAQv+0YIPWvAhCz5swUcs+KgF/2TBxyz4Zwv+xYKPW/CvFnzCgk9a8G8WfMqCT1vwGQs+a8HnLPi8BV+w4IsWfMmCL1vwFQv+3YKvWvAfFnzNgq9b8A0LvmnBtyz4tgXfseC7FnzPgu9b8AMLfmjBjyz4sQU/seCnFvzMgp9b8AsLfmnBryz4Twt+bcF/WfDfFvzGgt9a8DsLfm/BHyz4Hwv+aAGzgdtg2WDbELGhzYY5Nsy1YZ4N821ot2GBDQttWGTDYhuW2HCZDUttWGbDchtW2LDShlU2rLZhjQ1rbbjchnU2XGHDlTast2GDDY4NG224yoarbbjGhk02XGvDdTZstmGLDdfbsNWGG2zYZsN2G3bYELUhZkPchoQNHTYkbUjZkLYhY0PWhp027LLhRhtusuFmG3bbcIsNnTZ02dBtQ48NvTbssaHPhr029Nuwz4Zbbdhvw4ANB2w4aMOgDbfZkLMhb8OQDYdsOGzDsA1HbDhqw4gNt9vwEBvusOGhNhyz4WE2FGwYtWHMBrDBteG4DeM2nLDBs+GkDadsKNowYUPJhrINkzactqFiQ9WGmg1TNpyx4awN0zbM2HCnDXfZcLcN52z4KxsebsMjbPhrGx5pwz02PMqGR9vwGBvuteE+Gx5rw+NseLwNT7DhiTY8yYYn2/AUG+634ak2PM2Gp9vwDBueacOzbHi2Dc+x4bk2PM+G59vwAhv+xoYX2vAiG15sw0tseKkNL7Ph5Ta8woZX2vAqG15tw2tseK0Nr7Ph9Ta8wYa/teGNNrzJhjfb8BYb3mrD39nw9za8zYa32/AOG95pw7ts+Acb3m3De2x4rw3vs+H9NnzAhn+04YM2fMiGD9vwERs+asM/2fAxG/7Zhn+x4eM2/KsNn7Dhkzb8mw2fsuHTNnzGhs/a8DkbPm/DF2z4og1fsuHLNnzFhn+34as2/IcNX7Ph6zZ8w4Zv2vAtG75tw3ds+K4N37Ph+zb8wIYf2vAjG35sw09s+KkNP7Ph5zb8woZf2vArG/7Thl/b8F82/LcNv7Hhtzb8zobf2/AHG/7Hhj/awCLAI2BFwI5AJAJtEZgTgbkRmBeB+RFoj8CCCCyMwKIILI7AkghcFoGlEVgWgeURWBGBlRFYFYHVEVgTgbURuDwC6yJwRQSujMD6CGyIgBOBjRG4KgJXR+CaCGyKwLURuC4CmyOwJQLXR2BrBG6IwLYIbI/AjghEIxCLQDwCiQh0RCAZgVQE0hHIRCAbgZ0R2BWBGyNwUwRujsDuCNwSgc4IdEWgOwI9EeiNwJ4I9EVgbwT6I7AvArdGYH8EBiJwIAIHIzAYgdsikItAPgJDETgUgcMRGI7AkQgcpTASgdsj8JAI3BGBh0bgWAQeFoFCBEYjMBYBiIAbgeM
RGI/AiQh4ETgZgVMRKEZgIgKlCJQpcjICpyNQiUCVrjW6TtH1DF3P0nWarjN0vZOud0XgbgoDQZ1ltWXNZeVl/WUTZCtkQ2RbZHNki9RGyXbJpolwjpomWycbKNsomylbKhsr2yubLFstGy7bLpsvKSCJIOkgSVEOck5SUEv7KwoPp/AICg+l8NcUzlF4JIV7KDyKgsz5aAqPoXAvhfsoPJbC4yg8nsITKDyRwpMoPJmCfPtTKNxP4akUnkbh6RSeQeGZFJ5F4dkUZB2eQ+G5FJ5H4fkUXkDhbyi8kMKLKLyYwksovJTCyyi8nMIrKLySwqsovJrCayi8lsLrKLyewhso/C2FN1J4E4U3U3gLhbdS+DsKf0/hbRTeTuEdFN5J4V0U/oHCuym8h8J7KbyPwvspyFp9gMI/UpD5P0jhQxQ+TOEjFD5K4Z8ofIzCP1P4Fwofp/CvFD5B4ZMU/o3Cpyh8msJnKHyWwucofJ6CfPYLFL5I4UsUvkzhKxT+ncJXKfwHha9R+DqFb1D4JoVvUfg2he9Q+C6F71H4PoUfUPghhR9R+DGFn1D4KYWfUfg5hV9Q+CWFX1H4Twq/pvBfFP6bwm8o/JbC7yj8nsIfKPwPhT9SYG0YOAWLgk0hQqGNwhwKcynMozCfQjuFBRQWUlhEYTEFOfuWtGG4jMJSCssoLKewgsJKCqsorKawhsJaCpdTWEfhCgpXUlhPYQMFh8JGCldRuJrCNRQ2UTgerBKPCBaHc8Ga8KhgKXhMsAI8Npj4Twjm+5ODaX5/MLufHkzqZwVz+TnBFH5+MHNfGEzYlwTz9OXB9HxVMCtfG0zGNwRz8E3B1HtrMOPeFky0dwbz693BtHpfMJs+EEyiDwZz5yPBlPlYMFM+HkyQTwbz4tPBdPhcMAu+EAz+Lwdj/qvBUP96MMK/FQzs7wbj+QfBMP5xMHp/FgzaXwZj9dfBEP1NMDJ/HwxIORR5MAIjwcCbG4y39mCYLQpG15JgUC0LxtLKYAitCUaOOmbWB0NlYzBCaGyMf3bOyHLmRtjJFcxdjmJP8ROubRtZGUQuo8hVDK5rGxc3q4ObNcHN2uDm8uBmXXBzBYPNbSgRvZLBFrpZz+B6utnAYCvdOAxuoJuNDLbRzVUMttPN1Qx20M01DKJ0s4lBjG6uZRCnm+sYJOhmM4MOutnCIEk31zNI0c1WBmm6uYFBhm62McjSzXYGO+lmB4NddBNlcCPdxBjcRDdxBjfTTYLBbrrpYHAL3SQZdNJNikEX3aQZdNNNhkEP3WQZ9NLNTjazibm7GOxpg762eyzG8cfe+o80ypn5cGkxSaJ30i+UQQNn+c39bYx5fxT/HaiuZIxNepNxxytVa4Vi0am4p6fcaq26mYuU2hJx6S6XSu5YzSuXeiuVcmUMEyzxz8Y0FLbjZfpGdjcJw1f33BFl5zirMXaSs5MWu5uTANz/bVNNamI4tAWi8dML2DAwUS0s8EAVi28ff+5z8L937N48B6uBKdWZag1/VGtQnqrVRBPY2YpXc+nueHGqekKUKvJ6EzKqWnTdyc1Y0RoWeSdd3c2oJqBLFZNgnPKOFsYLxYUYOQ+T+FJ+GV/E/yIt9ahC2LAc/s7h+3KYnsPW5LB2OUylluTm4mVe0ITmdnhFr7o0tB1YZHvQDnypaIGoFtgMIqh0gDmaomG8vdE02S7xc3oHNr3njuvYOYvVLGzjyQgm322x1dR0GddGTT+902/tAmotvnzd7bFd2eiujuhE42dM/xnXfyb0nx36z6T+M6X/TOs/M/rPLP6UwwsvlUIJyhN0O3ai7I25Yd1BHUcD6QzRe6xcLFfot9fcIVjUZMUr1WqrMLadeuQavkT0yApeuwy7y61NpaPV0+5Y/OzpUe8URWYmOiYKE9WpianJcS9VylDkzFi0cmc6Wjl7KpEZj2XjFJkezULytHtn4uRkx/R47TRF1gpu9ezpO1Oxwkx85vSpUYoslGY6YDQxOVHITCfPnkz
6j48miumZWuXU1FnPS3TIF51ITp89DtFsbLKQGPUfr8VGC7Gp0Y6kl5rMxmeAIifiJ0+nTpTLmRk4mz6VjFJk6fh4qlSd6IhDZsLLHi9TZLF8Fk6MwqnKWTFJ02NT1cUiMo9tLI07vfFYNNpVXahEdSXjURrFQcRwPJl1eqbK1eoyJTafiCaizlB51NMy57Md0bjTO17WYr10OurkxTTX3tSZTqerK5SIoQ7xpr5K4YxXm9FyDsVjWT0iJiK0N2Sjaad30puuLlJiu1Mp0Rj1we5kLKplySdSUb29e+Kisl1uoaY9KGKTehUSCb1OnalEWqekIK4ekTBqMyheZbwllTEaGtXf0pPN6I+ILkxqVPQyoo1O/4EDBw/rZXnpDv3RgVjSjEhEq8uViEOZqOi48lTRGdUy7s+kdaL1T0wWxmpO9bjR4KhO7cOZeHS/9mBfh/8GLfZQNojVqNNhtH0gYVI4ZkQMZlNGREyUsU6tQKFYmJ5xjtIQd/KpWDSuVbk3Fk9EjZioSbeEIMdKlRwx7IKi4IuCcWu9s6cspqCTd/qzYrRc1VwPkdLv5E/NVMpjp9ya6LN4unq5+vLpE96oV8NcHX3OUCqd1Ud8Aqfv5c3lHnCnxXv7s3FR9bVqTTuiSSdfqxRq5erkCbfiVq/QplU87XQXJmvemULNdfqKHugZBrK4Cojlu3q8XJlwDhVFSdWNKr3FDIwGlRgqjDrp7VFnUBCmepla7RO31JykaOoV2qMZ89F0dU0LkvXjYtIqcSibyWq9kM+IWe8MF864TkJLoKijTj6ZEMPkyhbF7R/qRUqKobaqOcew05/BeRXSB0edwUpZLLDJ8CfptanQpNucoWQmmzNWLrGqtqyjGBz9WbG4D2i9nRPNcvZUXJd6q7+/X1vTBdYU8LMintXqkEt1REUPFSpihrsVbZz7i7WTH+jMDRmTJJY1p00sm6w6zfXFro1Ft8d8ukb1TIPppD4KKGuir7pem9XRmJZp2Nu2x9NfH8d5rI6QgbhYjBrzoxxG+MGKe6bgFZsHazwe15YRUe9orNEDNMLVlTQXTyERK+VS2ZiwCVGSupjsSyX01WV/2uASnemOlBER01lPZyxuRHQYzGm/yYu8lMEd9seM1/aJ6agRIi+Wi1KhVtCW7p6MGGIEF7SlO5EwOZ2xhnodRsQ+wRn0iLSRozNpNHuow+DJQx3xrDZgh9KJrHOrqHNRbGi0eg8lRUoT29+fMioxkIqZEVGTBxlML5+ORz2jRw2u1JtNmuSJmd1j9NfIYaOMfWZFPJOAhxJmRMyI2JM0EYrJUHszRkf2JrNNgMSMiOqLWidylT1Fb/wE8rHqEiXJyU94RXdGy+5lsgoT0tazrpSosHNwouQVnMHcQSepdelAIilQhOue0l7ROTZWLHgTOnwpiZVtzJ3Uh4QXjycFw3XHtCGUSyRj9TVUSxHLd0JyFXGjNaE/qSxiSSNJ4XI6MxJrQzR6Ky69E5PaM8MC1TidAwf39/Y4iZ7qahVhlaFwvFwSfE1kGYhpA3Egk43rOGUoS4P+RLkIjg57hjqisT59rxAT6LDLWNITovM1xppM4nsLlTNuUXs6nxTLWrdW1YF0LOkMXX9w4EB/J44DdbnsTGeDIaI3IYGQb7EZs1/L1JVMizV1rfZ2wZKdHjGAnOOirYKLLdbonMa6LdeiEET3uaVywdhRxGJxLadomBiCA2UxgvTBkEji6BNs85ToVg3CxTGFnnDyiKgqGlm6E4iYZHJXoaKtuP4w6TQZqwE/u8uVUcEHD+OyawCfRCoZHZYZtEc6U4Lg+anS6FRFLIwaQszgHPPfLBbICa35XYmOGBYoeKKODGT+7ixiHg1l+CkHSxkdr7mFiqiec1ywyJonaNKuLnZxjUR+EfvSonBvwg3Fu+Wam3Y2H8p3bgnj6wdLaarYiuYkhA/7NCAavC5O+G1zPBpLbQnFmmlHQCTvjBtGiX0JeuHSEBolw2Lz6eqG8Mp1Eob
2q9ECT3VKmORnCmmMqKsL465PocubOiLuiFWyWvXGEGqFtBVf0utktkfDEvfFZqGSIISfuLw58bDnng2rbu/0pNiaVP3+Dh1OyVCqx8R6bzzgt0+0q8W4SfpUWdsqmeBNyIDMp4ioW8Nqkk8SxgxJ2SNXpJBR2pmRrwoZHvvSobHJ1n2V3Z7SuaNbE6MZNxwqdY6Mlc+ILYBYocIqhCUlnMOtXtIpXpIOHW0pdbSFFOxnaJFERAiZqqJ3xV4ljA4xXepjylm6zW2wl4qlna5iYezUPvEPuZEOi0ycZCKp3rSJgjrMiIQRcSTZYUYY0Kq76S2pprfETXhm5BhMGAKvEcEAdDQ6kjCoMxg3EawAA55R16hJUaM13eb2oTtm5OgxoWRP1myv+ZberCGs60mL3cbxojdp9IYhKBrMGph0JG1E9DTROmPU/4i5LzuSMt4yYoqjRuLGIyMmRh/MmrDeBO2euVEbOZyMGkJcgbi7tKnTlRBRkssPHRnSk9IIUgkyC8SsdzNJJtS5nU8jVOkqTrlOb6FSO6EjH8QSPpTQxLipZNbJjxUqRVcHb10E3tSsuQzuigtFb1QsgOoylEPMMVTxikVtxfQQ0QuwXnEnPdCadahDprilKkqP9Z1/LB41wGg8kdDlFfks7nQ0bJwUzeguT7ijYkXQGj4k9t++xFMvI5U2Js5A3BwSwxljZe1Kix1leH/0xqLpJhlKXJfQDmXjWbGUT5S9iiFXiBkT30t3NMm+zTzdyXjcrF4irlRvrV6ZqNiYCb5cc/LlYkGHseLBqP/g/v6hXkN23pE1Rl7TkkNjxdgGBoRy9vboWDuN44KEj07VWHTMzaigdk38Zvo0TBrZjnQYM3cwHU2Gcac+VF0JgB+C0vdo2858395tuJyZcZmYIVQRcXFzX95prgqd5nJ1wIy4LRZNGrtTlHkOYneVS/qzKFtQZ58AqpNukfCqruFIpk3ZuEG3zlhaF8nsi6WS2jP9KfOZ2+Jm1c22dGaMiKG0EZE3OcWQSbHBpgiTyIfNenSbOQab+JHYwurjxnwElQXGyDJHpFmxno54xhizpswlYYiLetLmcM0Y0qGeTNyMaGpL2nxLU0QTj4ya08Z47ZGY8dojJo/3mgSBSYNiPSavHmkWSiXNCFMPZBJo5LC5GIpu8IxFKWogpR6TffeaxfY06RdFdxvQycRB3eaEOGKO7iMpk6zmUB0xCx2JmQ02x25P2sSOpny1N2PqGk0hpGeOiCMm4btxAIQoKeqS/ME0kixkz4A5Mtuz2HUxnecNoCx9j+C+5UrYToSULGlTyZJPIlQk/Y6401gVyu4T9e3Gfm0p7BVIKuHsdcVS2KQDSBibiLr0f9jb4znJ7aGpYjsq6iKgb39Y3fd4NVyo06Hb6jyp/LLJrLGuIznyNbGrFdsWTbSYzaYFJjo+VXVxn6RynsPeqGBcurhzT6E65pVQxKoWgjrhupbkkIkXDdVodyKdjIftJ+PpmL6Bm6pMiW1doejEU/rrsnFFoagnudUJt+Ld6eISoePIzjgKSg+UK57guhqXLpfHi66v/cyHdGEiaTTAbFJvHGM08XUqlXZ6RavGK66Amy2GbnfPQKfkMRrgTWaQB5ePe0VX51dxAYmc8KKGtg2UR8UDzlCmw+x7MWelxDmtc/t+sX0J0FJal7Qg6QVbysSGdHHsZPms6Kmh6ZozkEjp5hXDsZRvA6LJ/kT7RW90xaPZEMrG4xrVgkhfNaclocSvNDU1gdpoHYnkxNrv5KdccEO3+fGw2M5Q0UhvMmw2it2L6wuVwkpKhEplEpIUYbKKpNT8hSj2JVCkV0lTg6HDmqJcXR5qM6KzURval0imW0mpnA5ZjRCJkkwOay9JaaSMUt0QSZlYs5gsTjKrppzxMAuKzuLkiYKzOZ+JxraECWsoPTThjLkS+aKpWDxMBEorndgaorVGMp5JGEtSKhYLE81RRwuShortkrMIpnxpn9qj/RnFpgG17f3OhFf
ynMO9rRaDjrrWumWGzPYo1a+FaI0yhJFdJqJEONEXOlrFgDpwZChsFBEl43LEhvXLGA6iMEOFMv4IFckLcuUny5VaqF1CEhu4rdPpy0ZT+VagII+kSs5GirwgRYdGyP5sIhrrr78nQaN2vgp+wmbJrc6d5fJEU0FRsyBNeTjUf9jJ94Z1xGB50snvPbytN24iHpUHieVsMIP4VVP5lCcm3FJN9EUukY12h/XVwBTK4vOpaCwUTuWmxkcl8ulPdqTDoEo6rM6BWL0fzQNCuM+gtEKSplGCD8bDxi+tNmIOHEib3K5TLDQ4QTKILNeHkwxHHyKJtIDJIe3GQRhHkxoDAQ9ns9lWCpK4jyyTWJ+Q1yp5kEufv5hYKBzLTVVPOAMoxAypd84VAGFUwJnwlhPNqPgDYg+lj4aBQulEoVYrlAT4RHwd0m95v0vSyZRhrRaPp/SJiQZjxUIFzbe8ZFqHAmL/jiIB0Y6CWUoyHsouRcWcLlRzh40W7KtOEKs58jA03tIIQ3NLWzVnGXGTclSIaW3iy4wOEDAmLtBYQTez68+iwn1PpVB1KyH2d2JECRAXDt77yZpMTCJdxirAflfYzBssFmYEbEr7Vm0thxvZvZHATdPH4hKccKokd1vbnODPPLQ2wnfXjZVwKdk/VK+dsx+3jCFjVM5NNA8KqTr2l5x+aGOl5ehHsaJqT0ZMsAU+TTj7PfEanwe1WiFwcYrFt4fydC0DFtJiStazIHtoQeoLLca3OAsjTI9XlduWvNjfD1SvDmOdDYtJMj/siCXDUB81DJFcMNfDd8PdBbGzKYhR0tcdD1+z8h0+bpM8NIzKw16JWtaXyMRM47R4oj71+lDdWGoty4wbe+NkNNWoRNhSt0ewkBNIqmy0ek1IsmibHCC+hW06G98f1sRGRswTrV4327KJKuQOpxc8dI0J1TP7pnuOvh+VDLyVuZ+0Fj3cCg0ltGrjHEk29U+rMZcIuAnO9PPmiYXKDgjECvSZCh1EA+54wUltT2DFoq1zJLdn0DQ0Gbpe4BjqzyRbCDZwTULMHW0aIlFliITRtUNy2pA6UZci+JX7Qm2z7E472Si+MRtvSkCPgoSYT4aNUjwTD8W6ErPmU4lwYVMdCGVQoqdWM59ON0xLA2mGRh1COQdhplp1Z4jLhb1fzq3+bDQTNW2VDIPzbtH7cV0l0L1323Aqa2zJ49mQLfmhjKhszi2Vz5Z0854MyixqbqWE2jFdgRQLTMBMuUfMVD6ZCqTeWCxmagRihrCuM51K+woFrQ8HyMwJDS6dqm5STga/vRNiG+GVnc7KRKHkGYos8x0DGbShG/SmDcXGUDye1Wd/JpGWWFnXL8QTaaNyaP5FldMdHwZMPVVXPG1YxaK5o9PbfVB7rstUMHXFEqaVKKIqtYf3YaP2T03rgqIBU9JLrF9tpJcNJD/6gyi8VSvq+2yMajPIV2vqgmJDDLzHpP9+MeoE0K+5ru6w0WdKg7uQfNroyMTTaJEp5p5hgmBKmk2J9x4UFYWso9LInlgNWtrrEqKcQKViekxOGkMd5frGOpKId2kLpASOe4oFwev6UwJIawZ3CVSqk6GmYUcpUsIXU4nhvKTADTpKFpuWPpUf6can6bq/hTetrcI95anRojuE4iWxgKdNJ4xYuBOGWrYUFqL/AlbZ8MQwnw8Fz7SWH0hHW21aBHM9Qs4YPWGiL0rvnRTbXBRzl6fGTjS7ZgygNXVydtcM9JnJtWKxqFGQfgbhkrUBX32Au5eQFh5Fq0p00zCcTXw/iKEjQ85QIm0qt9FmfJ0ekzV4iiG4jzUUEjl9N18unSnPOLoNai4dj4qd14QneJgqrRiquCUwLCsCB6KOPl1P0xHrMCXhKcPWplCpOUMdWd2iFGeaoImYEjFDOqCJ3pH1xROt5SIyQ7QzzNNCF78PJOUcLrqCL5zQ+rAbKxGoQpyeQwfzJnPKNG2XE4b3hal722dq5/aZXnPIKXV2kja9LwTH0rd9g3u
3eYl4kowTdf15zPTTiJnrnmHhNZQxnCO7TU2flzSEJ565ontJw6bIZ7tau03Ph8G4UWxn2nAyGUqZ/hjJqOmgEdU59FBabDy7xCqr86ARUwXe1+R2YbZ6wDRuwc2c0aQmpmL2/h4zR2dHTO/sEdPwbcQ0fDtkVvWQ+RbPdLvwTJX3HhM/9GaM1/aa3p29pvFZd9KYSUOpVLaxdOlJHelGUkL3uSBjm4bPRYfWf4fQXGivWKpL2kOHOlKKeZbp9eEv+trykSMHBoEL3NI4qqWa+MGAQM5iHajWxKbIsC5KNWlIAy1ZPKlvjMVaUvfa6pwsF4u6d1ZXGj0JdCDd4SNIU9uK4y1fZ0a6EVUyEZjjoRGVyraH0V3Kd+WIxQda+3LsjWnk6Uqj20GfV66Mh6Pk3riJMMXylzJEcsm0snMTnaDrZzozmZTAPRUUZRqGfKYRy1BMzHQd2GXTzsBM7YQ3pjuIkydM75jhBZfEymr+5eloWnps6Eg/iTaH+72S2G6p42sgjgaDBNiH3LETpbLho5EIfDFMiiSjxlBpeG3oe04xnUSFQpl1VCGiroQVlY0e8tNM87a0jsalzmvosJZtOItmZxpdU5k0Sp9Q0qDRK5P0PVp1H8dEOhh50rktZM/fmQkM8kMADzpK+Kmausf31vCdBHTGb3hytFK77Uv6mtcQaQu5b4Q7biRbOW7scwYK06EeB/EwR4iRBNJF7DJRNO4LkkKfTs7iRtGZVX1Cwpw7Qr2S99WJGiYska4Zoeq1wKUl3NXhwHR4B6dncccQLZglMTlbYt2RI9QfI6178CRadXMyZGDVvU80NuGnhYJ/3/PCJ09rF4StUmbSQjUuXpwJN9vxU9G/Iez1HVIDLIB7Ntkf1juH5X4rzMgg00qu3tEQdSaNwYU2HFRgmJlQKnBDaqFs7nSuRRdlwYtDhpgv4kq09vFACoU7v8zm3tGq+XndlbnbdAPuNg/e6DZhp5cy/EtFhOLMoWPWrIGSBrOmbaV5yMSRZNKMMA3nTBPGgSZDyaTpTNDkRpvJ6kafI6Z53kiTBzD67hgGi01uGqabgwkbu03r0+5YxowwXoNuGsZrTOPSbtP6sDeRMStiUsSkWY9J5qFUPGv0nWl92GRJaaqNR8zN24jpmD5i2taOmPUYMbdTI+YI8ZocQQ53NBvy65ZneLiJZwihopnkUFNUdEh30UgKnD8sN71od2jgqmiThV7ckIl5YoMtOCHoDIf8KXyZUuGUq4OwLDmoosmCDjYy5INRrXmlsRra6GsbA/RZzpXHp1wDTyOeKRfFQ2IrbHgnRI1TVVJYVWn/c9jYrig7mbjhkSyWhzO4HIBr0MYUcA+QyFtDq6icd/a5NR3xUazcAYlNgHniUNLoadMDC/0mzE6JGs4nIgZxKGoZp0zJaNL0lEgadMqnkU7SHWJUSxkgHwoSlMfi5tqRNI/HSAcy8KGznnFagpdKpB1aXbWxmEEVP7146OCh7r36u9OpQODfs083eEjV39TTn+82Jmw0bUbo/KC3yaDe3G8f6Qj3ZhRoCrl2k0sGUjhEkSk1dKQwl2q6DIqmmvw5Qnw8zArlzHUq1yRmMCMOmOv0gaQh9qeFgGaHRnY8hUrsWyaLYh9X0bcS8aTYohXGS2J4q93RmUmnUQ+ACjHExMZoNoU+sZSuO+lXDmwx3OyjqWjO2Daa7TalOPkmLw0zYqjpEZOTDDVbyxtqiOFCbeyEM3hCbLYN1UeTwX+TaX6T94Y5HM0KHzG7Mm/Wr8d8pKfZ3dJkW2bFejKm/5cgy7TBt0wwZFJupEnsZHZ/r8nqek2GesSUGx4xnTG9ZnlYU4Qpy2oiWUdGx0+DadOxqnvvti6TsCOHDf44kjAPa+lFVxrNexCPvdHHyWCmaVHuiBsYqQlXmnjuiHkCz5FUkwuqCUVNYXBPE1htBjwmbZvcPppOp2nyFDFr2mv2T2/CbAuui7N7eZgaF109YBxKlU9kAudT5MHagRWoQtM
0aJrXYjyVjA/7ujraC64xU9VEbfFDr1pDyIajfb0x7JqOqzoTun0OHEHCEvu8MiKXVOhedAC3mmQyGbYPQ6NKL5nMaoLOXLahwOsvwVRxXD9Ip6dS9oAO+Rp3nX6c9bpQV2C3vqlqzTy7Kp006RFL6vZDAoskor1SeSdudeN+xJHdlcLxWpgEQcAzjTIkEFbOLmvyUrk1ePJWg80JMLSHnEOM2qcMU4+BWFaR4Dp7y5OTXqm6SRtayjljeVP8qta2MxNr4FpTiyuKaVjXaRLZHAq1G1LvfmPwqsKBviYbafQ2Ucee4TSF4mnsB12gnc8k/UOLTCxLGm9tzvbG8EQkctvJexNO/oR3XEfGgQ/JYcHTElkdTsqkgak7vVMOnuepPof+VRJZdcWSWQNxoC2qdjgk+ryMuAX9VLZ+mnfuhC51il2AzeLh3lC7+XTz6qDZ8qCwMczPJNT7pDPUebjTdy4IcR4+3NqISApNQ2c+2c7GWxqC9qJYMUQz77uTSPFNC7v8hvVhmHiLLMsMbxyU1u1HMVTIOS+YeD4fgdieUCuCpBS1tTyDRdp+JJrdzaQJfbylOTZZ+4es+rF4OlTmGLiUoMFlajhUtpVs0cbN5bFaYduYKKAlvTtaWKiicWdXvagEniA10XKNoDJae41kWlrsNrxKsAItbClkAb54Mtx3hvZXLUyPO3zixRKh5+WQc0pYsdIKsr9Vtck9hKrdSoxLHiZhj8se9aTJbtisHA6VtyYD088QCTcemBU6IeuOXq09fgT5w2o5WHEnUK3Sn40bp4WSJSIZOOMZwFq39Wdi2RCfpbCulfPEN9aNGlYdEhoFEwXFSal4mOHzYQ/dc2d8NuXsT4R4AyS1+oRaPTs5t1iYkUfdZrMt1y7RGrIvDUn3Tav7yJk/TIMz5hsm9yczoS+g1SYuSJEMt8zuJP7uHCFvjOiAjllIUZyfQMuZ/kxHqA+Ov2qjs0PUPHgtoR+8FkahmlssFip03msLq9+G/0E/ahPP541iYL/OSkV0wAAad4fabO3HM/oqso9yKF0PG+u1WsErIYQP7eOuYuFOl/o4ncpqeEs17Wociry3B4UMadNiLxXt0obYoY5MUrM56A+boP2ZFvZk9YGBrizxUN+kyUmBQrCEUOepYGsjGjaIp6gMhbqPiDbhcZR0Smh3iD0eWrzh7qqlr8oQekK1TKVpoQl28QjgrgqdrIinOZ/PYcUY9AK6ppV1xHf3MVxyErG45KNh87VxEPVAK8aS3p6WRnqZWOiBZjRehzLpbNiEHvD9ZwfEFjTf0kpRuva1mC0NcNga/uHjIZ2ue7IgG2nBfQMflFnT0V0mZNYF6cjoEn078C0hw1PJFXoQwbRXnnByGdO7HQ8NCAxc+kNXVOSSQceHMMl94daZNJfiOJoz0rw9AMy+6b1pN5s0jhb1s9N6E+pxV54S3dInNrr5sF6Tnt6ohAyrHYINpJWYhaloS5fqRNgBAoaPuK+EuM0ZimeyLXwhQvxOVe4slcmtcEGCFLDn9SAJt8ENrD+HkmLRaDn60+SOsWeWKuDUCOH6ZHAauIthV6dD3b18locoIo+mUU3uJIpbTVNavEWa2lEtfKWPBt6miaiOOdGhJBOVjna5phQ8owzXtJy+oSaDHfT2bfJBiaaHwmYcOTjl0ZbaWFJjOhSSw3RWJxJ9F0PDt8ctTk27dY/BbJw+Q3PYO1PGA2VIGBf8IAuaRUbEVvKYppijYlArv2Lxvkbuo8lE1Tmwp7tRYD4mC5yvZmm8PO8f2CvflYrWzJfF4p3Kr3S01ij6cDzmOsm+RmFH04lGKlVFS03KlSUot4qbk3hsi1q1uNIyLL7xtPhVXaD8wKLrv4/Go1Wnr/EoNkQlWLRPoXBKEmSeEqGUlI5jyUuC395t5TJNhaRaXqzaeCKfJSK0N37jz0XG67YqdBCPz1dq3qcSINappXU2auIX1Fks0ld1nBp+S6ejj/4k++gDT11utUbf1pGwmuo
0WEER135PpGCVCe1OkiqF1u9DaILuXHt1JpFO7xLL0w1R30vcSEi2SIjLJ5Q6x/TBqYw17KVFjS6MOtj9Kl2i6mBRujufNsZZTC1Vnw1aUlqrWVTptSNJGkHKyE5UlTGSjLraAMPa5nvV0RhV54EcUUuUytPYjm7RXu81Wp+P+YKiuY02asNAqczhWLZ+0gFFjCSng+IXKTWWw2yhEaOMc4Gk66O0L2bMTYX24oXU0/W3b+4vAY4Yr7ClsVgIqOgk8lop+XStkd45KTJgLRsvHUlh9nqOo9lYt8yBY5ZmWkIZ7wmqPz7hNM3GTDLp5ApjHq7ovnFgoy1ZtZ9FKWoVR5Ke2tB048ehuLIMUP+Uld5JqrRSh5j/qaOm6imjISatxLJbGuUdSigvTkwrP2IKvYjEjT6mn9gaZWik9T4eiUkby0Z1JAnrVFpkJjbqNKK2MavOxXTVeA6H44LGOw1axVRyT6uESAaEUOgXbzx5VKHKiEqVo77fhezSeNrgaNmY0qkiuZGEtVX7O6l0QUylo7RbNQev6NxGHTLqj6zayISUotU7iWTwPV51EqUjAeWV6iakiaNf3YSTn5nAJXkmbCwnG7UUhMadj8Lm4tpqJ8pd2qiTs/lwLBNL9HRuCYntErGNxS2rD6Kj2bRKzoTCkmMxbfpoJIzppQhCqMur+uBILKr+8FQix/Thn9d/alhEm4eZhEYXTxnXMQ38KKuwaPjR8lTthDrUFbYjinEO9aikiDVKPZxtgJkjcemwp+XG+jVyp9WErLJu+4+qb1GX4rQKHFLKCDystT6ZULOlVJ6rAhexLCk9lpTDSY2gqjQIgAM4pcy8rMwwtxGhNFF/0X4NE+xXEa5ZSFpFS3mTHKm8hrz2a786jcomG2Nd/nZ6i9qUEaxVrUvKrEtKrXZafVksqYzFwzFUMDXarozaI0mcBgrQEDm3qsl5Z7+S7ORPlGtKcxN5ddlTl+R4XC0F36/icgUECUzvqs/FVHibNHBNDHFNY+1WeltMKWMrkq2anSNYbYMmCZ0mieqIHXx9cuIbz7wPwzOe5TjKbeM+NN2/q4eQKP2Z82cO0tqDu/vFw894btMzz3j6N57xgkaesFhx/xQMZhPOF68miTdrMfU8L1ArqNRQqUczeUJjg2LCIvVGNkiv3Ws1UYlFr1NJ02iYHttIuL9F/LMuuBZNPdniybD089PCoLPWULUu9X7TwguC+Bf4hPLv67ezxWkFNdKNaOVB2gbGxsSF4XymfZ/4V/2KmHXy67Qgv7o7sxe/SCs/tHuO4Xdq5Rd4z3G6j8iv8dIHbzGHhd+s9XPYdB/xv5i8HL+YDBHWt7fWVv9ssthw9u2VX8ldIW7mYip9LRfLirC7I/jZ3FMWq7yI1ebJpxh+RvdcG/PmM1jAYCFLwSIGi1mqJiKWiF+XMVgq/ixjsFz8WcFgpfizisFq8WcNg7Xiz+UM1rHUuTnMizBvLsMnrxDR4s+Vopx2dnIBqy3EOsB6WfwG8cdhsFGkil9XiV9XM7hG/Nkk/l3L4DrxZzODLSyFVb5elC1KXcxOLmF3i8ZtZXfPYXADu3uuKNM6N4/VLmMnl7K757GT9OXuc/PZ3eLd29ha2E6tfY/doMkOjSbt7O72gCZv5QpNohdBk9jsNJlD74iz1KUiDTYuIRrXQf8eIKWWG5RKEg1erFAqZVAK0kST9llpcrdoSaZBmqwkzc4WpOFEml0PvH9vpFo/qlHrmhigN8nvW4uq3yy/Z/0u288vG7RbpsvOPv2FWRJLl7PWifTR6FvELD9AH1muuFW3RhwYXQSOJbbXppWfMfqJ52PJr0lP3O7/3YPH1Q541arYP/tRd3hx5n+JWX5NekIsMiSllR+mnnBu330HngVSnirVnANTE6NuZTdJj/0C5LLk/7j55psd/zaHO6Nqj7icqNUmqzt37BBVrokXV7dPzBRnThUKk5PV7WPliZ0dHYkdhUlvx5nEjrLI6VbOoNPBjj635KI
X4MGhwcNSgCieKHpjBdwp7ThZLZdI5IXHlEzWagg/CiVAg7kawpQeF0s5KGHKkW3dRc8t1fqhmhE/94u370hsT21PiX2pqMH22gmsl1f0TrniHbuc0SmvCDszIrlTFunknER0C5V1SFRwW+c4ellsCKnTLmfsRKEimnrToaE926TxK56kK/JvG5qZdOm77eg8tVhJ2O+WxsVuBOXmLYhEYr695ars5Vtdd3JbZ9E749Zkpwdfo6ds43d6k7T5lqTZ1lsaK4Mok+gCRJd+oAf9HxOuRzJC+XNADKGiknygMOHSWxu0UEo6WKWsXrV3YqpYqJUrJJdMRGuISsvVw24FrdWpp+QDNH5JEElZKu5xNNwdk78mC9XqMVFbV363HumF2c8UilMudfEJt4BHZ1AyDQGMFPWrFsal0lSMFqeKp3bWys5Meari0KucEg3c7U0D++DgkJ/m4Gv/lPF6uFD0IBiv1M2JRIoqWm+QSC74rT495VZxPKJWQxDJO+4PIqc6RefubG/MMb+q/aXJKdTOnHX6vOM1LLO9epNS3bOFIn7aZZba+jl25N0aEpqqhSC/U2yFyxXvTqoA9V9MevmGF0m9HxRA+65B/4czNYkUgO00uuW36yfy1KCNRIDeaa+Ww27ajO2mV4kxjYOjOlOtuXKYlyddOZArorPlN+WL5aqkYA9SEG+K5fGyJGXh7DEPSUPRJ8oTcoqNeZDDMZ3DCVhbWB9+xyZoeGOpewrFqhxVVelTQSVMigrlbsSH2ug1BZCjreZO16hksbbm8MWbsXjCYicpQ2GqVqafx2lGFGjJhGNiPcL2yUFGBUxVihQlxzI9O1qGGUo7647KWojxnMPBuJkFlypSC8ZrSFqxxpTG3aAPpjG1hHn4Im7zheL/drouFr/a+ALxb774J+Pn8F7+EL6OL+OXGbEVLXYO3y9KC2La+SILr+3KlRAoUqItQKDfEc1C0Ml8BOqDTq6ATkuCThbAUgk6uQI6LQ102ggM7AbojLCTgv1GJG80EaiAnd48BBEC86SAwEMKWWkDQCBkaAU0BeCY2YpY9eR8Qq+rECDcw7kADxI8CaRwkvCTQBDDpauwfoibVsvKLGYCc9Ddknp1ZzYwzLGWnbwMEYZAI2sFHFl7j8Tnw6W5WIaS8Qo145WzZFyvZtwwS0ZHzbhxloxXqRmvniXjNWrGTbNkvFbNeN0sGTerGbfMkvF6NePWWTLeoGbcNkvG7WrGHbNkjKoZY7NkjKsZE6EZ97LpTlZbymrL6hk7gow9d2xiiELNV4oMApMGpZ2+xR4+T4nJkBJTaolJvcSn6iXKxqTVBzKztDqrZtw5S8ZdasYbZ8l4k5rx5lky7lYz3jJLxk41Y9csGbvVjD2zZOxVM+6ZJWOfmnHvLBn71Yz7Zsl4q5px/ywZB9SMB2bJeFDNODhLxtvUjLlZMubVjEOzZDykZjw8S8ZhNeORWTIeVTOOzJLxdjXjQ8Iy4sJ+h7qw017sobgXuzQ7sBzaVoZuw4YVeDfpf55kFnwXZBFbpxpuU/wvmuwpV0i/VBrfXa7e5G+SQjdUuWsRMeEJv7nNQXX+PHunXBeWjxueFu3KXR9UoLHZyW3Fmt2ACSHbm9wuTEA8cvvGO5zeSqVcyfViFJoH3f6NF7ziDqfLq9ROQGFml3PXufbcLYjfMP8oRvcEXzFvZB2suJNyc0IP1JYFeUURg/WNC0F4+RASXEJJ+UC73z7fZXdNPaP0ohssFnArJvMiEB9Xo0n+LrMHeRAB+l0nE/Z01dMQNR4fFY81mrtnqlgsid2bzICo/biIof1cIxP2iZ5pSsQYmfbgGVxn3YpSkh9DBgNqJtETMtf8ei4cIEvq2bq92kyjymN48m2D7IMCfXeLnhgX+xG3KrMt9lF5I16hZL5Wcd1aJwBaKzYoWVWja1jHyalRMdZxx10Yq0k5RlUWTQnD7mgVFUZKo4MoKpX2OcEr8Wwi+a42ehcOndWNNuCOU77Bz4U
7hclGrEIzsWX2alMQFIeUDaKUcVUsl8bVbPNpZ+LHKdQbpv1aN4pqKjN+3qX1fZwfT4dKNGogN58uOI0h61WDSKVV4qGxijdJe9RGq5RYhXIj3iS+xc+HO6w7ZYxSV//rGkjMqarT6LkxNV6pZ6dYq1wx2pVCcfnCidIYDb0TBa/o97qfEbd+rhKt5B6kntd6anmLkdJ4KFDv4wxRCSGjad4sa8xzOunTz4Z7vnF59GejWQNuBTdzNY38QaRSkuxapSR/b77KaI0z6pWVUSLb0uWVlZ4cKtcKRQcX7IBGWJ0axnZPkRRHrLX7MVnpUn1QzaWOopgqTgHc12/cjLlze/HSj5d9eLkVL3vw0oeX/Xg5iJccvtMmksj9dm4IL4fwggtcbgAvBzDXHFpJKhOFmhR+4t6XHi26pc3YyhwyytwRvBzFywheHoKXO/ByDC8PwwuuQs076oX+onfM50VvwbQe3MpzO9K8m15E++k5/GbxeyVfJa5iRyzibb5B/Iv8Bf5u5Nc13f2l3n1xf30pAU7sup7qRZH/NXqq8+sflolfyxmsEH9WosggBatRDpASe34BKVNit486GNQ/WCibqEllDOpk1hs6mQ3yLY74s5GUMQhPr5Z5pU5G7KhTpJOJ4J5Z12NsQcWF2CA39BhLUYNhal/EpllsjrG1j4mIjbFsY9CO2dRPfhu3z97GCEOdyA6WumRNnY/7YNpxi3/U8ni95Sv9lqMG54JanqCWT0QaGpxVTOysAw1OXfmkJicpuRFVz7eAtVJfLWR3i7CAXnY0IjbRYuuMubI4EHde/MDaJYl+Ywuiz8F3if3ypSO6HF9Y6s3iTzvuses9cQv9C7pg9cV2QSdR5RalC9YwsRknGot991psi/ghydkju2YN7rRVNdrJiChOds8eLeGeCMb1aXHPb5G5tIqFJ9D+DtnYAdqZ/RnVbFeyupTcoU0L4hTUXLjTUsLbUAJ0tFC0xX1FGzK/huJCqiQuUHGRk8qIbnojivLPliugIqkHxV7xASvbEv5XBEKVbaGEegB7z22Yuh0vO/ASDQpotCsXwwQcBQTJS+VaXeczVD7llnIJTO/ASxIvd+EFt7QSHN0U0KBJwUXg1inIrtpePfrAB0KgwWo9EiRBkx253VinW/DSiZedWDsEq3VNkFd1zmBx2+VuVQzug0VwgjLlbkpEojoriNzY0M4lJpyci+o/LYO3QptDMZpDS5U55DgDeGTUxo3VvQoVENrNRgJK33GeRnek5XlaYYXVlhAcLh33KhNBAbQpKLlntd/lImi/q954yQXq/roeDRMdOfsF8VapK4TcfUr94EbyRAqIpVLJ2ekIjE7Sg2CJcBCfk7G9K262b5fbGUH+PKrLcFI6Z3FIVc6KTaXcKYjEeoEHyjWfsJsv1+F8GNjfr4F3CfbrmrjcIF5uCzYAubw+yhtbABpgjwo2A9QFk1NS8ZZ7NAL4RS1wP20RaE4oO4CHatsA2r6IvkD9WV2ZSZ2CCWJPg7+btwm4vWko3o6Nzhyj1xwVG4XqIzEjat9C9gu69m0/P+Rr1fD+lKJhW0jX+fRX5se/mGtGybWYdhvtVhtdG3dtAdZerGLt9/4f1v5zYG0T7oTaDDWw9ssVuLMcTXyaEOf2i4fiO84HxRG4RS8tFEcYHqd/RJjEAydMBxHmCTphkgZhFIiNyalWUHyhAsXT2phbxO5ehGgcXzYdEThcgPAVhMLFZdfFj7sbJdFvag3FFxJovmREXyjBt6jhLQ1g7vdEJ/0LumDFxXZBF1HlmNIFK1H3RTTuYSdXYVvqUNwH4CtNxP1YH10vN2H3SyIYt1eLe1eLzHUobiYQFO//S0DxtQYUl2ZDTfZu54PhqsbmUsBwRZb8vxuFo+fzgwqF5x6HFwN0V0JA9wVYlS3xERPZIZEh2cYHPhJawHBlJEgU3pEOReGtDcouAohfoWDL7vIEAtsHisT72J+GxM12dySTLYF47olIhifh5cl4eQr
W8n8HsMa0Sw6sw8Tq9+OlAaMXt4DRuduZJkPXEbQceU/Dy9Px8gwWJldf0QSY5YR5CSLm+x4AYpYo+c+EmxequLm37f9v3Dzzqkg4dH7pnxc6c3rVlRJWSLO3NkQwCCs2KLBiITu5iGDFYh9WiF0wIguH7MxWI82Cn1dRvb8TERDnAhwd/GpeI6u5qSW89T0crpWwSWAliXRqBHMQJmHxW8Sf6xHupBBb3CCbv40E2YSgo9KlQsezl6bVEuF+qgGvBLw1nBxSF0GP9Oz0kI4fmbrjx59MFmxGVhpziX9/Nirtkk4zDSoF772RrT1HYu2LHuA3X8jG6FJQyp8oCM5vrNNkxcXTRKXHbqLHayIC7osR04lKjtoqnJLSMEkOoC4NVX8iIorygXsQ982QuE1tTXGl3fV+6aEXr6A8vfV9mC4xX8qaShgurSE72LkmpBer2l9OkF7n5okJX51/8RL0zosGKgFQ+38BpBMSDkPoLbHWxcPzMHSNWvZ22n0NKpDaGSuUnFHXES8WWO1SORH5vjfp2RxtLm4P0dg5pPCSFheyanLBq+WydUxOZ3IN5J0zKi4fE3Su4KeexyqudJq4hul7UOnx0S0Fy/KhbtpbXFIvlXg2ZmwkWu8hJKK++JniQ/rGPKl7yMToZXrfBxi9YbJCzyOaroP0KxRaSZSuFnHXOfIy0a0BqzjEh0642uYNFRWjLhaNY80ZnXHEyBNgHj+XQ3Fie3elX8qBwqmCk3PHRaJ4sFRwqgXHGy1sbG/YwNBQcfFtWEjtgcL62SF8GOi/P0D0RNdqrbJ54QNH9iRDr6FaItxYRvaQJOBPEMw/4bxg3lbgvHQrEcDcajijEEy3pONJ4IoioftiC5+RuRaLmDa+og7h/X8itt2PW0DmOM2A/ov/ewThl8g5WuAdRCc+ylkvUc4GQxLpyLdsFH+uIhGk9PAIkYDXJBjS5L3XE8rZej5h4w3khIGt/WDEl3RfWBt3yDZGz+cA/sAb58NagWnRuxmbmnzgbUxRG98YaSWMJth54a3Pzt76NsKxOy9BD19SIkhA/3wd0AswL5A8AfqbLxDQ724Q4hZJiM6Ww+CBEULA6NaGLzfVKbD8YinQTRS4LyIgtSY792H46yNoI6VL0d8fEvfZ5jgE4CvCROR/aZBdN1S5WJDdq0AHDwTM9GozYegBGa/0oKxVd5zxMQupWwsPCpuUP0EQnsq0EoS3IMjFI21yFDiUz92DmS4K0IYZoTycNcN23BXIQQBlt+qUyjUJmi4ZVs89j/kOIq2A+lMDxJ17Pl4aaPuaet0ebHg794IAYbUwlpEYnEB21wME2arlinhjVwAJ49lEawMWxHvH0R2aJnmvalHUgOPrG+RqgcrDQPcKZkrESRi+kTzA/9wSb0LFuFK0RsNPPS8azr2IhUm3F9SBMK1Lu0V0tXJeHNzkhG1pDtl1BDxWF12r+BfRr4F9LR/t4pSuO2K/xDLQrlgRBBwhNLuCHKoDF+oIqs4lBK4nIzQm7o3xHG0STs4h4GvR/VwJjUWOuZRDwGE7yBGh+7kaNJ6HbFCRCs0nB2tGz7aRw7b01BZwQDBoZNqICAQcEGggJZB2S9SLjtkL0CUbGfQaZNDorT0X/fpOLkZOLdALvmQesmkBGtYK0ICs+R6r3lSBIfr2Tndxwc1rxM0Faq77zV7PkavLOMHc1woazazD2SRwtWyCwBphroWXU6YNlMNha1cHQs/mPFdRnqtb5JEvuyZ4Wbiztcx0bZAp3NFavm0z5dgy69uuDwoK9a+uLUNAJGAOWQkEVDzdyYdxJPmGLStkX8vyttfH2Q41lWAL8qID/gkPbqHi3aChDqlVxNS8VypfkBL+0qCfqnfRa29VVH/sRHVqXIAhwVsm3fJk0d0tY4fc6dpNd527Fs906C+BO31TjO7z3p3uTbHUg0JkeAmlfLvqIAHFXngAtVM+7tRJQzKVqlNxaxXPPYN8pW71GfAVeibIsJF4Ih0v2N+
zkxw3jwd1OuR7Ku50cthlvjARUwLXR5FygvmCzbp/4s6615ZXrUeSXnfABXK8ku9B4EPRewpj7mi5fMrpB1HgeBAdeMnJ3GdEzPhzn4P/vWM3pQeNQsnQHdKdE6VIZec4ngyysXEQiukg1fB/CuOKj67zQuJ7jwrhe4/By714wemTmxOw0M1IG3mKSaH8J3pNEWbwSuDdg5xv2ud8bXWVbBgPXMJv5ivrh4xcSfHrfIehy42/64y/evwi8b/gfnZ7IO3BNkYC/vcBxi7gFBKxfp0jS0EgfpiSW9YUrlpzxS95jAjuRduZYZHnMyfB+mb6mDyTRDKqu23iQRF0WhNsUGwoT87zGR0yqWWCBy1na2tS9vMq3K7mBb/LD5cWkxZlga47wVnZ5GS+jz2QM3/63Jp0IcyVxcL0IPMDb33W0ANdf3DNoBl+olANYLbEgMaMmnUekSx/qDLlEsz0T95xx04d847TKVWwmWZWYx6FzJ6miZN7vfjzIZwzl9OcCRChdCdcxlfwiDVP3I/haqYd8Pgi8dDMtcrAltiMK9hMPzzH0g/PEcP8ZAShnQRfErvh8CcQmBJjH8e9GLli6JMIpV3irwY2E2NfEdPgMJ+LA1yMejG0SUnJxUgXo/pkO473k+TLI2Aayk7EuBfwjY7Qs6QRAuKv1Yi/Mry2EFEbZlvTwF8Ogjk8PEdWeq5faRT9iIdX9+3184uUtQLkzUSxiwSi8/NL4CYwoIBr4rGTlyEhZBEm9JFPrlee3HCBT66jJx18Eh8LP2FHZroqyBR+uo7MdE2QKRTsSb8qAfYw/vQP2bCAez4hN/v2k1sUadT1RO4eS2C5xkAQY8gJKC3AmMgnHkBL5bXyhMr88OnnWzh6tskSCezbvi+VdaS0hurpwzotmdasHWFrVh14xX3gFW8GXvH/A15/WeBlCPni+smPaQHGnv+0cDDW3sh01znHQ+ESuYo38Fg9VQIzxApTteO+TKwBzkQ6wTMCWSo8y70KozGzCskkIqODqhXopSMvD3XJ/hEZWH/cOzgwUyi12kDEm06tpGNDJMpczRo7BjSdnCKakge6dL3fSK75Badwqlwad0qFU16t4Ms2DE5DIgzCbi88L+NRABy5v5CFYYkO7JMnTlS8SQXaIVQkVOeVavJYOvxRqIzPzpoI2OVeHTC6SW/sFL2pht+brjUjPrSkFAnHimJIHKu4YzgiKp9BTlYkTmZrWG+lgfWuoJSoj+KiLVCe/Lu0LitBeYfN1/B5VOpcvthqF9dmp/IrbVPiIX9YqrLPPq+yL3JByr42Xdk38xhL6vuIgeJiPM+X/M/39X21eb4Jj+COgquqiXOQXYqSKtu5YLQyJThkDlUbc4jxoj+BJhcR4KZZLiK4KJk+LTE0JSk84Gid+NMmraDmsVKeefalKrxeqmCXgokiATYgC0ItjzxeGEu6Sop2rpa2fZskspaKFzK22kJHCgtWRVZW25hiiyYYky/TIc8HecTx3fNIE7MAbZLwDKcFqI+R3iB4DNpC5p++5hvifdQi+7t6FaVDCZprLcRzztTjzTApqybt1JJ2qUk3akk3qUk3a0m71aTGMWXY3Z2BbwUNqGRQd99B40cWPt6t5VnKMK5HjQsMsZYrvhraZgL3mFJHlHtlsIhfgKhkdo7tdYfwkBUN9kAnlwZHDF3IAqw+jM/uLZ91Jgqlmd26xbZiRb2x/fzlxidoYStMoEJJakRooW+oRYjjX7zDAGoGaJ+By+GkBAmhyrNd7IGd1FqCPneixfnHEqs8aE3KiKnEotmOMH3Xpd/yNZRTWLagWmC1hIdezZD7xnZCKcGwvv6OIMdGMkCr0tk8OfdsoQIo6PF1nQJgIOih079kFiROkI0MkeqdH/JwbaWaQXt0FVPOP9YfWqAnEaKTLx8S4KNaoNOepSmV+tjq1tnoC771esxayuWz5qSPiPg0JPcHpGm/6IqKeKbmBLvt7XV81EgL/Gxp54tJ1RA3iY1Sglf
XILVvxoIuWFP0Vry8PkBYBIQmynAePdELA6QlARqJylo6GOOwxuWMlhTRSXjEbxu9puTKU3xFO6Ui6ULkAf4JxCUYdycWiqerDyMwtajFWb62okhqq/s83GHl6Uyi/aQmWkV51tX/r5tI6QZS2MK5AYD6uKjNOEN0NIviSOAjAZlmVpFkn/RGEhkJxBTwn0BxhKfzBvxn+gjyOQRBFgs29z1cQCtfUmChBGFtwzRK7OcFhForENJagZZWq1EL2NrVvgGJKZJomHoo2OUy30odo8/j/SChge8E4T/ov3mD+CcNtRHNCGQ2c1eASDgpceYgsGnYhkhEMi9AJPNJn3YN1XBeQ/pCVGpDLISvmi9fdW0dGiAyXYYKHdK10IcXRBcMnz6FdBbXI+LfcI1sUwKQsdW3M0KEJF2Pv48Kl0Ax4/fI1xp0F6uTyIsCgHvI3xYz+1t7m0n0dY/ln2hzUnT7ajprRicJ1VUkrwlkAWhPpD8XPKR26gK9U5tLic/y9nna4brKc3MakoiOOuAJ5ciqgv1iDtcUOyDcSQe/cVOEv0kegQe8E3xayerCft+MJRD6Iy8hjTKdz0ZGlmR/vT5AEgViSfJQiAuRUOAqcte5ByUCaGlUfkkP/qQFGalPW3Y6RBA8PJZwwivhFwka8EA5xDH4ogUe7biuKXbKR6xSs4MiDDrZfatXB1uKhhJ77vYvvOanb3/SHWMnyuWqW+9yaSG7qxVADbGPqvPWRIO36rqx8vg2r2Sgdarc2ZaIvoPyLJTcquJWT9Dg2rxWZ6v7A97qf6GgAEWv5FYlWyThRV0oIcULVVkmfmetUigec88UiqZuKoTXSiA+OSkYnmTY5Iv4RrzYdVa8L2DAMvcYOg4SM5XHnIphQ7ILSh31BDiZkEw6zITjzSzQB2D5xfJ4+JH61bNebezEMb/rdiMjzhEjnhMc+GfZ/mF/C/havpxfyx/KryKJxTLxe53PsOf57BqZ7VpxXcI30HVb/Z7k+kjUurZqirMLEOo3zsyXBhdcMbiQOaavx3WzMskNQT+KHixSQTGUcKQFj0epv9R2wdxA3H+StFb36PouipZsu1kB4DNZwTFnXhaoAtqQzzZMMSSjmRuoAuYRxphP13Zk1yjqXhlwn+lB0gIskAihi1Z+mS5W+LXyMAfBvteeW0x3q8UdOVEhH14sOUE7/VgiBeTL+LBIFXHHLRRr9xHj7BOMs48NE4tAd93zneJ8ll24TPoQyaT73Fr96GX/lFxVIn1tQ/tG4ugqiqMTUbr3SEx917lrPRBXD4fKg4kJXGppNTFDXLVpjd28JFiCFHj/OnYRYtN768vUx7E8kp0iNCeynkfffUuwYkhJKhzLvYf53ygRy81HgwQQy80cnwz+t4YCbtG8sOCesS4xPVE44x5DA8dTuLjEaXFp4Hv0YF4jroO+GnCO+P8yscxIv+VFfCntAGjxWKguHrdc0OIxTpLM6aW0QOxCzaCpCYw0K8LF0tBYExoqQRntrwktdYNtbObZwZoQIfVg/WMack2YE6wJc9n0ceYrAOeSnjDYIxwg2y0Zh98q8NVaK6Sqaj6tDLQeLECdlFw3xKqAuPfztC4u8dEtP/15JsKwiBPrwFZaB7aKdWDrbOvASfaAT1/f642fKIp/tepuY7I366Hicq4/mKb5pT+XvTHTpbkX8liaOzgf8mj2zIIJ+yctAA29yRvrqwCp/d/E1N08zWA8U9yDlooR+QksjyQfhcqMxDJfYmE7eH2ay76vVR+N0/xaZZrPEVN8ZX2CX0lTezFp/Enfj4UhGKrrNH6sTe1WX8qZ/iBN6u/xmTeSRUuENsdhE7ktfCLbs07kpm/kSLcbrchVxitWG2vHGlmThtsGSr0bO246D0nWhdddNeTBSH40rVKoOkCo8R7lGz3XEODYJAGH1XBLnxt8qUfafl5LX6DBdeF2TppynPVBwhZKqDUnSPX5E5sTtlLCK7jYY+sJ0svpw1zKBvyddGl+UAe
xe2/o8odPb6J1aJNYhzb561Co8jylrkP4CbjZFiFM34GTiu4eRGvKn+AwEfqxvBX+DP5LHxukKwDqC0k/eOQ0GfHXtNznMYshmEYpLn7hwBE5xItvwL/oq16olJCwTrVABpF1FUh8wgmyu+AkiSMJMoyL33RMe7lUvQHPI3IQWvi5YlGHyOLUylCY2Y4fOq/ghlSe7C4y1N9WqLhOdHs86uw/emun09c7UHWOlyuO6OSqg49KH4VGdYPqbRTRolSnUMRN4oz6cnoWX7+dJCBBI4KclImeHyqXSa3j+J9eu8HBw+UL4wWvhN9EENAKCXseQEis4N14mV26W1eU02HxPryr1qEbffEtTFGubySbl33coE4UxsSkkLiu8hxc8W9UVvz5Yl2fI/aJj/D3iWi/Hwm9XmbJ3WMYHzh3YXxgiPjA9F+GCdisvsLXMSTygQZrmOO/y2QIjdg19Vgy7SL+0KZKZGvz5BF66+WReWvr+ef7nMJnFsgVbg2W/gjKaJErXKVwBXn6xtzg9I15VO+6KZZgI7hun0QC5sPW5U1h67Kq0ruwddn/tkW59P/O4pxM/HkXZ/9TE1T6A12ovxayFMtKU3f469qFLDYv1FecN8627MjFRlaf3pP7JmPhxqK40JAMr0DX0ealZomy1PjFvQMXm/XaYoNLzf3+UrOKRyw0qCFNj2ZG+kR+Cb/SuLD+lUayTyTBU+Bb1NbsAz+n4QMv1h+xkEz/AWlUuQ/dWaWa5iEWGsWISYtzeD5LY6aX45SvtcsJLfeG99Mspo8y+p+xOyezzPdFWKc4q/yUnX4/Q896goSYb3Xf3ukt5Ca8UJazLhBEyXKkvGjtPZbYM87nw7gnjbBGZOX9uDqKVQRP72kcyiPeIsJwvYZi9cTXP0E5v3QOrqdElsukLeUTmAhyebksbHk5n+9KiEEGfkwMbTtCrTJUUfLW+vd3LsgqjuRQHo5nOuAuYOv1T0f59hoevrFhMpeYQJTgv0eAg7vOSds8RX/cUy65m5eGzDtDLD1Qn4tfZNou76t4+RZeflCfqTRJSYW7QJ9wDanPd/HyaebLuj04drwillZ5KLe8PYYNkwZyX2G+GMjzwicmfvkV+b8/MT+GE/NmmpimAne+L0uWXn7zxTRt52sF919GUublJFfGyVv/vCoWX1fRPlPauNWCWetbtamSH1uR/DSs2lSxsa2IjQOrtrrm1jY1t1KA05jQtfl1rWUbzTbi/avFGCcFHpCgV/Pt0z7GKiZdQ0uroAyK9iVKl4nfS0NOrEMz8rnExtsQUPgi5nnBKQXzyEybVoHpYQu/hclIRKTYk++2fLER2oy3y+MI6IMTwXdZqR19e1G/3UbxV4j4OYGn3OORSo20K5W00/PQYhq97GTael9BWk+PWNqzjprG9LSNatrvOUKR3/MjItcRUcoR8aYjZNJ11Z+DwP7eXhbS7pfjb71DYq9RY5cFsZtkbWYx7QueWk4N3h70EH44Bo+7iBkZsHcamfD7LwtpKJAnITKKucG3YGhYQEKeXGyFDI5FbGYH6Q46pBZhMZ51gdivbMuPuKzCb5pAWm7cGRs+vYt6YJfogV2CHXBUNENGToK1DV1z9hKu4Ag2aR85iMoB6dP2MHaxWHPcrVXcMbEc0c9wMaRvG5+XCogHEyC9hLv7uviR9rPOoXxvzgm0rrs6otL5vXy25FZyX8Cs32HBAUherSg/kD7hglfwIW6VThqQX+orS0LJ74MXxqt/gqjmgrHty1m4JEKqUb6GbIr2u1/HCwkh6GPmxJrpm3o4IoltuUX3jGjQiWNye98AAmJ81rXS+KnDZIAD7mj4N+I42BqT34O4N3jB3vKEe0zqWgx+ToLdsG/BvbDOtglgt5TpkrM/4m1C1HkpNZCwu2EHj+Je6jCsSH6sUi4WRcMksP90KyzQgOGE5I8FvYufksfZQSIeKFS8qap8H4kFcELKD9cTUZu9vH4j/vwAkcBJQgILm/wh2+lTcu1
8DV/CB+sS4ZVcapmD6+XKPV5XWHNE7teIMhAn2FReG7+MoP48UR6hhsUqaninHUgP0IFdrGA1ywcMiB9sHeJHFIjfVrfEsVndNsvP0Ub3bc1HYDVBhza5VhI29++A/PblDn61gAyrBWBYLfiYhBDzyaOf+8bxTccDCI52knRF9+ifcqdoX4zR6rRbNNzaGvCEOcT85wYoouk4H0VUO/0wtMEWpEPwcHkDUOwNAMXlDUCxrg4crkRAMS8AFPPrwGBeE6CQaRuUtNNXB4Bifh0wbFTTN1jas1epaev0tKvVtNWYJq5HRK4jopQj4k0SUFxD5G5/gOTepDH8FsRHqZEsb4EiNVovpUZBgnwXJmypv16NvV6NXRHEbpU1bnIdCM622lp/aiURpW48hkdeCTwxc5zGxarGuICkAiOM0SEtEVKCsGkfPeCwlrAAHeTmB+nBiM8ySf7DiCbo6xP+TBExR8S/YTEpEXjsknNkTQNZ4GJ4gLRjlxBe5H7KfBmIk58sTPgbl53O/6GNS4w2UL6soY3czzBDHWjkfo6XX+CFDjD6JV5+hY//qdLFB2Sv923Gwuz1/iRwUscls0rfqLixQFbQTnKwjUThYExvu0MeKOQjDxQl+B/eooeOVXETPu5KNIKDi4BIIC0bLdfO82nay1phktmVzXVgIsEBQRClLeK98oz9usjhQo5d+C1eflcvtI44mmEGE4TaJPBndZJgRnsI0GjzgYZDQCP4Zm3kguHGQ/nz6uqLRdZiv8wlAdhA+IUAGMXBVXTbHGfssZZvSz7OAqARpr94LFdkiaH2bY9lin1bADRs3a1ddPi9nN1rsXttdpykj48hCUc9/jgZq4vIx9oktAuWVoRvl2jTpklZUdJWPcmbLelrUr6imtOThBQN8tqZ73/f2NA31DUKP6bo+7gvZQw89kI39AHj5WTQLh9kgaZF9MPMhuCYCoYQBk3Z10khgSX6YLh0BYtITfvJdnaqnVW+QtS/IpD1LvAhwMlFDXiIoMc/60JAHRTZtBGwkWcpShd+XyfvsMvlKegC2Ij5KuW2qwWSIaQyvZEOZJovgdZyFNLeZ5E9/NXSoGcvH75PcTvHs9wOSOHnrGcXxifoS9aozpSCvktlB/4gYmcPXlPt3IcxtWGPvaHRLTff7M8l8f6S+/+1d+3BcV1n/dx9SbuSrLdkWbKzzsOW7ViPlSxbsqP4mcTEUZp1EiV1HWetXdsrSytpd5VIwS4lD+xQ6COBTKcFksxAhmlLGQoDTSb9A1wYUhhKyhCmw3SaMMxQGB5/MCX8wSSc7/edc+5j7+7Ksp24HVves7vfvefeu+ee+53v+32vKU47PTP3JHsOkQCUPEt3ly4c2YAUxozYckJK+vTK0FqOr/s4J7MPEldeKoBvZ5boism0rss6qqvB6uNdAsD4o2bRsF0P69xcHRx8r2bjAAwokaB2+KcpiZAiWvbm0c6hXeDkOyGzUMB7CZUIqc+JU8rB9AQ0VdqX/JAMNZv2C+A2m3GMRVo9tmL1aIAVKQw7UgdqeLcqkDqG9YSU1Varmfk+PVIxze1eDgsXx5c87zR4vSsM28ECYZpuA/e7+56lyXARvO6iHR90IEzZdd4krjgNozGjl8TAYN+mhaNGpaCzfHHQCFIrl+CgEc09y+KgJoNJCIii8VtkhhfRDK9GZSVRELPybdYj4FQfT9MdR7yy/GGPknK5fFj3RGI5jtjpggZJn6Q+SUXtukkDoTwnAlwa/o0Wp+Dj2mxSo9gEfHKT1CI2WZNLsVDR7dd58Pj/BOVASq49DUu9HWi16jFb7z9mpFmXDluDHrYGe9icQ/NQsBhyjyMu+o7g+TViucUM0hoMkqMjjdIGE4bEWWUW7g3ItWQ6ogfzCn9nmbmx0t/Z6Lzcao8BYxJy8SMqTaA6TKMmsVwUS7PCd4weEuebydUdA9QsB0gOF9bHLrmcslZK32lWtWILzao2BMrL+dPKk6kJX9p4TZ0Lyv+Tl3H
ZSy9Y6tLqzHz7NAX2EZrAPiirGno3mlDmRmSD2lhhDrnFc4qt5nuzQQliFVCCIfkaRvpr1a2Ffpl6ZugAI/LM7WL505gCrfYUSO8U59ohOPlNeLpxu+REHcVtGQ+nx4QceDpsp9T45QOe24J9OE027XMyTA+/3mcc+4QpiQAFAEZCZK4L6O3pvSwySfoj8jUpH4PJhZcka7hPTDqzLe6DiESCUnk5yVFW4W5OdBs/pfOyxKma85VHQ8gjVY2GGEz4REOo9f16krSubTyELeYkZq+lN3rCicuosa4k9pETNOwLDEulL2NKcNImKYMQRgGfuerXlFDXZIwPnBxo12XPRJ5/+HwFDk+DVxsg2aElwom5HOf3OZ2fW5w3USaH0yx1/kQo58XhxKhLAqbppE00BzO5bCYdY5iFQgrNQxeXg6we5kxGoy7NpuvIrApWSsfs5JX6eDhdmg/tzVhpy9e2W8ZzWg6ugqDYphv4U3mwFPhwkJjJMTxpOW0gokNSRo4iDiZI5WdS7KrhF80DS10md4JicxHGk3xHKDMSja6UkRmOg+GHZO6l5L8K5RmCHYxgPpX8sd4nnaTDQqzvRdiiEc6TUcsD0cQk4VdIyP46hOwIPD+0qN2F7EYNygJERcFjAGdaZcuhP1EE/5B1h7xD3ABOEDagkNVmBQPubQ2qJ50tCKinSQUSNSG7oN6jC8L+UwbgaaNXgPxWN1msdvigZr1h+6akszxn51OFTIGxLcBadXoYPMoLxgdj4qwoU1guyBu6SIhZYorGKm7GKhLwA7TWdrZ/EmoIjX5EvRfWWww/IcDw+RjaINoatA1o69FCXH++rqwr3PMBX1c4R4QVgVMlORd9wCkgUxdCDnAqIGx6RFyIigsxcSqiUaoQNoXFhRpxoRZb67A1gq0RjXbJrfXiQoM4hUzctCkMOhODJHr9ksbYjBBAs/qaYV6pipiXVPaCGupyZZyUF2pDXQ5hsdYBdZXkXvWXChnqqnVAXZ2lUNdaQF1dlaCuv8O9XFcR6uo2UFePgbrW+0JdGwzUdZMT6opXg7o2slh+pxvqoigChrpuwFo3YC1fWCtpEZMNUBO0Spfd1aFWyRAdKkxNhJoaamqp8cGobDbvWgtpVfiA+DuoWINWCDfRAIU1o5EcUENMogLEJBXUAasEWeqxCKAKaVhJOGAlZvk12muBoIMSUN4ol25QPlINlHemu5Hsys4UzcwlpJlLGFcVRqA7+0Ao/Chi8KMa4Ee8i0p+S2ymkyNka4EInAcGci5GquPFEFgf+25eDONLTOFH7yPY4X3xiHxNLuxCRpmLnKsQUbWSCZKvcx2zrG5mWU521CNcauUNFfK6VSGdcWXkea1Tfjm0gMRsNs2Zavo4GPfqyPmG13AwbqtmGn4Ce7KNthpBPdlOX0lCh+yfTZ9IZwvzM6llzr3tZjhItkYIOKsHiMU9AdU32UyHafGTzddKtlIYAD+KKo7U5ZLJS2VxkkTX2nypRvOlL1cRgJbmNTsKGXb0KYtqr6CCykWEunaxmFmSmJvEmoiOlXKLSREt7ZQTkwiJ56BZiyK5IPkgARfHTAV1uGxILL2l0cYQIu31Zf6BcvoimmQ9XSqrdzsEHgKvOpgPRTQf4hjYaeCTzGovgqvSBwzJM4pj834cFcGFWggBe5SYzHQz8R8IQe9Qd/s7eV11GeS8G8g5SU3NGiaXHSYX+sXkRcvLqsClHMWMNJc6Y2ASjqD8aOL/DeJyHfGkaxX/b4AjeLwsTvA9GBgYOOhRN5JAhrSigfcdaiNlKFSAZb/aQRF2zB4HIMVrEbHA3pYVsrDK3IsU/iQNPWcoCmmmx8zMw8fsJAIdlhNrUPY6x+cp6M3p7NSJLHNEcvhgrgYr31rFy2wmdkJyPwbN4OXq2pBcR4cAl+v2crkNkjBMXK4PXE7zuFaH3ynLXL1yWz3lMQqguobcCh5H7NvIXuPVeFxbqcglHDwusHoeV1kVvAweN+vP4x4qy+P
keBNTegffO5jnKU5XS+xsWmt8Fslaa5Sxj5iQ4UPC5kNrbT600bEmlzAijg8v9F1ziHXwOmRC10Qw+l3NGhwJh66YPzxgGEK0IkNYayk4kqDIE5RapEZ/yaY5bx19NumEgLKR7ppKp0E9AcmMxZ1bLC0bdXof95sl4RNhlW/A/3F3goF4xIntGDHm65Yj0MxKW84UoEt59cBftMzj/Rgeb+QGJXDHL7EYgzvexGJKaqkSb67LvQW0yc95yLWeU3QJdy6zdXwl3fKtxxVNHte1YEfSt3AoU1TFol+0NIt6hsOZiIkYzrIJCtxmVuACpMDpuHPFX1h76zWZRR630ls12+tMbyOFbGEb9rELIz9rpfvsffrtffBOYaiUC5OY7RqKj8KnRpuhkEVhAmK8xuu0ntTnKarGZr4rsp1cRzxiVZBQveUPCV1FhnPFwTwN9GCvgZpEjR28/hPwKcsDFsGT22WxWakByKkfNolSY5CzkFla6ox9fewjztphZYZpm3PKpuYAwzQVPln48fBPTxYOiGDggP7MbzNBQzbziyHeVls5HDaOYJQlnrI2DpzIz54Bd17onXQuD9oF5guBizm0LZW1aNHMvnBnKVGHBeRfQkYhjbj//E0h6zb0bOjpbmtvByene06LDIJ//jR0FWP9fQ0cXP1MRwh7y96TsLU7RBH+b4ZIiKtFbAR2pg9aD1RiV43YqRx2op6ia9O1Kn7IKfaBrFJGVagRSiXaVlYm1JWbsklXDWUozOm0NAXAvs4IjfViea/e25Gpcp3JVNltMlW2ODNVtuhMlZMLj1nat1Z+fES+JpfeRhSzNhfgXN8OnG9C9dFa5CRo4osLr37INpQZst1+Q9ash6xZD1kLrq/FDFOrc5hoHNvEclYsnaTxkosufsOD4ny77nOO4IMOca6DB6sTn+LykzznWoRCn+9CFHAXrvhmrJWfC5xrw9h1soz9uYD87/JaqjAlb8EAxPTYSXnmlKWGuc0Mc9I6v46+n6OL6hbLQxb5M3HW8VUN823ecGi/QVfB185DbvWcYpv5jouFI1OdQER1nyt8OiGUL5O+lm7sT45NN8GLSZHNJErvlNfQg7vebt/19C5xrgdiTum9r6WoJpr0Y8iqoSKna9R8HgzSfN5N83kw+Ih8TS5815qkyKs9SI3xtJ0aI30HcRR5u8f5hnFY9cLTQv7n/Bh3imufH4NzrX6MyV6vJ7nqGoPS6xz3auPxuMfH+qNKvLkCVycOpQZs9Y9C+zsZ697O2aOLJ6fllavfMqZkJ3nMK8jZtyqZ9keivEzrG9l2xRLqu9S4g8ydwqRdicVrCGX5lHPFNNrbKScNGSXgCZVZyhY5trxyzBZEzyvIEMzSKRI6eULJbZl0vZEDPTFbCObyM2AYcQ8+ROR79OSZrMohzDYNDBm8jiDswvUImQTl5BwY5ETlmYKdeWYxl/yhUC43p5AugOYkp6uhr+zJn+yiEyPLFByThBCleUp1shoTJFB4iqTl36L9fAuOqOzn8rXBk564zeOpxLaRsMtTqUXtRV5IrQZnbHF4IYVh8+2C3xNJ6c9CStfHqlclTlQ+nBvC7Q3h9oZwe0O4vSHcfizC7UocL24Itz8Twu3QDeHWSGpu4XZndeG2TvgLt8kthCL+9Au2ya1uHx0feTa5jXa5nRoSY5Pb6dMPcWMsPe366dMqhFb56P3NT4nQSoyXpq6p1zG8onodadQfVokUg/6lOYJkxH6mJJlzsGppDtjKKEwyiLocIcg/YbuAz7SpTIx8pqoix3m40BtBkuTEOmX0hpxo9lWZkGg5bBNdxXrINW/jezviWulTBwuStUjoWK9deqYhW0LweVvI/yjfU4vyPb2dfuvkNTGM+/HRn/X1y2MYX+PDolZgCbeZkG0Tx6Hc5Td8reNtLtMIM5Jb6dNt1Gyy/HiDDnjniuS4pTdJLuequtHoiK2J4HG2jd8hn2zrX11RtvUKSdbLP5eXn2RdpVM3h+zwnKL
TfLet4e5KG5GSShvK8M3dahyZ1G9m9egVR32NWyAu31qtvsZtKFxJz/kRi0zjlM+tF0Gr92P7FlNNY8ZshyE812+2c73Kr1gcmUuicz+Jzg2swa9hnVw4dTCIzwN+bOGGffv6sG9fqWk7cSWmbducPWSbs+kWUlrqqVQuPpebWVa94omBASTrKsSzuXgK9S98s2g3iRKhzj9zvF+Fisq2cFsyg1Wcqo8xP1xNbQoaQin3UynR4YiJt9B1iB6znjARf/qPBJYaLjJG+4adPJHSbC+/Ua0ysBBLL5BnUP5biFcQWsIxVQeX5ohPUekF23foGLKEgA1J7kqCBiWADwJc26N3t72lewW7+Z0DM+46F+JeAaTZm45pYGThOTG58LB8bRaTk7n1IiRlFYrr+gFQPnKroSzGCuWzk0FxtdhyCjRL76gL7SOpuxcjnlYOt4gE3ckhyymLIx/LvZnlk3NUD1w+q/n84nyRpwB2Oib08tyoZwQg4MVCXmV2cd1zumSSlPnchyMmjDLokIfrkXK8Q77arfWgGzd2I6NuDHpdPJ0hNirteNABuQYdkKuddjzkWEGDjhU05IJccTPCNuQKSAveoDStahHkI+fCZwMEtPYEyUurVpcD0PNCuV2FaG1Kc2QLicx1TmdRecMdi+25SmlC5SK4Q0NCWE1tWJUrO9foys61lCJCuaTTPKMf1E0/qJWWXQxdm10G+6JVbMd1o66JcTudt1TYznqTDsYegA0K06NMJ9MAEGWf6U46KCA+ynjCO9ysQD8DA8p1W1Fu05RNmrJZU3o1ZYumbNWUbZpyu6Zs15Q+TenXlAFNGdSUhKYMacqwpuzQlBFN2akpuzRlVFPGNGW3puzRlDs0ZVxT7tSUvZqyT1P2K8rCtDVpj+8B2uegumWHBOPhnDmVZKK7IJW8INRtXHhByP8M3N1tOAa78l4mhDfoC+Fl97mYD9z14NBeOOuQaZ5MzcxkKuIuag9Sd6SukyukAEGdyVKWxGWv2uMXMHU5oVLXWgwipMX/J69S54FUcUQORjyZKeazmScyG1EF2oHbTczF7+HRit8Fr1ti/pzZ8zbHnlIG0O7bycxsKn+2MBZXKNHPn8cqnGcyAjoXi6ekxHaLf/8HKd/4mJZYZO8G9D6VyWdyUxkS7eyoFXdPGq/44TRVq4YOB7Zfbr8JVdea0cruMtciz4adKCtpuQu+K5tLzcT3zZLtkXZmeyQRmWaHArr7PZyaWeSjj1U4elL/cv3L3hDKq89n5wP5DGUqPygbXAkycIJGpHLjtn/fkX0TBw6hB92qk6mZlDxhues+mDmZ5R+KwA36BjHU/3rSat8IX4r8CiHMZ2c5NVOFM5wMFdnE0Y8evIzZchQbyv0QVGlTw/Su8IDQzgmQPIJ9SI505Xt3XvpDyeShiQOP4iLI1jq1mKf7sFzYUmau2PwlPoGK5+hKwo2D9fCWcjd7X6EwN5WlW8i/ghPjaaLkKf7nHuhTtO3j+qlD2fQOn713zk5kloorKXeDDB8DiNJwCHp+gK0fTAtpbwUxkfCdNTI/S4rfoOb3hQ4tWgE4665ZymjsN0U5jUDx/0+SaJiDaMhVy0sBVBIX6Y/qmo8rUJS3Ud56CiGIWhslrfJfB7QKAkRbrFgAgib9bKNXfC9wFS34BhdY/jxJp8rYXhJeZLATd3hRUJs0y4YXhTVsGkJ4kUkLyLBpRMOmHA3ZJrqAdta6AIsODRqt6qpYOpXiqC6R53eNKv+FQZt6JDUqlqdw8TH74qWwCdt7wOcncFBlm4FxNxjY9iaGbTkj/fk1umCNJ9kdrOsqmZ36wllb5ZImpa1bBduM2yg1/iGKpaDLrRdGMg0qFFh+68XQ6IAqI6kGydOA/AtICZ3MNUPw3spCK+fskEIrnQJCq2z6hAKUpKAKIVU2CRZOZTPMQqlsRlgYlc0uFkJlM8bCp2z2CGXRlQInhE3Z7CXZ0RGLgTRrVYTBg6uV5U5nZp2g9fWEVV1
1IQ3CFlag4lwxNXO3/OlXKanJR5ZJ5C8M3z9DjUn3kSQrsEN1MKjS4OzJ5RP77p24f3Iiu0WUaAL2+k9fv7h9u+o0rtdReZ93lNlFr5mTuEfx/Szr6HgP6nmkYs8RO6hIUYalkOqJF8kXHNsK/ZWu1ojZkEr3cRLcjZiXFX4hLedH5k5nc/H78/KOZabOxu3MKpzScLzizxieHewzUsdicS6OEjfGGQ6iVNVDJMoegsBIHGJnlUMMuQ/B4xfnwOe9VfoOu/sqCFMl/tHv+6ocZIc5yD2q0k3clMwBnkr2lMJolaOMlB5FZb7fP1dELdxKvXea3vcl4/KXyFbVL1a1goerHGCXOcDRJ7PFqTNxlUy5MFal46jpKB/7nLzqeSltxk8ux6W2lp2peuGDA+X651jOHap2gEHvAfjE1X7wYMLbT52wakd7wh2FB098XvJ4ybMq8wvZ0Z5tB3DGu7OnipJNZ8o+3qbnDscp5YQiDl71bPaEUoxKaeFVR3TUvqXgKMpxqXB7lX4D9q2cIcaCXNjlOwzO2vtD13Bx6C9yXNzlW05hEPCpjYXsePNyRT5zorA4n8kn+ASVa0hg4eSkfKczA8nPCOGoIYE0TuxJvDgzU+CETrQowVvZMMLSUhOkprxImsNnoDl4tYYQXCHY1oqshAGd16nb5HSKqT0bADw3wAljjdWGMhSscYRX+AddIqBe0CX+y2W3hSIhVQZC7zYL9uvlTCC571hskaBNd6AaFlG/AWoY1KNC+wB/CdQIqFmI6ER9GtQaUH9BaPB6BtRaUH9d6EpYD4MaBfUfVHElSvhcoyrQQrVpUKYSE2WMDTE+QhuOsAZH+A9yugD1A/zARlCDlrZm/BhUTv/cZSnjae77oDaD2m9sqW+C2gLqIR1OnHsN1FZQj+litbkvgNoG6gL5lIL6FKjtoF6wqAIkUdOgdoD6m1axlakToHaC+ocGFd+lEvIR9S1Cx0G9GVTOA/Mjq9jB1HqBhHxFR0KXbsja/ir9oK3SJ2aThNPALcB+VLUUQtJdPJt7IjWTTR/X+xPkChwggXYI7Q60w2hHfPMnU/6hY3cejz95JlWML88t5vVqWIjqHROzY+M4wk60u9COws1/cIDfBvktwW9D/DbMbzv4bQRvA9xhdNSGlhJlf1pcDwZcaRh6uIse/gFqBqnZSQ1Vrkv2WBqi+EBoPINqzST/kz59n5qXqPkyNa9R8yo1X6UmRc2fUXPQ0kjJReImyKFC6ASq4ZRyGLqavycO8ylwmHrUtospLuN9RdE2lN1e9hUI21XyDBKxPigElUzTDEQhDC5/gyLsWDrRXIgTWqr87WGVM449uhTHOG64gDz4jFjKkKZNxks7O/8DsGdyXjihLaCUbwAZSySPodptXZJ/EN1UzaODF2htUMYuXUysIOR/l7d4mPjNM1xwux5u3lHhhjcot0C5AAP5ZDp8SVQCBfbmVmkUsrVkKEvHGczYKA+HnE/FJu1RQmeh6rFQ+kfIVX4T77tZvvVyyvatYoRwiW1slrsdteIkN5lmoGI7HMv62NcM1dja4MddS+XankGtvuWDhDNQTqh+dvgewBD9e2C5WajKaoPKRtcsQsW1sAW/b9lZ7tMoLEvBGsA+prsEH4+ONcQ574i8zoBRyBJP+wMhkfvTyelucVG4rvQu0ekkjYouPkqd8yhj+ij1hH+Ys5rbvlsbCS3abPfbo/vBX85z9ju47BxtUD26jfu5mo5yVLopmzxoPa4q0PVkOgN9vdtAzrgGobons1Lbmu3nN8Izyhcd32p/Pfbumy8cj1/6jfdef/XS1957/Svvvf7cpd9+7/VfvPQ1Bc/ujoNLs8H7z4XSlwtU6Is0RaUhxlMz+UwqLQX1JSkY2tKdPsF7r79Eh32V/r/x3KU/pjN967PmFC7n5WxacuZscdkPQiC8QCsT/SwYfrymsNXnLB/eMVIOuCgzApeP2GAd+sQ9wO0xaPllwrsuz1Pol6nxuAv9GjV
wLbY9hUwNOQj68uaczqTlMpf8W9pIObpVHUM3CMf+yq9Qc4kawlKS39EXZKNzbEIztGHQ3hJuj28CIbZnc/HC4hT52Dg30WRliOJUNjOTxmh5aL04N7SE74pSwd9jFYBF4a+o+WuhrQxYmqFSIGcrzAXItwFHo3I+RhD7Z8+ms3mYA+4/ykWNkOoCmgo8j2jBht6Baa8eAvbj/J55NhvNo4o7VM4zCRMwn5rFezqVR3infM+zDU1+yJfKAY/L2xeWFwj3XqFK0Wm3a9Y6gnDXvpV0CuWUXS+1j0PWlKqLSz2aZY+Q1Wh1yn0are1ob0LbJdsYXnWooWs8noJaHqjzZjnTCazlWm9cW/DzeDFXmsOdRkf4S2FrDg8ZHeGbxKb5ELVa5o+ouCuVgBZ1jZQKQF9qWdytc3ULolt9Sbegt1uDqxuyIpHXqbtbMWi70jg6q4sG96fbzc4OTpNx/Ng/femV4/HBvvj8YjFenDubyfU5jc9qe6KPVWuNzvTFViSz/xxJjxDcjfVvACq5UcX3LRbPzOWzT4Ehqh3G7GMPlDn24Kwfa7DVA3UW/SNsfm5OYbongOeX66nXLL+e4DXs6mc//veZB5yeAeYBYBPmEeWsYPR1PnuWjcs4GtcUSz2RsWkJ++NQ6UNGT/l4jck002Ab6pSQXI8ajzHL+6lVfvL8DOgwec+gonQt/zZ4z+HCVfqzjJQcMmBdWDjyGWZUYHKUuxrryOHZ+bl8kVkURud/NW9DBTRdtq3PLtlG2xQTi2kuyYOAG8gcst7w2fv15kwmJf8l3yfih5pPgohhJZ8YjkuXJFZvSJ9xaEBQfqAGQQP6PWr+SGg1COrSD6ghR8rkP1Pzb0IrU9C3oFtB6ep1q2S23yCUs4NmWkB1e9woWPcIp0blsvzSmO+ZnUsvzmTGaeIUyMn2ccn4mhCxUh9oCkTD8i+y1fpQfChItWpEBdAoCkxErI3W/8lPH4qXrXbrv7HPfpiEI4B8uMREEPsS+EPbf9XajE/bJWWtZL3H5dbN8rUkXy+rz0H1Pkcup/L1oqL9iXx9W75+B7BQuzVqDVn3yk/HrcetPVZjTUQy8Kj9F4zujr4WPRw9En0gejC6P9oY3RUdit4eXRdti8ajzfK1OzoefTb6YvRBuUe7/GuOjsr9BmS7LggoihaB/wfSp2TQ")))) | 20,606.5 | 41,186 | 0.971053 | 1,163 | 41,213 | 34.411006 | 0.984523 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.148577 | 0.000049 | 41,213 | 2 | 41,186 | 20,606.5 | 0.822523 | 0 | 0 | 0 | 0 | 0.5 | 0.997913 | 0.997913 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 10 |
c13ebf18ba9d99a3e832c13ecb1b0ebbe41383df | 174 | py | Python | test3.py | Ambareezh/pyneta | a64a2c213847bdec0af4064730c2c6f1d47575c7 | [
"Apache-2.0"
] | null | null | null | test3.py | Ambareezh/pyneta | a64a2c213847bdec0af4064730c2c6f1d47575c7 | [
"Apache-2.0"
] | null | null | null | test3.py | Ambareezh/pyneta | a64a2c213847bdec0af4064730c2c6f1d47575c7 | [
"Apache-2.0"
] | null | null | null | print("Hello")
# Re-emit the same banner: five "Hello" lines followed by four
# "New Chnage " lines (runtime strings kept byte-identical, typo included).
for _ in range(5):
    print("Hello")
for _ in range(4):
    print("New Chnage ")
| 15.818182 | 20 | 0.666667 | 24 | 174 | 4.833333 | 0.166667 | 0.517241 | 0.775862 | 0.862069 | 1 | 1 | 1 | 1 | 1 | 0.560345 | 0 | 0 | 0.103448 | 174 | 10 | 21 | 17.4 | 0.74359 | 0 | 0 | 1 | 0 | 0 | 0.425287 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 12 |
c13f28ad2f229d4ce68a679eafc21e240b8e1cdf | 21,737 | py | Python | Dalitz_simplified/evaluation_of_optimised_classifiers/gaussian_same_projection_on_each_axis_analysis/plot_gauss_dimenionality_analysis.py | weissercn/MLTools | 75dc566947437249ad077939941839126eb20016 | [
"MIT"
] | null | null | null | Dalitz_simplified/evaluation_of_optimised_classifiers/gaussian_same_projection_on_each_axis_analysis/plot_gauss_dimenionality_analysis.py | weissercn/MLTools | 75dc566947437249ad077939941839126eb20016 | [
"MIT"
] | null | null | null | Dalitz_simplified/evaluation_of_optimised_classifiers/gaussian_same_projection_on_each_axis_analysis/plot_gauss_dimenionality_analysis.py | weissercn/MLTools | 75dc566947437249ad077939941839126eb20016 | [
"MIT"
] | null | null | null | import sys
import numpy as np
import matplotlib.pyplot as plt
import os
# Options for MODE: 'single_p_values', 'ensemble', 'ensemble_redefined',
# 'ensemble_redefined_noCPV', 'ensemble_redefined_optimised',
# 'ensemble_redefined_noCPV_optimised'
MODE = 'ensemble_redefined_noCPV_optimised'

# Dimensionalities analysed in every mode.
DIMENSIONS = [2, 3, 4, 5, 6, 7, 8, 9, 10]

# Directories holding the per-classifier result files; the figures are also
# copied into these directories.
BDT_DIR = "../bdt_gaussian_same_projection/"
SVM_DIR = "../svm_gaussian_same_projection/"
NN_DIR = "../nn_gaussian_same_projection/"
MIRANDA_DIR = "../miranda_gaussian_same_projection/"

# Colours used for the Miranda curves, keyed by number of bins.
MIRANDA_COLORS = {2: 'red', 3: 'indianred', 5: 'saddlebrown'}


def _load_single(template):
    """Load one scalar p value per dimension.

    template -- path format string containing a ``{dim}`` placeholder.
    Returns a list parallel to DIMENSIONS.
    """
    return [np.loadtxt(template.format(dim=dim)) for dim in DIMENSIONS]


def _load_ensemble(template):
    """Load the (1, 2, 3) standard-deviation sample counts per dimension.

    template -- path format string containing a ``{dim}`` placeholder; each
    referenced file holds three numbers (counts at 1, 2 and 3 sigma).
    Returns three lists (p_1, p_2, p_3), each parallel to DIMENSIONS.
    """
    p_1, p_2, p_3 = [], [], []
    for dim in DIMENSIONS:
        temp1, temp2, temp3 = np.loadtxt(template.format(dim=dim))
        p_1.append(temp1)
        p_2.append(temp2)
        p_3.append(temp3)
    return p_1, p_2, p_3


def _save_figure(fig, fig_name, directories):
    """Save *fig* under *fig_name* in the working directory and in each of
    the given classifier directories, then report the file name."""
    fig.savefig(fig_name)
    for directory in directories:
        fig.savefig(directory + fig_name)
    print("Saved the figure as", fig_name + ".png")


def _plot_ensemble(series, title, ylim, legend_loc, fig_name):
    """Plot 2-sigma sample counts versus dimension for several classifiers.

    series -- list of (values, label, colour) triples, one per curve, in
    the order the curves should be drawn.
    """
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    for values, label, color in series:
        ax.plot(DIMENSIONS, values, label=label, color=color)
    plt.ylim(ylim)
    ax.set_xlabel("Number of dimensions")
    ax.set_ylabel("Number of samples")
    ax.set_title(title)
    ax.legend(loc=legend_loc)
    # The original script never saved ensemble figures to the nn directory,
    # even in the modes that include a neural-network curve; kept as-is.
    _save_figure(fig, fig_name, [BDT_DIR, SVM_DIR, MIRANDA_DIR])


# Per-mode configuration for the ensemble modes: result-file name templates
# (``{dim}`` and ``{bins}`` placeholders), nn print label (None when the mode
# has no nn curve), plot title, y limits, legend position and figure name.
ENSEMBLE_CONFIGS = {
    'ensemble': {
        'bdt': "{dim}Dgaussian_same_projection__0_1__0_085_bdt_p_values_1_2_3_std_dev.txt",
        'svm': "{dim}Dgaussian_same_projection__0_1__0_085_svm_ensemble_p_values_1_2_3_std_dev.txt",
        'nn': None,
        'nn_label': None,
        'miranda': "gaussian_same_projection_p_value_distribution__0_1__0_085_CPV_miranda_{dim}D_{bins}_bins_p_values_1_2_3_std_dev.txt",
        'title': "Dimensionality analysis",
        'ylim': [-5, 105],
        'legend_loc': 'right',
        'fig_name': "gaussian_same_projection__0_1__0_085_ensemble_dimensionality_analysis",
    },
    'ensemble_redefined': {
        'bdt': "{dim}Dgaussian_same_projection_redefined__0_1__0_075_bdt_p_values_1_2_3_std_dev.txt",
        'svm': "{dim}Dgaussian_same_projection_redefined__0_1__0_075_svm_p_values_1_2_3_std_dev.txt",
        'nn': None,
        'nn_label': None,
        'miranda': "gaussian_same_projection_redefined_p_value_distribution__0_1__0_075_CPV_miranda_{dim}D_{bins}_bins_p_values_1_2_3_std_dev.txt",
        'title': "Dimensionality analysis redefined 0.075",
        'ylim': [-5, 120],
        'legend_loc': 'upper left',
        'fig_name': "gaussian_same_projection_redefined__0_1__0_075_ensemble_dimensionality_analysis",
    },
    'ensemble_redefined_noCPV': {
        'bdt': "{dim}Dgaussian_same_projection_redefined__0_1__0_1_noCPV_bdt_p_values_1_2_3_std_dev.txt",
        'svm': "{dim}Dgaussian_same_projection_redefined__0_1__0_1_noCPV_svm_p_values_1_2_3_std_dev.txt",
        'nn': None,
        'nn_label': None,
        'miranda': "gaussian_same_projection_redefined_p_value_distribution__0_1__0_1_noCPV_miranda_{dim}D_{bins}_bins_p_values_1_2_3_std_dev.txt",
        'title': "Dimensionality analysis redefined noCPV",
        'ylim': [-5, 105],
        'legend_loc': 'right',
        'fig_name': "gaussian_same_projection_redefined__0_1__0_1_noCPV_ensemble_dimensionality_analysis",
    },
    'ensemble_redefined_optimised': {
        'bdt': "{dim}Dgaussian_same_projection_redefined__0_1__0_075_optimised_bdt_p_values_1_2_3_std_dev.txt",
        'svm': "{dim}Dgaussian_same_projection_redefined__0_1__0_075_optimised_svm_p_values_1_2_3_std_dev.txt",
        # The optimised nn result files carry no "nn" marker in their name.
        'nn': "{dim}Dgaussian_same_projection_redefined__0_1__0_075_optimised_p_values_1_2_3_std_dev.txt",
        'nn_label': "Neural Network : ",
        # The Miranda results are shared with the non-optimised mode.
        'miranda': "gaussian_same_projection_redefined_p_value_distribution__0_1__0_075_CPV_miranda_{dim}D_{bins}_bins_p_values_1_2_3_std_dev.txt",
        'title': "Dimensionality analysis redefined 0.075",
        'ylim': [-5, 120],
        'legend_loc': 'best',
        'fig_name': "gaussian_same_projection_redefined__0_1__0_075_optimised_ensemble_dimensionality_analysis",
    },
    'ensemble_redefined_noCPV_optimised': {
        'bdt': "{dim}Dgaussian_same_projection_redefined__0_1__0_1_noCPV_optimised_bdt_p_values_1_2_3_std_dev.txt",
        'svm': "{dim}Dgaussian_same_projection_redefined__0_1__0_1_noCPV_optimised_svm_p_values_1_2_3_std_dev.txt",
        'nn': "{dim}Dgaussian_same_projection_redefined__0_1__0_1_noCPV_optimised_p_values_1_2_3_std_dev.txt",
        'nn_label': "Neural Network 3 layers with 33 neurons : ",
        # The Miranda results are shared with the non-optimised noCPV mode.
        'miranda': "gaussian_same_projection_redefined_p_value_distribution__0_1__0_1_noCPV_miranda_{dim}D_{bins}_bins_p_values_1_2_3_std_dev.txt",
        'title': "Dimensionality analysis redefined noCPV",
        'ylim': [-5, 105],
        'legend_loc': 'right',
        'fig_name': "gaussian_same_projection_redefined__0_1__0_1_noCPV_optimised_ensemble_dimensionality_analysis",
    },
}

if MODE == 'single_p_values':
    # One scalar p value per dimension and classifier, plotted on a log axis.
    print("Gaussian same projection on each axis dimensional analysis \n")
    p_bdt = _load_single(BDT_DIR + "{dim}Dgaussian_same_projection__0_1__0_085_bdt_p_values")
    print("Boosted decision tree : ", p_bdt)
    p_svm = _load_single(SVM_DIR + "{dim}Dgaussian_same_projection__0_1__0_085_svm_p_values")
    print("Support vector machine : ", p_svm)
    p_nn = _load_single(NN_DIR + "{dim}Dgaussian_same_projection__0_1__0_085_nn_4layers_100neurons_onehot_p_values")
    print("Neural Network : ", p_nn)
    p_miranda = {}
    for bins in (2, 3, 5):
        template = (MIRANDA_DIR
                    + "gaussian_same_projection_p_value_distribution__0_1__0_085_CPV_miranda_"
                    + "{dim}D_" + str(bins) + "_bins_p_values")
        p_miranda[bins] = _load_single(template)
        print("Miranda %d bins : " % bins, p_miranda[bins])
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(DIMENSIONS, p_bdt, label="bdt ", color='darkorange')
    ax.plot(DIMENSIONS, p_svm, label="svm ", color='lawngreen')
    ax.plot(DIMENSIONS, p_nn, label="nn 4l 100n ", color='blueviolet')
    for bins in (2, 3, 5):
        ax.plot(DIMENSIONS, p_miranda[bins], label="Miranda %dbins" % bins,
                color=MIRANDA_COLORS[bins])
    ax.set_yscale('log')
    plt.ylim([0, 1])  # NOTE: matplotlib clips the non-positive lower limit on a log axis
    ax.set_xlabel("Number of dimensions")
    ax.set_ylabel("P value")
    ax.set_title("Dimensionality analysis gaussian same projection sigmas perp .1 and 0.085")
    ax.legend(loc='lower left')
    _save_figure(fig, "gaussian_same_projection__0_1__0_085_dimensionality_analysis",
                 [BDT_DIR, SVM_DIR, NN_DIR, MIRANDA_DIR])
elif MODE in ENSEMBLE_CONFIGS:
    # Ensemble modes: per dimension, count samples beyond 1/2/3 sigma and
    # plot the 2-sigma counts for every classifier.
    cfg = ENSEMBLE_CONFIGS[MODE]
    p_bdt = _load_ensemble(BDT_DIR + cfg['bdt'])
    print("Boosted decision tree : ", *p_bdt)
    series = [(p_bdt[1], r"bdt 2$\sigma$", 'darkorange')]
    p_svm = _load_ensemble(SVM_DIR + cfg['svm'])
    print("Support vector machine : ", *p_svm)
    series.append((p_svm[1], r"svm 2$\sigma$", 'lawngreen'))
    if cfg['nn'] is not None:
        p_nn = _load_ensemble(NN_DIR + cfg['nn'])
        # Bug fix: the original 'ensemble_redefined_optimised' branch printed
        # the bdt values again here instead of the nn values just loaded.
        print(cfg['nn_label'], *p_nn)
        series.append((p_nn[1], r"nn 2$\sigma$", 'blue'))
    for bins in (2, 3, 5):
        p_miranda = _load_ensemble(MIRANDA_DIR + cfg['miranda'].replace("{bins}", str(bins)))
        print("Miranda %d bins: " % bins, *p_miranda)
        series.append((p_miranda[1], r"Miranda %dbins 2$\sigma$" % bins,
                       MIRANDA_COLORS[bins]))
    _plot_ensemble(series, cfg['title'], cfg['ylim'], cfg['legend_loc'], cfg['fig_name'])
else:
    print("No valid mode entered")
| 47.564551 | 213 | 0.721259 | 3,502 | 21,737 | 3.991719 | 0.039121 | 0.015452 | 0.122756 | 0.040132 | 0.950783 | 0.926747 | 0.917662 | 0.912941 | 0.912941 | 0.903069 | 0 | 0.063706 | 0.155817 | 21,737 | 456 | 214 | 47.66886 | 0.698093 | 0.007591 | 0 | 0.786932 | 0 | 0 | 0.358835 | 0.251588 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.011364 | null | null | 0.116477 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c168abba463544e2152ebee51bb664861cdad037 | 519 | py | Python | env/lib/python3.6/site-packages/web3/db.py | bopopescu/smart_contracts7 | 40a487cb3843e86ab5e4cb50b1aafa2095f648cd | [
"Apache-2.0"
] | null | null | null | env/lib/python3.6/site-packages/web3/db.py | bopopescu/smart_contracts7 | 40a487cb3843e86ab5e4cb50b1aafa2095f648cd | [
"Apache-2.0"
] | null | null | null | env/lib/python3.6/site-packages/web3/db.py | bopopescu/smart_contracts7 | 40a487cb3843e86ab5e4cb50b1aafa2095f648cd | [
"Apache-2.0"
] | 1 | 2020-07-24T17:53:25.000Z | 2020-07-24T17:53:25.000Z | class Db(object):
def __init__(self, web3):
    """Bind this namespace to its parent Web3 instance."""
    # Kept only so the deprecated ``web3.db`` attribute still constructs;
    # every public method below raises DeprecationWarning.
    self.web3 = web3
def putString(self, *args, **kwargs):
    """Removed ``db_putString`` API; unconditionally raises DeprecationWarning."""
    del args, kwargs  # accepted only for signature compatibility with old callers
    raise DeprecationWarning("This function has been deprecated")
def getString(self, *args, **kwargs):
    """Removed ``db_getString`` API; unconditionally raises DeprecationWarning."""
    message = "This function has been deprecated"
    raise DeprecationWarning(message)
def putHex(self, *args, **kwargs):
    """Removed ``db_putHex`` API; unconditionally raises DeprecationWarning."""
    del args, kwargs  # accepted only for signature compatibility with old callers
    raise DeprecationWarning("This function has been deprecated")
def getHex(self, *args, **kwargs):
    """Removed ``db_getHex`` API; unconditionally raises DeprecationWarning."""
    message = "This function has been deprecated"
    raise DeprecationWarning(message)
| 32.4375 | 69 | 0.674374 | 58 | 519 | 5.965517 | 0.344828 | 0.092486 | 0.16185 | 0.219653 | 0.789017 | 0.789017 | 0.789017 | 0.789017 | 0.789017 | 0.789017 | 0 | 0.007371 | 0.2158 | 519 | 15 | 70 | 34.6 | 0.842752 | 0 | 0 | 0.363636 | 0 | 0 | 0.254335 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.454545 | false | 0 | 0 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
c1812fd3ed722f2c1bce2933bf75ee6b7eeca819 | 211 | py | Python | GooseBib/__init__.py | tdegeus/GooseBib | 498f1dee9d0668acd0e039f108673a78233e91e0 | [
"MIT"
] | 4 | 2019-02-20T23:56:35.000Z | 2021-12-14T19:07:52.000Z | GooseBib/__init__.py | tdegeus/GooseBib | 498f1dee9d0668acd0e039f108673a78233e91e0 | [
"MIT"
] | 24 | 2018-03-31T17:04:33.000Z | 2022-03-30T16:41:09.000Z | GooseBib/__init__.py | tdegeus/GooseBib | 498f1dee9d0668acd0e039f108673a78233e91e0 | [
"MIT"
] | 1 | 2022-03-05T19:54:54.000Z | 2022-03-05T19:54:54.000Z | from . import bibtex
from . import journals
from . import recognise
from . import reformat
from . import tex
from ._version import version
from ._version import version_tuple
from .journals import get_configdir
| 23.444444 | 35 | 0.810427 | 29 | 211 | 5.758621 | 0.37931 | 0.299401 | 0.203593 | 0.287425 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.151659 | 211 | 8 | 36 | 26.375 | 0.932961 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
e79313fa0d142173ca96366cedb4a695b8c80087 | 2,984 | py | Python | speech_dtw/speech_dtw/tests/test_dtw_cost.py | toogy/pendigits-hmm | 03382e1457941714439d40b67e53eaf117fe4d08 | [
"MIT"
] | null | null | null | speech_dtw/speech_dtw/tests/test_dtw_cost.py | toogy/pendigits-hmm | 03382e1457941714439d40b67e53eaf117fe4d08 | [
"MIT"
] | null | null | null | speech_dtw/speech_dtw/tests/test_dtw_cost.py | toogy/pendigits-hmm | 03382e1457941714439d40b67e53eaf117fe4d08 | [
"MIT"
] | null | null | null | """
Author: Herman Kamper
Contact: h.kamper@sms.ed.ac.uk
Date: 2014
"""
import numpy as np
import numpy.testing as npt
from speech_dtw import _dtw
def test_dp_cost():
    """
    Test by comparing output with that generated by Dan Ellis's Matlab code.

    Some random sequences were generated, a difference matrix was calculated
    and then the sequences were aligned using Dan Ellis' Matlab code. The
    Matlab code can be found at:
    - http://www.ee.columbia.edu/ln/labrosa/matlab/dtw/
    """
    # Input sequences (fixed random values matching the Matlab run)
    s = [
        0.31907989, 0.36886349, 0.88601401, 0.10647894, 0.55568399, 0.03362998,
        0.08154014, 0.29557532, 0.44387983, 0.46424962, 0.83323797, 0.75725261,
        0.9612375 , 0.45526512, 0.52581353, 0.82994492, 0.95958503, 0.81058366,
        0.52815477, 0.28542249, 0.71805045, 0.11559819, 0.86323944, 0.27966765,
        0.16590741, 0.18876726, 0.63849623, 0.26842587, 0.96991667, 0.36573135,
        0.11002549, 0.62467659, 0.14615971, 0.60991811, 0.58880893, 0.66059894,
        0.54244158, 0.11051738]
    t = [
        0.7841284 , 0.38693716, 0.22540918, 0.36019438, 0.22338693, 0.97122249,
        0.39489736, 0.65732219, 0.8382532 , 0.22341524, 0.45894789, 0.40387544,
        0.04906017, 0.92050544, 0.88682675, 0.17118542, 0.96703938, 0.5522097,
        0.45500812, 0.54280973, 0.99330313, 0.40290325, 0.60708765, 0.72397749,
        0.2036831 , 0.11112938, 0.25064554, 0.26799352, 0.25187908]
    # Pairwise absolute differences via broadcasting instead of nested loops;
    # values are identical to abs(s[i] - t[j]) computed element by element.
    dist_mat = np.abs(np.array(s)[:, np.newaxis] - np.array(t)[np.newaxis, :])
    cost = _dtw.dp_cost(dist_mat)
    # Expected alignment cost from the Matlab reference implementation
    npt.assert_almost_equal(cost, 6.43207308)
def test_dtw_cost():
    """Check `multivariate_dtw_cost` against the same Matlab reference cost."""
    # Input sequences (fixed random values matching the Matlab run)
    s = [
        0.31907989, 0.36886349, 0.88601401, 0.10647894, 0.55568399, 0.03362998,
        0.08154014, 0.29557532, 0.44387983, 0.46424962, 0.83323797, 0.75725261,
        0.9612375 , 0.45526512, 0.52581353, 0.82994492, 0.95958503, 0.81058366,
        0.52815477, 0.28542249, 0.71805045, 0.11559819, 0.86323944, 0.27966765,
        0.16590741, 0.18876726, 0.63849623, 0.26842587, 0.96991667, 0.36573135,
        0.11002549, 0.62467659, 0.14615971, 0.60991811, 0.58880893, 0.66059894,
        0.54244158, 0.11051738]
    t = [
        0.7841284 , 0.38693716, 0.22540918, 0.36019438, 0.22338693, 0.97122249,
        0.39489736, 0.65732219, 0.8382532 , 0.22341524, 0.45894789, 0.40387544,
        0.04906017, 0.92050544, 0.88682675, 0.17118542, 0.96703938, 0.5522097,
        0.45500812, 0.54280973, 0.99330313, 0.40290325, 0.60708765, 0.72397749,
        0.2036831 , 0.11112938, 0.25064554, 0.26799352, 0.25187908]
    # Each sequence becomes a C-contiguous (n_frames, 1) feature matrix, as
    # required by the Cython wrapper.
    s_mat = np.ascontiguousarray(np.array(s).reshape(-1, 1))
    t_mat = np.ascontiguousarray(np.array(t).reshape(-1, 1))
    cost = _dtw.multivariate_dtw_cost(s_mat, t_mat, "euclidean")
    # Expected alignment cost from the Matlab reference implementation
    npt.assert_almost_equal(cost, 6.43207308)
| 38.753247 | 99 | 0.657507 | 448 | 2,984 | 4.330357 | 0.345982 | 0.024742 | 0.015464 | 0.016495 | 0.704124 | 0.704124 | 0.704124 | 0.704124 | 0.704124 | 0.704124 | 0 | 0.514141 | 0.206099 | 2,984 | 76 | 100 | 39.263158 | 0.30477 | 0.153485 | 0 | 0.711111 | 0 | 0 | 0.004428 | 0 | 0 | 0 | 0 | 0 | 0.044444 | 1 | 0.044444 | false | 0 | 0.066667 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e7da0a56dd652671f92313595de4ce862c5d6807 | 181 | py | Python | learning_augmented_online_algorithms/algorithms/threshold_functions/__init__.py | liv20/learning_augmented_online_algorithms | 2fce62f425e670e2623975888acca8738f328b20 | [
"MIT"
] | null | null | null | learning_augmented_online_algorithms/algorithms/threshold_functions/__init__.py | liv20/learning_augmented_online_algorithms | 2fce62f425e670e2623975888acca8738f328b20 | [
"MIT"
] | null | null | null | learning_augmented_online_algorithms/algorithms/threshold_functions/__init__.py | liv20/learning_augmented_online_algorithms | 2fce62f425e670e2623975888acca8738f328b20 | [
"MIT"
] | 2 | 2022-03-23T06:42:16.000Z | 2022-03-23T06:43:41.000Z | from .abstract_threshold_function import AbstractThresholdFunction
from .oms_threshold_function import OMSThresholdFunction
from .owt_threshold_function import OWTThresholdFunction
| 45.25 | 66 | 0.917127 | 18 | 181 | 8.888889 | 0.555556 | 0.31875 | 0.43125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066298 | 181 | 3 | 67 | 60.333333 | 0.946746 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
99d14ad74fc196127ea9070c308cd1586a05a061 | 6,205 | py | Python | cacreader/swig-4.0.2/Examples/test-suite/python/cpp_parameters_runme.py | kyletanyag/LL-Smartcard | 02abea9de5a13f8bae4d7832ab34cb7f0d9514c9 | [
"BSD-3-Clause"
] | 1,031 | 2015-01-02T14:08:47.000Z | 2022-03-29T02:25:27.000Z | cacreader/swig-4.0.2/Examples/test-suite/python/cpp_parameters_runme.py | kyletanyag/LL-Smartcard | 02abea9de5a13f8bae4d7832ab34cb7f0d9514c9 | [
"BSD-3-Clause"
] | 240 | 2015-01-11T04:27:19.000Z | 2022-03-30T00:35:57.000Z | cacreader/swig-4.0.2/Examples/test-suite/python/cpp_parameters_runme.py | kyletanyag/LL-Smartcard | 02abea9de5a13f8bae4d7832ab34cb7f0d9514c9 | [
"BSD-3-Clause"
] | 224 | 2015-01-05T06:13:54.000Z | 2022-02-25T14:39:51.000Z | from cpp_parameters import *
# Testing correct and incorrect parameter counts being passed (kwargs and non-kwargs)
# Note that the implementation depends a lot on whether zero, one, two or more args are being wrapped
def is_python_fastproxy():
    """Return True if SWIG is generating Python code using -fastproxy."""
    import cpp_parameters as mod
    # Only the -fastproxy mode emits the _swig_new_instance_method helper,
    # so its presence identifies the wrapper style.
    return hasattr(mod, "_swig_new_instance_method")
# Zero parameters expected: every call that supplies an argument, whether
# positionally or by keyword, must be rejected with TypeError.
x = Zero()
_zero_bad_calls = (
    lambda: Zero(z=0),
    lambda: Zero(0),
    lambda: x.zero(z=0),
    lambda: x.zero(0),
    lambda: Zero.stat_zero(z=0),
    lambda: Zero.stat_zero(0),
    lambda: global_zero(z=0),
    lambda: global_zero(0),
)
for bad_call in _zero_bad_calls:
    try:
        bad_call()
        raise RuntimeError("Missed throw")
    except TypeError:
        pass
# One mandatory parameter expected: a second argument (keyword or positional)
# must be rejected with TypeError.
x = One(1)
for bad_call in (
    lambda: One(a=1, z=0),
    lambda: One(1, 0),
    lambda: x.one(a=1, z=0),
    lambda: x.one(1, 0),
    lambda: One.stat_one(a=1, z=0),
    lambda: One.stat_one(1, 0),
    lambda: global_one(a=1, z=0),
    lambda: global_one(1, 0),
):
    try:
        bad_call()
        raise RuntimeError("Missed throw")
    except TypeError:
        pass
# Two mandatory parameters expected: a third argument (keyword or positional)
# must be rejected with TypeError.
x = Two(1, 2)
for bad_call in (
    lambda: Two(a=1, b=2, z=0),
    lambda: Two(1, 2, 0),
    lambda: x.two(a=1, b=2, z=0),
    lambda: x.two(1, 2, 0),
    lambda: Two.stat_two(a=1, b=2, z=0),
    lambda: Two.stat_two(1, 2, 0),
    lambda: global_two(a=1, b=2, z=0),
    lambda: global_two(1, 2, 0),
):
    try:
        bad_call()
        raise RuntimeError("Missed throw")
    except TypeError:
        pass
# Single optional parameter expected: passing a second argument (keyword or
# positional) must be rejected with TypeError.
x = Single(1)
for bad_call in (
    lambda: Single(a=1, z=0),
    lambda: Single(1, 0),
    lambda: x.single(a=1, z=0),
    lambda: x.single(1, 0),
    lambda: Single.stat_single(a=1, z=0),
    lambda: Single.stat_single(1, 0),
    lambda: global_single(a=1, z=0),
    lambda: global_single(1, 0),
):
    try:
        bad_call()
        raise RuntimeError("Missed throw")
    except TypeError:
        pass
# Test that -builtin option throws TypeError if kwargs are used even when they
# look like they should work, kwargs are not supported unless using -keyword.
# Also same for -fastproxy option except that kwargs are supported by default
# for constructors. TODO: Fix inconsistency.
if is_python_builtin() or is_python_fastproxy():
    def expect_type_error(bad_call):
        # kwargs must be rejected by -builtin/-fastproxy wrappers
        try:
            bad_call()
            raise RuntimeError("Missed throw")
        except TypeError:
            pass

    # One mandatory parameter in API
    x = One(1)
    if is_python_fastproxy():
        x = One(a=1)
    else:
        expect_type_error(lambda: One(a=1))
    expect_type_error(lambda: x.one(a=1))
    expect_type_error(lambda: One.stat_one(a=1))
    expect_type_error(lambda: global_one(a=1))

    # Two mandatory parameters in API
    x = Two(1, 2)
    if is_python_fastproxy():
        x = Two(a=1, b=2)
    else:
        expect_type_error(lambda: Two(a=1, b=2))
    expect_type_error(lambda: x.two(a=1, b=2))
    expect_type_error(lambda: Two.stat_two(a=1, b=2))
    expect_type_error(lambda: global_two(a=1, b=2))

    # Single optional parameter in API
    x = Single(1)
    if is_python_fastproxy():
        x = Single(a=1)
    else:
        expect_type_error(lambda: Single(a=1))
    expect_type_error(lambda: x.single(a=1))
    expect_type_error(lambda: Single.stat_single(a=1))
    expect_type_error(lambda: global_single(a=1))
else:
    # Non-builtin should work as expected: kwargs accepted everywhere.
    # One mandatory parameter in API
    x = One(a=1)
    x.one(a=1)
    One.stat_one(a=1)
    global_one(a=1)
    # Two mandatory parameters in API
    x = Two(a=1, b=2)
    x.two(a=1, b=2)
    Two.stat_two(a=1, b=2)
    global_two(a=1, b=2)
    # Single optional parameter in API
    x = Single(a=1)
    x.single(a=1)
    Single.stat_single(a=1)
    global_single(a=1)
| 20.892256 | 154 | 0.633199 | 861 | 6,205 | 4.504065 | 0.11266 | 0.192883 | 0.260959 | 0.31769 | 0.790872 | 0.775142 | 0.755802 | 0.714286 | 0.714286 | 0.701908 | 0 | 0.02371 | 0.265915 | 6,205 | 296 | 155 | 20.962838 | 0.827662 | 0.152619 | 0 | 0.850394 | 0 | 0 | 0.105595 | 0.004774 | 0 | 0 | 0 | 0.003378 | 0 | 1 | 0.003937 | false | 0.173228 | 0.007874 | 0 | 0.015748 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
99d172a775890bbb5f5b0befd877bc31a03c72cd | 12,180 | py | Python | biserici_inlemnite/app/migrations/0101_auto_20211105_1142.py | ck-tm/biserici-inlemnite | c9d12127b92f25d3ab2fcc7b4c386419fe308a4e | [
"MIT"
] | null | null | null | biserici_inlemnite/app/migrations/0101_auto_20211105_1142.py | ck-tm/biserici-inlemnite | c9d12127b92f25d3ab2fcc7b4c386419fe308a4e | [
"MIT"
] | null | null | null | biserici_inlemnite/app/migrations/0101_auto_20211105_1142.py | ck-tm/biserici-inlemnite | c9d12127b92f25d3ab2fcc7b4c386419fe308a4e | [
"MIT"
] | null | null | null | # Generated by Django 3.1.13 on 2021-11-05 09:42
from django.db import migrations
import django.db.models.manager
class Migration(migrations.Migration):
    """Install a ``cache`` manager on every photo (``poze*``) model.

    Auto-generated by Django; rewritten to derive the identical
    AlterModelManagers operation for each model from a single name list.
    """

    dependencies = [
        ('app', '0100_auto_20211105_1127'),
    ]

    # Every model below receives exactly the same manager change, so the
    # operations list is built from the model names.
    _POZE_MODELS = (
        'pozeaccese',
        'pozealtar',
        'pozealteelementeimportante',
        'pozeamplasament',
        'pozebolti',
        'pozeclopote',
        'pozecor',
        'pozecorpbiserica',
        'pozecosoroabe',
        'pozedescrierebolti',
        'pozeelementearhitecturale',
        'pozeelementesculptate',
        'pozeetapeanterioareinvelitoare',
        'pozeferestre',
        'pozefinisajeexteriorcorp',
        'pozefinisajeinchideretambur',
        'pozefinisajeinvelitoare',
        'pozefinisajeinvelitoareturle',
        'pozefinisajeinvelitoareturn',
        'pozefinisajexterior',
        'pozefinisajperetiinteriori',
        'pozefinisajtavanesibolti',
        'pozefundatie',
        'pozefundatii',
        'pozegeneraleexterior',
        'pozegeneraleinterior',
        'pozeicoanevechi',
        'pozeiconostas',
        'pozeinstalatieelectrica',
        'pozeinstalatietermica',
        'pozeinvelitoaresarpantasiturn',
        'pozemasaatlar',
        'pozemobilier',
        'pozemobiliere',
        'pozeobiectedecult',
        'pozeobiectedecultconservare',
        'pozeobiecteinstrainate',
        'pozeochiesi',
        'pozeparatraznet',
        'pozepardoseliinterioare',
        'pozepeisagisticasitului',
        'pozeperetedespartitor',
        'pozepicturaexterioara',
        'pozepicturainterioara',
        'pozepisanie',
        'pozeproscomidie',
        'pozesarpanta',
        'pozesarpantacorpbiserica',
        'pozesit',
        'pozesolee',
        'pozestratpictural',
        'pozestructuracatei',
        'pozestructuracheotoare',
        'pozestructuramixt',
        'pozetalpi',
        'pozetamplarii',
        'pozeteren',
        'pozetiranti',
        'pozeturle',
        'pozeturn',
        'pozeturnconservare',
        'pozevegetatie',
        'pozezonadinjurulbiserici',
    )

    operations = [
        migrations.AlterModelManagers(
            name=model_name,
            managers=[
                # Each operation gets its own Manager instance, matching the
                # original generated migration.
                ('cache', django.db.models.manager.Manager()),
            ],
        )
        for model_name in _POZE_MODELS
    ]
| 30.992366 | 62 | 0.481117 | 726 | 12,180 | 8.067493 | 0.129477 | 0.088783 | 0.152979 | 0.229469 | 0.779751 | 0.779751 | 0.779751 | 0.772751 | 0.772751 | 0.772751 | 0 | 0.004324 | 0.392447 | 12,180 | 392 | 63 | 31.071429 | 0.787162 | 0.003777 | 0 | 0.816062 | 1 | 0 | 0.117953 | 0.049868 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.007772 | 0 | 0.015544 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.