Schema:

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
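The rows below follow this schema: repository metadata per file, the raw source in `content`, one value column per quality measure (`qsc_*_quality_signal`), a matching integer flag column without the suffix, and the `effective`/`hits` summary fields. A minimal sketch of loading and inspecting such rows; the parquet file name and the use of pandas are assumptions, since the dump does not say how the table is stored:

```python
import pandas as pd

# Hypothetical storage location; the dump does not name one.
df = pd.read_parquet("code_quality_signals.parquet")

# Each row pairs a source file with its quality signals and filter flags.
row = df.iloc[0]
print(row["max_stars_repo_name"], row["size"], row["hits"])
print(row["content"][:200])
```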
Row 1: tests/hwsim/test_suite_b.py from wolfssl-jp/wolfssl_hostapd

| field | value |
|---|---|
| hexsha | ed756d7a8876dfcbc14a44ef51d43404a7b64d88 |
| size | 28,888 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/hwsim/test_suite_b.py |
| max_stars_repo_name | wolfssl-jp/wolfssl_hostapd |
| max_stars_repo_head_hexsha | 458ca6d59a6dac97b3d6870132740b255ca7929d |
| max_stars_repo_licenses | ["Unlicense"] |
| max_stars_count | 3 |
| max_stars_repo_stars_event_min_datetime | 2021-09-07T18:41:51.000Z |
| max_stars_repo_stars_event_max_datetime | 2021-09-17T21:50:52.000Z |
| max_issues_repo_path | tests/hwsim/test_suite_b.py |
| max_issues_repo_name | kareem-wolfssl/wolfssl_hostapd |
| max_issues_repo_head_hexsha | df2d4bae478c99086db2decc662ef440079fa63f |
| max_issues_repo_licenses | ["Unlicense"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | tests/hwsim/test_suite_b.py |
| max_forks_repo_name | kareem-wolfssl/wolfssl_hostapd |
| max_forks_repo_head_hexsha | df2d4bae478c99086db2decc662ef440079fa63f |
| max_forks_repo_licenses | ["Unlicense"] |
| max_forks_count | 2 |
| max_forks_repo_forks_event_min_datetime | 2021-09-02T23:36:42.000Z |
| max_forks_repo_forks_event_max_datetime | 2021-09-19T22:53:48.000Z |

content:
```python
# Suite B tests
# Copyright (c) 2014-2015, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.

import time
import logging
logger = logging.getLogger()

import hostapd
from utils import HwsimSkip, fail_test

def check_suite_b_capa(dev):
    if "GCMP" not in dev[0].get_capability("pairwise"):
        raise HwsimSkip("GCMP not supported")
    if "BIP-GMAC-128" not in dev[0].get_capability("group_mgmt"):
        raise HwsimSkip("BIP-GMAC-128 not supported")
    if "WPA-EAP-SUITE-B" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("WPA-EAP-SUITE-B not supported")
    check_suite_b_tls_lib(dev, level128=True)

def check_suite_b_tls_lib(dev, dhe=False, level128=False):
    tls = dev[0].request("GET tls_library")
    if tls.startswith("GnuTLS"):
        return
    if tls.startswith("wolfSSL"):
        return
    if not tls.startswith("OpenSSL"):
        raise HwsimSkip("TLS library not supported for Suite B: " + tls)
    supported = False
    for ver in ['1.0.2', '1.1.0', '1.1.1']:
        if "build=OpenSSL " + ver in tls and "run=OpenSSL " + ver in tls:
            supported = True
            break
        if not dhe and not level128 and "build=OpenSSL " + ver in tls and "run=BoringSSL" in tls:
            supported = True
            break
    if not supported:
        raise HwsimSkip("OpenSSL version not supported for Suite B: " + tls)

def suite_b_ap_params():
    params = {"ssid": "test-suite-b",
              "wpa": "2",
              "wpa_key_mgmt": "WPA-EAP-SUITE-B",
              "rsn_pairwise": "GCMP",
              "group_mgmt_cipher": "BIP-GMAC-128",
              "ieee80211w": "2",
              "ieee8021x": "1",
              "openssl_ciphers": "SUITEB128",
              #"dh_file": "auth_serv/dh.conf",
              "eap_server": "1",
              "eap_user_file": "auth_serv/eap_user.conf",
              "ca_cert": "auth_serv/ec-ca.pem",
              "server_cert": "auth_serv/ec-server.pem",
              "private_key": "auth_serv/ec-server.key"}
    return params

def test_suite_b(dev, apdev):
    """WPA2/GCMP connection at Suite B 128-bit level"""
    check_suite_b_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_ap_params()
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B", ieee80211w="2",
                   openssl_ciphers="SUITEB128",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/ec-ca.pem",
                   client_cert="auth_serv/ec-user.pem",
                   private_key="auth_serv/ec-user.key",
                   pairwise="GCMP", group="GCMP", scan_freq="2412")
    hapd.wait_sta()
    tls_cipher = dev[0].get_status_field("EAP TLS cipher")
    if tls_cipher != "ECDHE-ECDSA-AES128-GCM-SHA256" and \
       tls_cipher != "ECDHE-ECDSA-AES-128-GCM-AEAD":
        raise Exception("Unexpected TLS cipher: " + tls_cipher)
    bss = dev[0].get_bss(apdev[0]['bssid'])
    if 'flags' not in bss:
        raise Exception("Could not get BSS flags from BSS table")
    if "[WPA2-EAP-SUITE-B-GCMP]" not in bss['flags']:
        raise Exception("Unexpected BSS flags: " + bss['flags'])
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=20)
    dev[0].dump_monitor()
    dev[0].request("RECONNECT")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED",
                            "CTRL-EVENT-CONNECTED"], timeout=20)
    if ev is None:
        raise Exception("Roaming with the AP timed out")
    if "CTRL-EVENT-EAP-STARTED" in ev:
        raise Exception("Unexpected EAP exchange")
    conf = hapd.get_config()
    if conf['key_mgmt'] != 'WPA-EAP-SUITE-B':
        raise Exception("Unexpected config key_mgmt: " + conf['key_mgmt'])
    hapd.wait_sta()
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=20)
    dev[0].dump_monitor()
    dev[0].request("RECONNECT")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED",
                            "CTRL-EVENT-CONNECTED"], timeout=20)
    if ev is None:
        raise Exception("Roaming with the AP timed out (2)")
    if "CTRL-EVENT-EAP-STARTED" in ev:
        raise Exception("Unexpected EAP exchange (2)")

def suite_b_as_params():
    params = {}
    params['ssid'] = 'as'
    params['beacon_int'] = '2000'
    params['radius_server_clients'] = 'auth_serv/radius_clients.conf'
    params['radius_server_auth_port'] = '18129'
    params['eap_server'] = '1'
    params['eap_user_file'] = 'auth_serv/eap_user.conf'
    params['ca_cert'] = 'auth_serv/ec-ca.pem'
    params['server_cert'] = 'auth_serv/ec-server.pem'
    params['private_key'] = 'auth_serv/ec-server.key'
    params['openssl_ciphers'] = 'SUITEB128'
    return params

def test_suite_b_radius(dev, apdev):
    """WPA2/GCMP (RADIUS) connection at Suite B 128-bit level"""
    check_suite_b_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_as_params()
    hostapd.add_ap(apdev[1], params)
    params = {"ssid": "test-suite-b",
              "wpa": "2",
              "wpa_key_mgmt": "WPA-EAP-SUITE-B",
              "rsn_pairwise": "GCMP",
              "group_mgmt_cipher": "BIP-GMAC-128",
              "ieee80211w": "2",
              "ieee8021x": "1",
              'auth_server_addr': "127.0.0.1",
              'auth_server_port': "18129",
              'auth_server_shared_secret': "radius",
              'nas_identifier': "nas.w1.fi"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B", ieee80211w="2",
                   openssl_ciphers="SUITEB128",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/ec-ca.pem",
                   client_cert="auth_serv/ec-user.pem",
                   private_key="auth_serv/ec-user.key",
                   pairwise="GCMP", group="GCMP", scan_freq="2412")

def check_suite_b_192_capa(dev, dhe=False):
    if "GCMP-256" not in dev[0].get_capability("pairwise"):
        raise HwsimSkip("GCMP-256 not supported")
    if "BIP-GMAC-256" not in dev[0].get_capability("group_mgmt"):
        raise HwsimSkip("BIP-GMAC-256 not supported")
    if "WPA-EAP-SUITE-B-192" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("WPA-EAP-SUITE-B-192 not supported")
    check_suite_b_tls_lib(dev, dhe=dhe)

def suite_b_192_ap_params():
    params = {"ssid": "test-suite-b",
              "wpa": "2",
              "wpa_key_mgmt": "WPA-EAP-SUITE-B-192",
              "rsn_pairwise": "GCMP-256",
              "group_mgmt_cipher": "BIP-GMAC-256",
              "ieee80211w": "2",
              "ieee8021x": "1",
              "openssl_ciphers": "SUITEB192",
              "eap_server": "1",
              "eap_user_file": "auth_serv/eap_user.conf",
              "ca_cert": "auth_serv/ec2-ca.pem",
              "server_cert": "auth_serv/ec2-server.pem",
              "private_key": "auth_serv/ec2-server.key",
              "tls_flags": "[ENABLE-TLSv1.3]",
              "tls_session_lifetime": "3600"}
    return params

def test_suite_b_192(dev, apdev):
    """WPA2/GCMP-256 connection at Suite B 192-bit level"""
    check_suite_b_192_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_192_ap_params()
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
                   ieee80211w="2",
                   openssl_ciphers="SUITEB192",
                   eap="ttls", identity="pap user",
                   ca_cert="auth_serv/ca.pem",
                   pairwise="GCMP-256", group="GCMP-256", scan_freq="2412",
                   phase1="tls_disable_tlsv1_0=1 tls_disable_tlsv1_1=1 tls_disable_tlsv1_2=1 tls_disable_tlsv1_3=0", phase2="auth=PAP")
    tls_cipher = dev[0].get_status_field("EAP TLS cipher")
    if tls_cipher != "ECDHE-ECDSA-AES256-GCM-SHA384" and \
       tls_cipher != "ECDHE-ECDSA-AES-256-GCM-AEAD" and \
       tls_cipher != "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384" and \
       tls_cipher != "TLS_AES_256_GCM_SHA384":
        raise Exception("Unexpected TLS cipher: " + tls_cipher)
    cipher = dev[0].get_status_field("mgmt_group_cipher")
    if cipher != "BIP-GMAC-256":
        raise Exception("Unexpected mgmt_group_cipher: " + cipher)
    bss = dev[0].get_bss(apdev[0]['bssid'])
    if 'flags' not in bss:
        raise Exception("Could not get BSS flags from BSS table")
    if "[WPA2-EAP-SUITE-B-192-GCMP-256]" not in bss['flags']:
        raise Exception("Unexpected BSS flags: " + bss['flags'])
    hapd.wait_sta()
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=20)
    dev[0].dump_monitor()
    dev[0].request("RECONNECT")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED",
                            "CTRL-EVENT-CONNECTED"], timeout=20)
    if ev is None:
        raise Exception("Roaming with the AP timed out")
    if "CTRL-EVENT-EAP-STARTED" in ev:
        raise Exception("Unexpected EAP exchange")
    conf = hapd.get_config()
    if conf['key_mgmt'] != 'WPA-EAP-SUITE-B-192':
        raise Exception("Unexpected config key_mgmt: " + conf['key_mgmt'])
    hapd.wait_sta()
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=20)
    dev[0].dump_monitor()
    dev[0].request("RECONNECT")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED",
                            "CTRL-EVENT-CONNECTED"], timeout=20)
    if ev is None:
        raise Exception("Roaming with the AP timed out (2)")
    if "CTRL-EVENT-EAP-STARTED" in ev:
        raise Exception("Unexpected EAP exchange (2)")

def test_suite_b_192_radius(dev, apdev):
    """WPA2/GCMP-256 (RADIUS) connection at Suite B 192-bit level"""
    check_suite_b_192_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_as_params()
    params['ca_cert'] = 'auth_serv/ec2-ca.pem'
    params['server_cert'] = 'auth_serv/ec2-server.pem'
    params['private_key'] = 'auth_serv/ec2-server.key'
    params['openssl_ciphers'] = 'SUITEB192'
    hostapd.add_ap(apdev[1], params)
    params = {"ssid": "test-suite-b",
              "wpa": "2",
              "wpa_key_mgmt": "WPA-EAP-SUITE-B-192",
              "rsn_pairwise": "GCMP-256",
              "group_mgmt_cipher": "BIP-GMAC-256",
              "ieee80211w": "2",
              "ieee8021x": "1",
              'auth_server_addr': "127.0.0.1",
              'auth_server_port': "18129",
              'auth_server_shared_secret': "radius",
              'nas_identifier': "nas.w1.fi"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
                   ieee80211w="2",
                   openssl_ciphers="SUITEB192",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/ec2-ca.pem",
                   client_cert="auth_serv/ec2-user.pem",
                   private_key="auth_serv/ec2-user.key",
                   pairwise="GCMP-256", group="GCMP-256", scan_freq="2412")

def test_suite_b_192_radius_and_p256_cert(dev, apdev):
    """Suite B 192-bit level and p256 client cert"""
    check_suite_b_192_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_as_params()
    params['ca_cert'] = 'auth_serv/ec2-ca.pem'
    params['server_cert'] = 'auth_serv/ec2-server.pem'
    params['private_key'] = 'auth_serv/ec2-server.key'
    params['openssl_ciphers'] = 'SUITEB192'
    hostapd.add_ap(apdev[1], params)
    params = {"ssid": "test-suite-b",
              "wpa": "2",
              "wpa_key_mgmt": "WPA-EAP-SUITE-B-192",
              "rsn_pairwise": "GCMP-256",
              "group_mgmt_cipher": "BIP-GMAC-256",
              "ieee80211w": "2",
              "ieee8021x": "1",
              'auth_server_addr': "127.0.0.1",
              'auth_server_port': "18129",
              'auth_server_shared_secret': "radius",
              'nas_identifier': "nas.w1.fi"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
                   ieee80211w="2",
                   #openssl_ciphers="SUITEB192",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/ec2-ca.pem",
                   client_cert="auth_serv/ec2-user-p256.pem",
                   private_key="auth_serv/ec2-user-p256.key",
                   pairwise="GCMP-256", group="GCMP-256", scan_freq="2412",
                   wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
    if ev is None:
        raise Exception("EAP-Failure not reported")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=5)
    if ev is None:
        raise Exception("Disconnection not reported")
    if "reason=23" not in ev:
        raise Exception("Unexpected disconnection reason: " + ev)

def test_suite_b_pmkid_failure(dev, apdev):
    """WPA2/GCMP connection at Suite B 128-bit level and PMKID derivation failure"""
    check_suite_b_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_ap_params()
    hapd = hostapd.add_ap(apdev[0], params)
    with fail_test(dev[0], 1, "rsn_pmkid_suite_b"):
        dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B",
                       ieee80211w="2",
                       openssl_ciphers="SUITEB128",
                       eap="TLS", identity="tls user",
                       ca_cert="auth_serv/ec-ca.pem",
                       client_cert="auth_serv/ec-user.pem",
                       private_key="auth_serv/ec-user.key",
                       pairwise="GCMP", group="GCMP", scan_freq="2412")

def test_suite_b_192_pmkid_failure(dev, apdev):
    """WPA2/GCMP-256 connection at Suite B 192-bit level and PMKID derivation failure"""
    check_suite_b_192_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_192_ap_params()
    hapd = hostapd.add_ap(apdev[0], params)
    with fail_test(dev[0], 1, "rsn_pmkid_suite_b"):
        dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
                       ieee80211w="2",
                       openssl_ciphers="SUITEB192",
                       eap="TLS", identity="tls user",
                       ca_cert="auth_serv/ec2-ca.pem",
                       client_cert="auth_serv/ec2-user.pem",
                       private_key="auth_serv/ec2-user.key",
                       pairwise="GCMP-256", group="GCMP-256", scan_freq="2412")

def test_suite_b_mic_failure(dev, apdev):
    """WPA2/GCMP connection at Suite B 128-bit level and MIC derivation failure"""
    check_suite_b_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_ap_params()
    hapd = hostapd.add_ap(apdev[0], params)
    with fail_test(dev[0], 1, "wpa_eapol_key_mic"):
        dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B",
                       ieee80211w="2",
                       openssl_ciphers="SUITEB128",
                       eap="TLS", identity="tls user",
                       ca_cert="auth_serv/ec-ca.pem",
                       client_cert="auth_serv/ec-user.pem",
                       private_key="auth_serv/ec-user.key",
                       pairwise="GCMP", group="GCMP", scan_freq="2412",
                       wait_connect=False)
        dev[0].wait_disconnected()

def test_suite_b_192_mic_failure(dev, apdev):
    """WPA2/GCMP connection at Suite B 192-bit level and MIC derivation failure"""
    check_suite_b_192_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_192_ap_params()
    hapd = hostapd.add_ap(apdev[0], params)
    with fail_test(dev[0], 1, "wpa_eapol_key_mic"):
        dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
                       ieee80211w="2",
                       openssl_ciphers="SUITEB192",
                       eap="TLS", identity="tls user",
                       ca_cert="auth_serv/ec2-ca.pem",
                       client_cert="auth_serv/ec2-user.pem",
                       private_key="auth_serv/ec2-user.key",
                       pairwise="GCMP-256", group="GCMP-256", scan_freq="2412",
                       wait_connect=False)
        dev[0].wait_disconnected()

def suite_b_192_rsa_ap_params():
    params = {"ssid": "test-suite-b",
              "wpa": "2",
              "wpa_key_mgmt": "WPA-EAP-SUITE-B-192",
              "rsn_pairwise": "GCMP-256",
              "group_mgmt_cipher": "BIP-GMAC-256",
              "ieee80211w": "2",
              "ieee8021x": "1",
              "tls_flags": "[SUITEB]",
              "dh_file": "auth_serv/dh_param_3072.pem",
              "eap_server": "1",
              "eap_user_file": "auth_serv/eap_user.conf",
              "ca_cert": "auth_serv/rsa3072-ca.pem",
              "server_cert": "auth_serv/rsa3072-server.pem",
              "private_key": "auth_serv/rsa3072-server.key"}
    return params

def test_suite_b_192_rsa(dev, apdev):
    """WPA2/GCMP-256 connection at Suite B 192-bit level and RSA"""
    run_suite_b_192_rsa(dev, apdev)

def test_suite_b_192_rsa_ecdhe(dev, apdev):
    """WPA2/GCMP-256 connection at Suite B 192-bit level and RSA (ECDHE)"""
    run_suite_b_192_rsa(dev, apdev, no_dhe=True)

def test_suite_b_192_rsa_dhe(dev, apdev):
    """WPA2/GCMP-256 connection at Suite B 192-bit level and RSA (DHE)"""
    run_suite_b_192_rsa(dev, apdev, no_ecdh=True)

def run_suite_b_192_rsa(dev, apdev, no_ecdh=False, no_dhe=False):
    check_suite_b_192_capa(dev, dhe=no_ecdh)
    dev[0].flush_scan_cache()
    params = suite_b_192_rsa_ap_params()
    if no_ecdh:
        params["tls_flags"] = "[SUITEB-NO-ECDH]"
    if no_dhe:
        del params["dh_file"]
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
                   ieee80211w="2",
                   phase1="tls_suiteb=1",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/rsa3072-ca.pem",
                   client_cert="auth_serv/rsa3072-user.pem",
                   private_key="auth_serv/rsa3072-user.key",
                   pairwise="GCMP-256", group="GCMP-256", scan_freq="2412")
    tls_cipher = dev[0].get_status_field("EAP TLS cipher")
    if tls_cipher != "ECDHE-RSA-AES256-GCM-SHA384" and \
       tls_cipher != "DHE-RSA-AES256-GCM-SHA384" and \
       tls_cipher != "ECDHE-RSA-AES-256-GCM-AEAD" and \
       tls_cipher != "DHE-RSA-AES-256-GCM-AEAD":
        raise Exception("Unexpected TLS cipher: " + tls_cipher)
    cipher = dev[0].get_status_field("mgmt_group_cipher")
    if cipher != "BIP-GMAC-256":
        raise Exception("Unexpected mgmt_group_cipher: " + cipher)
    bss = dev[0].get_bss(apdev[0]['bssid'])
    if 'flags' not in bss:
        raise Exception("Could not get BSS flags from BSS table")
    if "[WPA2-EAP-SUITE-B-192-GCMP-256]" not in bss['flags']:
        raise Exception("Unexpected BSS flags: " + bss['flags'])
    hapd.wait_sta()
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=20)
    dev[0].dump_monitor()
    dev[0].request("RECONNECT")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED",
                            "CTRL-EVENT-CONNECTED"], timeout=20)
    if ev is None:
        raise Exception("Roaming with the AP timed out")
    if "CTRL-EVENT-EAP-STARTED" in ev:
        raise Exception("Unexpected EAP exchange")
    conf = hapd.get_config()
    if conf['key_mgmt'] != 'WPA-EAP-SUITE-B-192':
        raise Exception("Unexpected config key_mgmt: " + conf['key_mgmt'])

def test_suite_b_192_rsa_insufficient_key(dev, apdev):
    """WPA2/GCMP-256 connection at Suite B 192-bit level and RSA with insufficient key length"""
    check_suite_b_192_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_192_rsa_ap_params()
    params["ca_cert"] = "auth_serv/ca.pem"
    params["server_cert"] = "auth_serv/server.pem"
    params["private_key"] = "auth_serv/server.key"
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
                   ieee80211w="2",
                   phase1="tls_suiteb=1",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/ca.pem",
                   client_cert="auth_serv/user.pem",
                   private_key="auth_serv/user.key",
                   pairwise="GCMP-256", group="GCMP-256", scan_freq="2412",
                   wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-TLS-CERT-ERROR"], timeout=10)
    dev[0].request("DISCONNECT")
    if ev is None:
        raise Exception("Certificate error not reported")
    if "reason=11" in ev and "err='Insufficient RSA modulus size'" in ev:
        return
    if "reason=7" in ev and "err='certificate uses insecure algorithm'" in ev:
        return
    raise Exception("Unexpected error reason: " + ev)

def test_suite_b_192_rsa_insufficient_dh(dev, apdev):
    """WPA2/GCMP-256 connection at Suite B 192-bit level and RSA with insufficient DH key length"""
    check_suite_b_192_capa(dev, dhe=True)
    dev[0].flush_scan_cache()
    params = suite_b_192_rsa_ap_params()
    params["tls_flags"] = "[SUITEB-NO-ECDH]"
    params["dh_file"] = "auth_serv/dh.conf"
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
                   ieee80211w="2",
                   phase1="tls_suiteb=1",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/rsa3072-ca.pem",
                   client_cert="auth_serv/rsa3072-user.pem",
                   private_key="auth_serv/rsa3072-user.key",
                   pairwise="GCMP-256", group="GCMP-256", scan_freq="2412",
                   wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STATUS status='local TLS alert'",
                            "CTRL-EVENT-CONNECTED"],
                           timeout=10)
    dev[0].request("DISCONNECT")
    if ev is None:
        raise Exception("DH error not reported")
    if "CTRL-EVENT-CONNECTED" in ev:
        raise Exception("Unexpected connection")
    if "insufficient security" not in ev and "internal error" not in ev:
        raise Exception("Unexpected error reason: " + ev)

def test_suite_b_192_rsa_radius(dev, apdev):
    """WPA2/GCMP-256 (RADIUS) connection at Suite B 192-bit level and RSA (ECDHE)"""
    check_suite_b_192_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_as_params()
    params['ca_cert'] = 'auth_serv/rsa3072-ca.pem'
    params['server_cert'] = 'auth_serv/rsa3072-server.pem'
    params['private_key'] = 'auth_serv/rsa3072-server.key'
    del params['openssl_ciphers']
    params["tls_flags"] = "[SUITEB]"
    hostapd.add_ap(apdev[1], params)
    params = {"ssid": "test-suite-b",
              "wpa": "2",
              "wpa_key_mgmt": "WPA-EAP-SUITE-B-192",
              "rsn_pairwise": "GCMP-256",
              "group_mgmt_cipher": "BIP-GMAC-256",
              "ieee80211w": "2",
              "ieee8021x": "1",
              'auth_server_addr': "127.0.0.1",
              'auth_server_port': "18129",
              'auth_server_shared_secret': "radius",
              'nas_identifier': "nas.w1.fi"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
                   ieee80211w="2",
                   openssl_ciphers="ECDHE-RSA-AES256-GCM-SHA384",
                   phase1="tls_suiteb=1",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/rsa3072-ca.pem",
                   client_cert="auth_serv/rsa3072-user.pem",
                   private_key="auth_serv/rsa3072-user.key",
                   pairwise="GCMP-256", group="GCMP-256",
                   group_mgmt="BIP-GMAC-256", scan_freq="2412")
    tls_cipher = dev[0].get_status_field("EAP TLS cipher")
    if tls_cipher != "ECDHE-RSA-AES256-GCM-SHA384" and \
       tls_cipher != "ECDHE-RSA-AES-256-GCM-AEAD":
        raise Exception("Unexpected TLS cipher: " + tls_cipher)

def test_suite_b_192_rsa_ecdhe_radius_rsa2048_client(dev, apdev):
    """Suite B 192-bit level and RSA (ECDHE) and RSA2048 client"""
    run_suite_b_192_rsa_radius_rsa2048_client(dev, apdev, True)

def test_suite_b_192_rsa_dhe_radius_rsa2048_client(dev, apdev):
    """Suite B 192-bit level and RSA (DHE) and RSA2048 client"""
    run_suite_b_192_rsa_radius_rsa2048_client(dev, apdev, False)

def run_suite_b_192_rsa_radius_rsa2048_client(dev, apdev, ecdhe):
    check_suite_b_192_capa(dev, dhe=not ecdhe)
    dev[0].flush_scan_cache()
    params = suite_b_as_params()
    params['ca_cert'] = 'auth_serv/rsa3072-ca.pem'
    params['server_cert'] = 'auth_serv/rsa3072-server.pem'
    params['private_key'] = 'auth_serv/rsa3072-server.key'
    del params['openssl_ciphers']
    if ecdhe:
        params["tls_flags"] = "[SUITEB]"
        ciphers = "ECDHE-RSA-AES256-GCM-SHA384"
    else:
        params["tls_flags"] = "[SUITEB-NO-ECDH]"
        params["dh_file"] = "auth_serv/dh_param_3072.pem"
        ciphers = "DHE-RSA-AES256-GCM-SHA384"
    hostapd.add_ap(apdev[1], params)
    params = {"ssid": "test-suite-b",
              "wpa": "2",
              "wpa_key_mgmt": "WPA-EAP-SUITE-B-192",
              "rsn_pairwise": "GCMP-256",
              "group_mgmt_cipher": "BIP-GMAC-256",
              "ieee80211w": "2",
              "ieee8021x": "1",
              'auth_server_addr': "127.0.0.1",
              'auth_server_port': "18129",
              'auth_server_shared_secret': "radius",
              'nas_identifier': "nas.w1.fi"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
                   ieee80211w="2",
                   openssl_ciphers=ciphers,
                   phase1="tls_suiteb=1",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/rsa3072-ca.pem",
                   client_cert="auth_serv/rsa3072-user-rsa2048.pem",
                   private_key="auth_serv/rsa3072-user-rsa2048.key",
                   pairwise="GCMP-256", group="GCMP-256",
                   group_mgmt="BIP-GMAC-256", scan_freq="2412",
                   wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
    if ev is None:
        raise Exception("EAP-Failure not reported")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=5)
    if ev is None:
        raise Exception("Disconnection not reported")
    if "reason=23" not in ev:
        raise Exception("Unexpected disconnection reason: " + ev)

def test_openssl_ecdh_curves(dev, apdev):
    """OpenSSL ECDH curve configuration"""
    check_suite_b_192_capa(dev)
    dev[0].flush_scan_cache()
    params = suite_b_192_ap_params()
    params['wpa_key_mgmt'] = "WPA-EAP"
    del params['openssl_ciphers']
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP",
                   ieee80211w="2",
                   openssl_ciphers="SUITEB192",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/ec2-ca.pem",
                   client_cert="auth_serv/ec2-user.pem",
                   private_key="auth_serv/ec2-user.key",
                   pairwise="GCMP-256", group="GCMP-256", scan_freq="2412")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
    hapd.disable()
    hapd.set('openssl_ecdh_curves', 'foo')
    if "FAIL" not in hapd.request("ENABLE"):
        raise Exception("Invalid openssl_ecdh_curves value accepted")
    hapd.set('openssl_ecdh_curves', 'P-384')
    hapd.enable()
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP",
                   ieee80211w="2",
                   openssl_ciphers="SUITEB192",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/ec2-ca.pem",
                   client_cert="auth_serv/ec2-user.pem",
                   private_key="auth_serv/ec2-user.key",
                   pairwise="GCMP-256", group="GCMP-256", scan_freq="2412")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
    # Check with server enforcing P-256 and client allowing only P-384
    hapd.disable()
    hapd.set('openssl_ecdh_curves', 'P-256')
    hapd.enable()
    dev[0].connect("test-suite-b", key_mgmt="WPA-EAP",
                   ieee80211w="2",
                   openssl_ciphers="SUITEB192",
                   eap="TLS", identity="tls user",
                   ca_cert="auth_serv/ec2-ca.pem",
                   client_cert="auth_serv/ec2-user.pem",
                   private_key="auth_serv/ec2-user.key",
                   pairwise="GCMP-256", group="GCMP-256", scan_freq="2412",
                   wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
    if ev is None:
        raise Exception("EAP failure not reported")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
```

| field | value |
|---|---|
| avg_line_length | 42.482353 |
| max_line_length | 135 |
| alphanum_fraction | 0.592322 |

| quality signal | `*_quality_signal` value | flag |
|---|---|---|
| qsc_code_num_words | 3,911 | 0 |
| qsc_code_num_chars | 28,888 | 0 |
| qsc_code_mean_word_length | 4.16313 | 0 |
| qsc_code_frac_words_unique | 0.062132 | null |
| qsc_code_frac_chars_top_2grams | 0.052328 | 0 |
| qsc_code_frac_chars_top_3grams | 0.04201 | 0 |
| qsc_code_frac_chars_top_4grams | 0.021373 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.900565 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.884535 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.860214 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.798551 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.775458 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.765201 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.061452 | 0 |
| qsc_code_frac_chars_whitespace | 0.263189 | 0 |
| qsc_code_size_file_byte | 28,888 | 0 |
| qsc_code_num_lines | 679 | 0 |
| qsc_code_num_chars_line_max | 136 | 0 |
| qsc_code_num_chars_line_mean | 42.544919 | 0 |
| qsc_code_frac_chars_alphabet | 0.7035 | 0 |
| qsc_code_frac_chars_comments | 0.049259 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.751278 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.318961 | 0 |
| qsc_code_frac_chars_long_word_length | 0.094493 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.045997 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.006814 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.06644 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |

| field | value |
|---|---|
| effective | 0 |
| hits | 7 |
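
In this row, `hits` (7) equals the number of flag columns set to 1: the six `qsc_code_frac_chars_dupe_*grams` flags plus `qsc_code_frac_lines_dupe_lines`. The same relationship holds in the rows below (7 and 12). A minimal sketch of recomputing that count, assuming a row has already been loaded into a plain dict; the helper name and the trimmed column list are illustrative, not part of the dump:

```python
# Illustrative sketch: recompute `hits` by counting the per-signal flag
# columns set to 1 in one record. The dict shape is an assumption; the dump
# itself only defines the column names and values.
FLAG_COLUMNS = [
    "qsc_code_frac_chars_dupe_5grams",
    "qsc_code_frac_chars_dupe_6grams",
    "qsc_code_frac_chars_dupe_7grams",
    "qsc_code_frac_chars_dupe_8grams",
    "qsc_code_frac_chars_dupe_9grams",
    "qsc_code_frac_chars_dupe_10grams",
    "qsc_code_frac_lines_dupe_lines",
    # ...the remaining qsc_* flag columns, omitted here for brevity
]

def recompute_hits(record):
    # Null flags (e.g. qsc_code_frac_words_unique) simply do not count.
    return sum(1 for col in FLAG_COLUMNS if record.get(col) == 1)
```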

Row 2: tests/create_message.py from alekseyl1992/pyrobuf

| field | value |
|---|---|
| hexsha | 71e943e7706ae5004e23c310931c544b6cbfef95 |
| size | 3,225 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/create_message.py |
| max_stars_repo_name | alekseyl1992/pyrobuf |
| max_stars_repo_head_hexsha | dc553f2c407d3ea1ca78c6b58e149d05ff811a23 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 578 |
| max_stars_repo_stars_event_min_datetime | 2015-12-17T20:39:31.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-02-15T05:14:03.000Z |
| max_issues_repo_path | tests/create_message.py |
| max_issues_repo_name | alekseyl1992/pyrobuf |
| max_issues_repo_head_hexsha | dc553f2c407d3ea1ca78c6b58e149d05ff811a23 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 121 |
| max_issues_repo_issues_event_min_datetime | 2015-12-19T07:37:32.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-02-22T05:22:55.000Z |
| max_forks_repo_path | tests/create_message.py |
| max_forks_repo_name | alekseyl1992/pyrobuf |
| max_forks_repo_head_hexsha | dc553f2c407d3ea1ca78c6b58e149d05ff811a23 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 82 |
| max_forks_repo_forks_event_min_datetime | 2015-12-19T00:19:28.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-02-21T09:00:21.000Z |

content:
```python
import sys

if sys.version_info.major == 2:
    import messages.test_message_pb2 as google_test

def create_an_test(protover=2):
    # print LIB
    if protover == 2:
        import test_message_proto as an_test
    elif protover == 3:
        import test_message_3_proto as an_test
    test = an_test.Test()
    test.timestamp = 539395200
    test.field = 10689
    test.string_field = "go goats!"
    for i in range(5):
        test.list_fieldx.append(i * 100)
    test.substruct.field1 = 12345
    test.substruct.field2 = "hello"
    test.substruct.field3.field1 = 1419.67
    test.substruct.field3.ss2_field2 = "goodbye"
    test.substruct.list.append(354.94)
    obj = test.substruct.list_object.add()
    obj.field1 = 3.14159
    obj.ss2_field2 = "pi"
    test.substruct.list_string.append("something")
    test.test_ref.timestamp = 539395200
    test.test_ref.field1 = 1111
    test.test_ref.field2 = 1.2345
    test.test_ref.field3 = "foo"
    obj = test.list_ref.add()
    obj.timestamp = 539395200
    obj.field1 = 1111
    obj.field2 = 1.2345
    obj.field3 = "foo"
    test.another_substruct.string_field = "what's up?"
    test.another_substruct.fixed_string_field = "nothing much"
    test.another_substruct.int_field = 24
    test.another_substruct.another_int_field = 87
    test.another_substruct.substruct_ref.timestamp = 539395200
    test.another_substruct.substruct_ref.field1 = 1111
    test.another_substruct.substruct_ref.field2 = 1.2345
    test.another_substruct.substruct_ref.field3 = "foo"
    test.req_field = -80914
    test.negative_32 = -1
    return test

if sys.version_info.major == 2:
    def create_google_test():
        test = google_test.Test()
        test.timestamp = 539395200
        test.field = 10689
        test.string_field = "go goats!"
        for i in range(5):
            test.list_fieldx.append(i * 100)
        test.substruct.field1 = 12345
        test.substruct.field2 = "hello"
        test.substruct.field3.field1 = 1419.67
        test.substruct.field3.ss2_field2 = "goodbye"
        test.substruct.list.append(354.94)
        obj = test.substruct.list_object.add()
        obj.field1 = 3.14159
        obj.ss2_field2 = "pi"
        test.substruct.list_string.append("something")
        test.test_ref.timestamp = 539395200
        test.test_ref.field1 = 1111
        test.test_ref.field2 = 1.2345
        test.test_ref.field3 = "foo"
        obj = test.list_ref.add()
        obj.timestamp = 539395200
        obj.field1 = 1111
        obj.field2 = 1.2345
        obj.field3 = "foo"
        test.another_substruct.string_field = "what's up?"
        test.another_substruct.fixed_string_field = "nothing much"
        test.another_substruct.int_field = 24
        test.another_substruct.another_int_field = 87
        test.another_substruct.substruct_ref.timestamp = 539395200
        test.another_substruct.substruct_ref.field1 = 1111
        test.another_substruct.substruct_ref.field2 = 1.2345
        test.another_substruct.substruct_ref.field3 = "foo"
        test.req_field = -80914
        test.negative_32 = -1
        return test

    def create_buffer():
        test = create_google_test()
        return test.SerializeToString()
```

| field | value |
|---|---|
| avg_line_length | 28.794643 |
| max_line_length | 66 |
| alphanum_fraction | 0.666047 |

| quality signal | `*_quality_signal` value | flag |
|---|---|---|
| qsc_code_num_words | 423 | 0 |
| qsc_code_num_chars | 3,225 | 0 |
| qsc_code_mean_word_length | 4.881797 | 0 |
| qsc_code_frac_words_unique | 0.184397 | null |
| qsc_code_frac_chars_top_2grams | 0.08523 | 0 |
| qsc_code_frac_chars_top_3grams | 0.154964 | 0 |
| qsc_code_frac_chars_top_4grams | 0.112349 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.872639 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.872639 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.851332 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.851332 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.851332 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.851332 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.104387 | 0 |
| qsc_code_frac_chars_whitespace | 0.236589 | 0 |
| qsc_code_size_file_byte | 3,225 | 0 |
| qsc_code_num_lines | 111 | 0 |
| qsc_code_num_chars_line_max | 67 | 0 |
| qsc_code_num_chars_line_mean | 29.054054 | 0 |
| qsc_code_frac_chars_alphabet | 0.734362 | 0 |
| qsc_code_frac_chars_comments | 0.002791 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.843373 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.039203 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.036145 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.048193 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.120482 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |

| field | value |
|---|---|
| effective | 0 |
| hits | 7 |

Row 3: tests/model/hycom_test.py from noaa-ocs-modeling/thyme

| field | value |
|---|---|
| hexsha | 9c1290235f8e4731b32bc7bd8eba191104c2f7f1 |
| size | 25,382 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/model/hycom_test.py |
| max_stars_repo_name | noaa-ocs-modeling/thyme |
| max_stars_repo_head_hexsha | 98f036bd49f8f3bcfc13c0593cd887224d971ac5 |
| max_stars_repo_licenses | ["BSD-2-Clause"] |
| max_stars_count | 5 |
| max_stars_repo_stars_event_min_datetime | 2019-07-09T15:18:52.000Z |
| max_stars_repo_stars_event_max_datetime | 2020-06-03T02:57:50.000Z |
| max_issues_repo_path | tests/model/hycom_test.py |
| max_issues_repo_name | noaa-ocs-modeling/thyme |
| max_issues_repo_head_hexsha | 98f036bd49f8f3bcfc13c0593cd887224d971ac5 |
| max_issues_repo_licenses | ["BSD-2-Clause"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | tests/model/hycom_test.py |
| max_forks_repo_name | noaa-ocs-modeling/thyme |
| max_forks_repo_head_hexsha | 98f036bd49f8f3bcfc13c0593cd887224d971ac5 |
| max_forks_repo_licenses | ["BSD-2-Clause"] |
| max_forks_count | 2 |
| max_forks_repo_forks_event_min_datetime | 2019-10-10T09:54:47.000Z |
| max_forks_repo_forks_event_max_datetime | 2020-05-27T19:11:12.000Z |

content:
```python
from collections import namedtuple

import numpy
import pytest

from thyme.model.hycom import vertical_interpolation

VerticalValues = namedtuple(
    'VerticalValues',
    ['u',
     'v',
     'depth',
     'num_x',
     'num_y',
     'time_index',
     'target_depth_surface',
     'target_depth_default',
     'target_depth_deep',
     'expected_u_target_depth_default',
     'expected_v_target_depth_default',
     'expected_u_target_depth_surface',
     'expected_v_target_depth_surface',
     'expected_u_target_depth_deep',
     'expected_v_target_depth_deep'])

@pytest.fixture
def vertical_values():
    u = numpy.ma.array(
        data=[[[[-0.14429612457752228, -0.1557592898607254, -0.1639900505542755],
[-0.13332930207252502, -0.13850903511047363, -0.14700627326965332]],
[[-0.17176321148872375, -0.18649248778820038, -0.1963706910610199],
[-0.10463598370552063, -0.11853943765163422, -0.12382897734642029]],
[[-0.1860588788986206, -0.2024313062429428, -0.21306045353412628],
[-0.11270792037248611, -0.1282253861427307, -0.13397538661956787]],
[[-0.1959017515182495, -0.21337155997753143, -0.22445325553417206],
[-0.11817137151956558, -0.13474631309509277, -0.14073477685451508]],
[[-0.20371614396572113, -0.222026988863945, -0.23342110216617584],
[-0.12233472615480423, -0.13967420160770416, -0.1458158940076828]],
[[-0.21044501662254333, -0.22945916652679443, -0.2410876452922821],
[-0.12573498487472534, -0.14364899694919586, -0.1499125063419342]],
[[-0.21648438274860382, -0.23612338304519653, -0.2479390650987625],
[-0.12864403426647186, -0.14699482917785645, -0.1533738672733307]],
[[-0.22195807099342346, -0.2421712726354599, -0.2541458308696747],
[-0.13121622800827026, -0.14990080893039703, -0.15639856457710266]],
[[-0.22682039439678192, -0.24759206175804138, -0.2597068250179291],
[-0.1335427612066269, -0.15248820185661316, -0.15910528600215912]],
[[-0.23106896877288818, -0.2523146867752075, -0.2645571827888489],
[-0.13567526638507843, -0.15483982861042023, -0.16156552731990814]],
[[-0.2349419891834259, -0.25623324513435364, -0.26865893602371216],
[-0.137639582157135, -0.15701685845851898, -0.16382475197315216]],
[[-0.23808951675891876, -0.25947651267051697, -0.2720396816730499],
[-0.13947321474552155, -0.15906816720962524, -0.16592034697532654]],
[[-0.24045346677303314, -0.2621067762374878, -0.2748018801212311],
[-0.14117150008678436, -0.1610313355922699, -0.16789506375789642]],
[[-0.2426738142967224, -0.26430219411849976, -0.2770835757255554],
[-0.14282099902629852, -0.16296876966953278, -0.16980309784412384]],
[[-0.24465344846248627, -0.26616188883781433, -0.27903714776039124],
[-0.14445151388645172, -0.16493166983127594, -0.17171379923820496]],
[[-0.2462834268808365, -0.2679187059402466, -0.2808115780353546],
[-0.14620362222194672, -0.16701747477054596, -0.17371375858783722]],
[[-0.24794217944145203, -0.26969125866889954, -0.2825746238231659],
[-0.14814794063568115, -0.1692989468574524, -0.17590203881263733]],
[[-0.24987787008285522, -0.271731436252594, -0.28456488251686096],
[-0.15038220584392548, -0.17185862362384796, -0.17835474014282227]],
[[-0.25258249044418335, -0.27453669905662537, -0.28726720809936523],
[-0.15316414833068848, -0.17494668066501617, -0.18129220604896545]],
[[-0.25681057572364807, -0.2789645791053772, -0.29178279638290405],
[-0.157413512468338, -0.17946366965770721, -0.18561050295829773]],
[[-0.14429612457752228, -0.1557592898607254, -0.1639900505542755],
[-0.13332930207252502, -0.13850903511047363, -0.14700627326965332]],
[[-0.17176321148872375, -0.18649248778820038, -0.1963706910610199],
[-0.10463598370552063, -0.11853943765163422, -0.12382897734642029]],
[[-0.1860588788986206, -0.2024313062429428, -0.21306045353412628],
[-0.11270792037248611, -0.1282253861427307, -0.13397538661956787]],
[[-0.1959017515182495, -0.21337155997753143, -0.22445325553417206],
[-0.11817137151956558, -0.13474631309509277, -0.14073477685451508]],
[[-0.20371614396572113, -0.222026988863945, -0.23342110216617584],
[-0.12233472615480423, -0.13967420160770416, -0.1458158940076828]],
[[-0.21044501662254333, -0.22945916652679443, -0.2410876452922821],
[-0.12573498487472534, -0.14364899694919586, -0.1499125063419342]],
[[-0.21648438274860382, -0.23612338304519653, -0.2479390650987625],
[-0.12864403426647186, -0.14699482917785645, -0.1533738672733307]],
[[-0.22195807099342346, -0.2421712726354599, -0.2541458308696747],
[-0.13121622800827026, -0.14990080893039703, -0.15639856457710266]],
[[-0.22682039439678192, -0.24759206175804138, -0.2597068250179291],
[-0.1335427612066269, -0.15248820185661316, -0.15910528600215912]],
[[-0.23106896877288818, -0.2523146867752075, -0.2645571827888489],
[-0.13567526638507843, -0.15483982861042023, -0.16156552731990814]],
[[-0.2349419891834259, -0.25623324513435364, -0.26865893602371216],
[-0.137639582157135, -0.15701685845851898, -0.16382475197315216]],
[[-0.23808951675891876, -0.25947651267051697, -0.2720396816730499],
[-0.13947321474552155, -0.15906816720962524, -0.16592034697532654]],
[[-0.24045346677303314, -0.2621067762374878, -0.2748018801212311],
[-0.14117150008678436, -0.1610313355922699, -0.16789506375789642]],
[[-0.2426738142967224, -0.26430219411849976, -0.2770835757255554],
[-0.14282099902629852, -0.16296876966953278, -0.16980309784412384]],
[[-0.24465344846248627, -0.26616188883781433, -0.27903714776039124],
[-0.14445151388645172, -0.16493166983127594, -0.17171379923820496]],
[[-0.2462834268808365, -0.2679187059402466, -0.2808115780353546],
[-0.14620362222194672, -0.16701747477054596, -0.17371375858783722]],
[[-0.24794217944145203, -0.26969125866889954, -0.2825746238231659],
[-0.14814794063568115, -0.1692989468574524, -0.17590203881263733]],
[[-0.24987787008285522, -0.271731436252594, -0.28456488251686096],
[-0.15038220584392548, -0.17185862362384796, -0.17835474014282227]],
[[-0.25258249044418335, -0.27453669905662537, -0.28726720809936523],
[-0.15316414833068848, -0.17494668066501617, -0.18129220604896545]],
[[-0.25681057572364807, -0.2789645791053772, -0.29178279638290405],
[-0.157413512468338, -0.17946366965770721, -0.18561050295829773]]]],
mask=[[[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]]]],
fill_value=1.2676506e+30,
dtype='float32'
)
    v = numpy.ma.array(
data=[[[[-0.015962883830070496, -0.014421091414988041, -0.015121867880225182],
[-0.002787810517475009, -0.0023808081168681383, 0.006570389028638601]],
[[-0.01818399503827095, -0.016664784401655197, -0.016933994367718697],
[-0.0032500678207725286, -0.002716263523325324, 0.00792204961180687]],
[[-0.01906922645866871, -0.017558123916387558, -0.017497723922133446],
[-0.0034973544534295797, -0.002890110481530428, 0.008606006391346455]],
[[-0.019496159628033638, -0.017993737012147903, -0.01763434149324894],
[-0.0036818813532590866, -0.003022562013939023, 0.009041817858815193]],
[[-0.01969190314412117, -0.018201317638158798, -0.017547737807035446],
[-0.0038449172861874104, -0.003145989030599594, 0.009346261620521545]],
[[-0.019737228751182556, -0.018264643847942352, -0.0173049159348011],
[-0.004004715010523796, -0.003275529947131872, 0.009564205072820187]],
[[-0.019665192812681198, -0.018219994381070137, -0.01693047396838665],
[-0.004172911401838064, -0.0034219594672322273, 0.009713913314044476]],
[[-0.01949373073875904, -0.01808745414018631, -0.01643970049917698],
[-0.004358743317425251, -0.0035946646239608526, 0.009800488129258156]],
[[-0.019242193549871445, -0.01788627728819847, -0.015856854617595673],
[-0.0045697917230427265, -0.0038014191668480635, 0.009822248481214046]],
[[-0.01894020289182663, -0.017642680555582047, -0.015223790891468525],
[-0.004811229649931192, -0.004046828951686621, 0.009775890968739986]],
[[-0.018637115135788918, -0.017389029264450073, -0.014595868065953255],
[-0.005084861535578966, -0.0043313330970704556, 0.00966064352542162]],
[[-0.018358834087848663, -0.017158253118395805, -0.014026599936187267],
[-0.00539231114089489, -0.004652077332139015, 0.009479362517595291]],
[[-0.018139295279979706, -0.016974788159132004, -0.013552641496062279],
[-0.005724799819290638, -0.005007237195968628, 0.009235722944140434]],
[[-0.01799042522907257, -0.016854379326105118, -0.013189395889639854],
[-0.006096957251429558, -0.0053996010683476925, 0.00892884936183691]],
[[-0.01792767457664013, -0.016808394342660904, -0.01293881144374609],
[-0.006511836312711239, -0.005842543672770262, 0.008547094650566578]],
[[-0.017963677644729614, -0.016854533925652504, -0.012803212739527225],
[-0.007028862368315458, -0.0063656410202383995, 0.00805988721549511]],
[[-0.01813627779483795, -0.017030464485287666, -0.012803859077394009],
[-0.007689429447054863, -0.007028832100331783, 0.007404951844364405]],
[[-0.018535180017352104, -0.01742624118924141, -0.013013389892876148],
[-0.008611081168055534, -0.007948571816086769, 0.006469985470175743]],
[[-0.019416848197579384, -0.01828826777637005, -0.013652382418513298],
[-0.010075435042381287, -0.009396139532327652, 0.005024670157581568]],
[[-0.021592704579234123, -0.02032707817852497, -0.01557872723788023],
[-0.012770703993737698, -0.012113306671380997, 0.0024051270447671413]],
[[-0.015962883830070496, -0.014421091414988041, -0.015121867880225182],
[-0.002787810517475009, -0.0023808081168681383, 0.006570389028638601]],
[[-0.01818399503827095, -0.016664784401655197, -0.016933994367718697],
[-0.0032500678207725286, -0.002716263523325324, 0.00792204961180687]],
[[-0.01906922645866871, -0.017558123916387558, -0.017497723922133446],
[-0.0034973544534295797, -0.002890110481530428, 0.008606006391346455]],
[[-0.019496159628033638, -0.017993737012147903, -0.01763434149324894],
[-0.0036818813532590866, -0.003022562013939023, 0.009041817858815193]],
[[-0.01969190314412117, -0.018201317638158798, -0.017547737807035446],
[-0.0038449172861874104, -0.003145989030599594, 0.009346261620521545]],
[[-0.019737228751182556, -0.018264643847942352, -0.0173049159348011],
[-0.004004715010523796, -0.003275529947131872, 0.009564205072820187]],
[[-0.019665192812681198, -0.018219994381070137, -0.01693047396838665],
[-0.004172911401838064, -0.0034219594672322273, 0.009713913314044476]],
[[-0.01949373073875904, -0.01808745414018631, -0.01643970049917698],
[-0.004358743317425251, -0.0035946646239608526, 0.009800488129258156]],
[[-0.019242193549871445, -0.01788627728819847, -0.015856854617595673],
[-0.0045697917230427265, -0.0038014191668480635, 0.009822248481214046]],
[[-0.01894020289182663, -0.017642680555582047, -0.015223790891468525],
[-0.004811229649931192, -0.004046828951686621, 0.009775890968739986]],
[[-0.018637115135788918, -0.017389029264450073, -0.014595868065953255],
[-0.005084861535578966, -0.0043313330970704556, 0.00966064352542162]],
[[-0.018358834087848663, -0.017158253118395805, -0.014026599936187267],
[-0.00539231114089489, -0.004652077332139015, 0.009479362517595291]],
[[-0.018139295279979706, -0.016974788159132004, -0.013552641496062279],
[-0.005724799819290638, -0.005007237195968628, 0.009235722944140434]],
[[-0.01799042522907257, -0.016854379326105118, -0.013189395889639854],
[-0.006096957251429558, -0.0053996010683476925, 0.00892884936183691]],
[[-0.01792767457664013, -0.016808394342660904, -0.01293881144374609],
[-0.006511836312711239, -0.005842543672770262, 0.008547094650566578]],
[[-0.017963677644729614, -0.016854533925652504, -0.012803212739527225],
[-0.007028862368315458, -0.0063656410202383995, 0.00805988721549511]],
[[-0.01813627779483795, -0.017030464485287666, -0.012803859077394009],
[-0.007689429447054863, -0.007028832100331783, 0.007404951844364405]],
[[-0.018535180017352104, -0.01742624118924141, -0.013013389892876148],
[-0.008611081168055534, -0.007948571816086769, 0.006469985470175743]],
[[-0.019416848197579384, -0.01828826777637005, -0.013652382418513298],
[-0.010075435042381287, -0.009396139532327652, 0.005024670157581568]],
[[-0.021592704579234123, -0.02032707817852497, -0.01557872723788023],
[-0.012770703993737698, -0.012113306671380997, 0.0024051270447671413]]]],
mask=[[[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]],
[[False, False, False],
[False, False, False]]]],
fill_value=1.2676506e+30,
dtype='float32'
)
    depth = numpy.array(
        [
            0, 2, 4, 6, 8, 10, 12, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 90, 100, 125, 150, 200, 250, 300, 350,
            400, 500, 600, 700, 800, 900, 1000, 1250, 1500, 2000, 2500, 3000, 4000, 5000
        ]
    )
    num_x = 3
    num_y = 2
    time_index = 0
    target_depth_surface = 0
    target_depth_default = 4.5
    target_depth_deep = 10
    expected_u_target_depth_default = numpy.ma.array(
        data=[[-0.1885196, -0.20516637, -0.21590865],
              [-0.11407378, -0.12985562, -0.13566523]],
        mask=False,
        fill_value=1e+20)
    expected_v_target_depth_default = numpy.ma.array(
        data=[[-0.01917596, -0.01766703, -0.01753188],
              [-0.00354349, -0.00292322, 0.00871496]],
        mask=False,
        fill_value=1e+20)
    expected_u_target_depth_surface = numpy.ma.array(
        data=[[-0.14429612, -0.15575929, -0.16399005],
              [-0.1333293, -0.13850904, -0.14700627]],
        mask=False,
        fill_value=1e+20)
    expected_v_target_depth_surface = numpy.ma.array(
        data=[[-0.01596288, -0.01442109, -0.01512187],
              [-0.00278781, -0.00238081, 0.00657039]],
        mask=False,
        fill_value=1e+20)
    expected_u_target_depth_deep = numpy.ma.array(
        data=[[-0.21044502, -0.22945917, -0.24108765],
              [-0.12573498, -0.143649, -0.14991251]],
        mask=False,
        fill_value=1e+20)
    expected_v_target_depth_deep = numpy.ma.array(
        data=[[-0.01973723, -0.01826464, -0.01730492],
              [-0.00400472, -0.00327553, 0.00956421]],
        mask=False,
        fill_value=1e+20)
    return VerticalValues(u, v, depth, num_x, num_y, time_index, target_depth_surface, target_depth_default,
                          target_depth_deep, expected_u_target_depth_default, expected_v_target_depth_default,
                          expected_u_target_depth_surface, expected_v_target_depth_surface,
                          expected_u_target_depth_deep, expected_v_target_depth_deep)
def test_vertical_interpolation_target_depth(vertical_values):
    """Test vertical interpolation of u/v to default target depth."""
    u_target_depth, v_target_depth = vertical_interpolation(vertical_values.u, vertical_values.v, vertical_values.depth,
                                                            vertical_values.num_x, vertical_values.num_y,
                                                            vertical_values.time_index,
                                                            vertical_values.target_depth_default)
    print(f"u_target_depth_default: {u_target_depth}")
    print(f"v_target_depth_default: {v_target_depth}")
    assert numpy.allclose(u_target_depth, vertical_values.expected_u_target_depth_default)
    assert numpy.allclose(v_target_depth, vertical_values.expected_v_target_depth_default)

def test_vertical_interpolation_target_depth_at_surface(vertical_values):
    """Test vertical interpolation of u/v to target depth at surface."""
    u_target_depth, v_target_depth = vertical_interpolation(vertical_values.u, vertical_values.v,
                                                            vertical_values.depth,
                                                            vertical_values.num_x, vertical_values.num_y,
                                                            vertical_values.time_index,
                                                            vertical_values.target_depth_surface)
    print(f"u_target_depth_surface: {u_target_depth}")
    print(f"v_target_depth_surface: {v_target_depth}")
    assert numpy.allclose(u_target_depth, vertical_values.expected_u_target_depth_surface)
    assert numpy.allclose(v_target_depth, vertical_values.expected_v_target_depth_surface)

def test_vertical_interpolation_target_depth_deep(vertical_values):
    """Test vertical interpolation of u/v to deeper target depth."""
    u_target_depth, v_target_depth = vertical_interpolation(vertical_values.u, vertical_values.v, vertical_values.depth,
                                                            vertical_values.num_x, vertical_values.num_y,
                                                            vertical_values.time_index,
                                                            vertical_values.target_depth_deep)
    print(f"u_target_depth_deep: {u_target_depth}")
    print(f"v_target_depth_deep: {v_target_depth}")
    assert numpy.allclose(u_target_depth, vertical_values.expected_u_target_depth_deep)
    assert numpy.allclose(v_target_depth, vertical_values.expected_v_target_depth_deep)
```

| field | value |
|---|---|
| avg_line_length | 41.071197 |
| max_line_length | 120 |
| alphanum_fraction | 0.575093 |

| quality signal | `*_quality_signal` value | flag |
|---|---|---|
| qsc_code_num_words | 2,106 | 0 |
| qsc_code_num_chars | 25,382 | 0 |
| qsc_code_mean_word_length | 6.812441 | 0 |
| qsc_code_frac_words_unique | 0.174739 | null |
| qsc_code_frac_chars_top_2grams | 0.333171 | 1 |
| qsc_code_frac_chars_top_3grams | 0.497665 | 1 |
| qsc_code_frac_chars_top_4grams | 0.660765 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.946261 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.93713 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.927441 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.927441 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.910783 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.901373 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.515001 | 1 |
| qsc_code_frac_chars_whitespace | 0.292215 | 0 |
| qsc_code_size_file_byte | 25,382 | 0 |
| qsc_code_num_lines | 617 | 0 |
| qsc_code_num_chars_line_max | 121 | 0 |
| qsc_code_num_chars_line_mean | 41.137763 | 0 |
| qsc_code_frac_chars_alphabet | 0.283607 | 1 |
| qsc_code_frac_chars_comments | 0.007131 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.790588 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.020885 | 0 |
| qsc_code_frac_chars_long_word_length | 0.0108 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0.014118 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.009412 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.009412 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.021176 | 0 |
| qsc_codepython_frac_lines_print | 0.014118 | 0 |

| field | value |
|---|---|
| effective | 0 |
| hits | 12 |

Row 4: src/rewrites/label_preserving.py from j6mes/fever-attacks-emlp-2019

| field | value |
|---|---|
| hexsha | 92c925c11b42de76dba51b3bb3dd6e6fb8b00faf |
| size | 19,668 |
| ext | py |
| lang | Python |
| max_stars_repo_path | src/rewrites/label_preserving.py |
| max_stars_repo_name | j6mes/fever-attacks-emlp-2019 |
| max_stars_repo_head_hexsha | 20c8c39cd91b5e0ae945e101906a50d4bbaecd06 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 3 |
| max_stars_repo_stars_event_min_datetime | 2019-05-04T04:33:44.000Z |
| max_stars_repo_stars_event_max_datetime | 2020-06-22T10:30:16.000Z |
| max_issues_repo_path | src/rewrites/label_preserving.py |
| max_issues_repo_name | j6mes/fever-attacks-emlp-2019 |
| max_issues_repo_head_hexsha | 20c8c39cd91b5e0ae945e101906a50d4bbaecd06 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | src/rewrites/label_preserving.py |
| max_forks_repo_name | j6mes/fever-attacks-emlp-2019 |
| max_forks_repo_head_hexsha | 20c8c39cd91b5e0ae945e101906a50d4bbaecd06 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 1 |
| max_forks_repo_forks_event_min_datetime | 2022-03-28T11:38:30.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-28T11:38:30.000Z |

content:
import re
import spacy
from rewrites.replacement_rule import ReplacementRule
nlp = spacy.load('en_core_web_sm')
class LabelPreservingIsAReplacementRule1(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) is a (.+)", instance["claim"])
matches2 = re.match(r"(.+) is an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
if matches1 is not None:
new_claim = "There exists a {0} called {1}.".format(matches1.group(2).replace(".",""),matches1.group(1))
else:
new_claim = "There exists an {0} called {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.exists.a.called"
class LabelPreservingIsAReplacementRule3(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) is a (.+)", instance["claim"])
matches2 = re.match(r"(.+) is an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
if matches1 is not None:
new_claim = "There exists a {0} that goes by the name of {1}.".format(matches1.group(2).replace(".",""),matches1.group(1))
else:
new_claim = "There exists an {0} that goes by the name of {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.exists.a.that.goes.by.name.of"
class LabelPreservingIsAReplacementRule2(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) is a (.+)", instance["claim"])
matches2 = re.match(r"(.+) is an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
if matches1 is not None:
new_claim = "There is a {0} called {1}.".format(matches1.group(2).replace(".", ""), matches1.group(1))
else:
new_claim = "There is an {0} called {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.is.a.called"
class LabelPreservingIsAReplacementRule4(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) is a (.+)", instance["claim"])
matches2 = re.match(r"(.+) is an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
if matches1 is not None:
new_claim = "There exists a {0} called {1}.".format(matches1.group(2).replace(".",""),matches1.group(1))
else:
new_claim = "There exists an {0} called {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.exists.a.called.prn"
class LabelPreservingIsAReplacementRule5(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) is a (.+)", instance["claim"])
matches2 = re.match(r"(.+) is an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
doc = nlp(instance["claim"])
is_person = any([e.label_ in ["PER"] for e in doc.ents])
if is_person:
return None
if matches1 is not None:
new_claim = "There exists a {0}, it goes by the name of {1}.".format(matches1.group(2).replace(".",""),matches1.group(1))
else:
new_claim = "There exists an {0}, it goes by the name of {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.exists.a.that.goes.by.name.of.prn"
class LabelPreservingIsAReplacementRule6(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) is a (.+)", instance["claim"])
matches2 = re.match(r"(.+) is an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
doc = nlp(instance["claim"])
is_person = any([e.label_ in ["PERSON"] for e in doc.ents])
if matches1 is not None:
if is_person:
new_claim = "There is a {0}, they are called {1}.".format(matches1.group(2).replace(".", ""), matches1.group(1))
else:
new_claim = "There is a {0}, it is called {1}.".format(matches1.group(2).replace(".", ""), matches1.group(1))
else:
if is_person:
new_claim = "There is an {0}, they are called {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
else:
new_claim = "There is an {0}, it is called {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.is.a.called.prn"
class LabelPreservingWasAReplacementRule1(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) was a (.+)", instance["claim"])
matches2 = re.match(r"(.+) was an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
if matches1 is not None:
new_claim = "There existed a {0} called {1}.".format(matches1.group(2).replace(".",""),matches1.group(1))
else:
new_claim = "There existed an {0} called {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.existed.a.called"
class LabelPreservingWasAReplacementRule3(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) was a (.+)", instance["claim"])
matches2 = re.match(r"(.+) was an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
if matches1 is not None:
new_claim = "There existed a {0} that went by the name of {1}.".format(matches1.group(2).replace(".",""),matches1.group(1))
else:
new_claim = "There existed an {0} that went by the name of {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.existed.a.that.went.by.name.of"
class LabelPreservingWasAReplacementRule2(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) was a (.+)", instance["claim"])
matches2 = re.match(r"(.+) was an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
if matches1 is not None:
new_claim = "There was a {0} called {1}.".format(matches1.group(2).replace(".", ""), matches1.group(1))
else:
new_claim = "There was an {0} called {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.was.a.called"
class LabelPreservingWasAReplacementRule4(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) was a (.+)", instance["claim"])
matches2 = re.match(r"(.+) was an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
doc = nlp(instance["claim"])
is_person = any([e.label_ in ["PER"] for e in doc.ents])
if is_person:
return None
if matches1 is not None:
new_claim = "There existed a {0}, it was called {1}.".format(matches1.group(2).replace(".",""),matches1.group(1))
else:
new_claim = "There existed an {0}, it was called {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.existed.a.called.prn"
class LabelPreservingWasAReplacementRule5(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) was a (.+)", instance["claim"])
matches2 = re.match(r"(.+) was an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
doc = nlp(instance["claim"])
is_person = any([e.label_ in ["PER"] for e in doc.ents])
if is_person:
return None
if matches1 is not None:
new_claim = "There existed a {0}, it went by the name of {1}.".format(matches1.group(2).replace(".",""),matches1.group(1))
else:
new_claim = "There existed an {0}, it went by the name of {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.existed.a.that.went.by.name.of.prn"
class LabelPreservingWasAReplacementRule6(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) was a (.+)", instance["claim"])
matches2 = re.match(r"(.+) was an (.+)", instance["claim"])
if matches1 is None and matches2 is None:
return None
doc = nlp(instance["claim"])
is_person = any([e.label_ in ["PER"] for e in doc.ents])
if is_person:
return None
if matches1 is not None:
new_claim = "There was a {0}, it was called {1}.".format(matches1.group(2).replace(".", ""), matches1.group(1))
else:
new_claim = "There was an {0}, it was called {1}.".format(matches2.group(2).replace(".", ""), matches2.group(1))
instance["claim"] = new_claim
return instance
def name(self):
return "there.was.a.called.prn"
class LabelPreservingDirectedBy1(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) (?:was|is)? directed by (.+)", instance["claim"])
if matches1 is None:
return None
new_claim = "There is a movie called {0} which is directed by {1}.".format(matches1.group(1).replace(".", ""), matches1.group(2).replace(".", ""))
instance["claim"] = new_claim
return instance
def name(self):
return "directedby1"
class LabelPreservingDirectedBy4(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) (?:was|is)? directed by (.+)", instance["claim"])
if matches1 is None:
return None
new_claim = "{1} is the director of {0}.".format(matches1.group(1).replace(".", ""), matches1.group(2).replace(".", ""))
instance["claim"] = new_claim
return instance
def name(self):
return "directedby4"
class LabelPreservingDirectedBy5(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) (?:was|is)? directed by (.+)", instance["claim"])
if matches1 is None:
return None
new_claim = "{1} was the director of {0}.".format(matches1.group(1).replace(".", ""), matches1.group(2).replace(".", ""))
instance["claim"] = new_claim
return instance
def name(self):
return "directedby5"
class LabelPreservingDirectedBy2(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) (?:was|is)? directed by (.+)", instance["claim"])
if matches1 is None:
return None
new_claim = "There is a director, {0}, who was involved in the production of {1}.".format(matches1.group(2).replace(".", ""), matches1.group(1).replace(".", ""))
instance["claim"] = new_claim
return instance
def name(self):
return "directedby2"
class LabelPreservingDirectedBy3(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) (?:was|is)? directed by (.+)", instance["claim"])
if matches1 is None:
return None
new_claim = "There is a person involved in the movie industry, {0}, who was the director of {1}.".format(matches1.group(2).replace(".", ""), matches1.group(1).replace(".", ""))
instance["claim"] = new_claim
return instance
def name(self):
return "directedby3"
class LabelPreservingStarredIn1(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) (?:starred|stars) in (.+)", instance["claim"])
if matches1 is None:
return None
new_claim = "There is a person, {0}, that starred in {1}.".format(matches1.group(1).replace(".", ""), matches1.group(2).replace(".", ""))
instance["claim"] = new_claim
return instance
def name(self):
return "starredin1"
class LabelPreservingStarredIn2(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) (?:starred|stars) in (.+)", instance["claim"])
if matches1 is None:
return None
new_claim = "There is a person, {0}, that took a leading acting role in {1}.".format(matches1.group(1).replace(".", ""), matches1.group(2).replace(".", ""))
instance["claim"] = new_claim
return instance
def name(self):
return "starredin2"
class LabelPreservingAmerican(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) an American (.+)", instance["claim"])
if matches1 is None:
return None
new_claim = "{0} {1} that originated from the United States.".format(matches1.group(1).replace(".", ""), matches1.group(2).replace(".", ""))
instance["claim"] = new_claim
return instance
def name(self):
return "american"
class LabelPreservingBirth1(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) (?:was|is) born (?:in|on)? (.+)", instance["claim"])
if matches1 is None:
return None
doc = nlp(instance["claim"])
is_place = any([e.label_ in ["GPE","LOC"] for e in doc.ents])
is_time = any([e.label_ in ["TIME","DATE","ORDINAL", "CARDINAL"] for e in doc.ents])
if is_place and not is_time:
new_claim = "There exists a place, {1}, that is the birthplace of the person {0}.".format(matches1.group(1).replace(".", ""),
matches1.group(2).replace(".", ""))
elif is_time and not is_place:
new_claim = "{1} is the approximate time at which the person {0} was born.".format(matches1.group(1).replace(".", ""),
matches1.group(2).replace(".", ""))
else:
return None
instance["claim"] = new_claim
return instance
def name(self):
return "birth1"
class LabelPreservingBirth2(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) (?:was|is) born (?:in|on)? (.+)", instance["claim"])
if matches1 is None:
return None
doc = nlp(instance["claim"])
is_place = any([e.label_ in ["GPE","LOC"] for e in doc.ents])
is_time = any([e.label_ in ["TIME","DATE","ORDINAL", "CARDINAL"] for e in doc.ents])
if is_place and not is_time:
new_claim = "There exists a place, {1}, that is where the person {0} started living.".format(matches1.group(1).replace(".", ""),
matches1.group(2).replace(".", ""))
elif is_time and not is_place:
new_claim = "{1} is the approximate time at which the person {0} started living.".format(matches1.group(1).replace(".", ""),
matches1.group(2).replace(".", ""))
else:
return None
instance["claim"] = new_claim
return instance
def name(self):
return "birth2"
class LabelPreservingDeath1(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) died (?:in|on) (.+)", instance["claim"])
if matches1 is None:
return None
doc = nlp(instance["claim"])
is_place = any([e.label_ in ["GPE","LOC"] for e in doc.ents])
is_time = any([e.label_ in ["TIME","DATE","ORDINAL", "CARDINAL"] for e in doc.ents])
if is_place and not is_time:
new_claim = "There exists a place, {1}, that is the place where the person {0} became deceased.".format(matches1.group(1).replace(".", ""),
matches1.group(2).replace(".", ""))
elif is_time and not is_place:
new_claim = "{1} is the approximate time at which the person {0} became deceased.".format(matches1.group(1).replace(".", ""),
matches1.group(2).replace(".", ""))
else:
return None
instance["claim"] = new_claim
return instance
def name(self):
return "death1"
class LabelPreservingDeath2(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) died (?:in|on) (.+)", instance["claim"])
if matches1 is None:
return None
doc = nlp(instance["claim"])
is_place = any([e.label_ in ["GPE","LOC"] for e in doc.ents])
is_time = any([e.label_ in ["TIME","DATE","ORDINAL", "CARDINAL"] for e in doc.ents])
if is_place and not is_time:
new_claim = "There exists a place, {1}, that is the place where the person {0} died.".format(matches1.group(1).replace(".", ""),
matches1.group(2).replace(".", ""))
elif is_time and not is_place:
new_claim = "{1} is the approximate time at which the person {0} died.".format(matches1.group(1).replace(".", ""),
matches1.group(2).replace(".", ""))
else:
return None
instance["claim"] = new_claim
return instance
def name(self):
return "death2"
class LabelPreservingDeath3(ReplacementRule):
def _process(self, instance):
matches1 = re.match(r"(.+) died (?:in|on) (.+)", instance["claim"])
if matches1 is None:
return None
doc = nlp(instance["claim"])
is_place = any([e.label_ in ["GPE","LOC"] for e in doc.ents])
is_time = any([e.label_ in ["TIME","DATE","ORDINAL", "CARDINAL"] for e in doc.ents])
if is_place and not is_time:
new_claim = "There exists a place, {1}, that is the place where the person {0} took their final breath.".format(matches1.group(1).replace(".", ""),
matches1.group(2).replace(".", ""))
elif is_time and not is_place:
new_claim = "{1} is the approximate time at which the person {0} took their final breath.".format(matches1.group(1).replace(".", ""),
matches1.group(2).replace(".", ""))
else:
return None
instance["claim"] = new_claim
return instance
def name(self):
return "death3"
| 36.021978
| 184
| 0.565131
| 2,350
| 19,668
| 4.664255
| 0.060426
| 0.085394
| 0.052185
| 0.059392
| 0.883314
| 0.88012
| 0.875924
| 0.871362
| 0.871362
| 0.862695
| 0
| 0.027314
| 0.281472
| 19,668
| 545
| 185
| 36.088073
| 0.748302
| 0
| 0
| 0.744681
| 0
| 0.007979
| 0.197254
| 0.014594
| 0
| 0
| 0
| 0
| 0
| 1
| 0.132979
| false
| 0
| 0.007979
| 0.066489
| 0.430851
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92ea0fd8d39752368d0ad54406852745e7a65297
| 42,432
|
py
|
Python
|
tests/components/flux_led/test_light.py
|
GuyKh/core
|
859bcb6eb4dbb9a8b87b6e4e888e074502db5df1
|
[
"Apache-2.0"
] | 6
|
2020-07-07T21:51:24.000Z
|
2022-02-11T14:27:44.000Z
|
tests/components/flux_led/test_light.py
|
GuyKh/core
|
859bcb6eb4dbb9a8b87b6e4e888e074502db5df1
|
[
"Apache-2.0"
] | 100
|
2020-06-17T22:22:41.000Z
|
2022-03-31T06:24:19.000Z
|
tests/components/flux_led/test_light.py
|
GuyKh/core
|
859bcb6eb4dbb9a8b87b6e4e888e074502db5df1
|
[
"Apache-2.0"
] | 8
|
2020-09-15T02:33:39.000Z
|
2021-09-25T20:25:30.000Z
|
"""Tests for light platform."""
from datetime import timedelta
from unittest.mock import AsyncMock, Mock
from flux_led.const import (
COLOR_MODE_ADDRESSABLE as FLUX_COLOR_MODE_ADDRESSABLE,
COLOR_MODE_CCT as FLUX_COLOR_MODE_CCT,
COLOR_MODE_DIM as FLUX_COLOR_MODE_DIM,
COLOR_MODE_RGB as FLUX_COLOR_MODE_RGB,
COLOR_MODE_RGBW as FLUX_COLOR_MODE_RGBW,
COLOR_MODE_RGBWW as FLUX_COLOR_MODE_RGBWW,
COLOR_MODES_RGB_W as FLUX_COLOR_MODES_RGB_W,
)
import pytest
from homeassistant.components import flux_led
from homeassistant.components.flux_led.const import (
CONF_COLORS,
CONF_CUSTOM_EFFECT,
CONF_CUSTOM_EFFECT_COLORS,
CONF_CUSTOM_EFFECT_SPEED_PCT,
CONF_CUSTOM_EFFECT_TRANSITION,
CONF_DEVICES,
CONF_SPEED_PCT,
CONF_TRANSITION,
DOMAIN,
MODE_AUTO,
TRANSITION_JUMP,
)
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_MODE,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_EFFECT_LIST,
ATTR_HS_COLOR,
ATTR_RGB_COLOR,
ATTR_RGBW_COLOR,
ATTR_RGBWW_COLOR,
ATTR_SUPPORTED_COLOR_MODES,
ATTR_WHITE,
DOMAIN as LIGHT_DOMAIN,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
CONF_MODE,
CONF_NAME,
CONF_PLATFORM,
CONF_PROTOCOL,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from . import (
DEFAULT_ENTRY_TITLE,
IP_ADDRESS,
MAC_ADDRESS,
_mocked_bulb,
_patch_discovery,
_patch_wifibulb,
async_mock_device_turn_off,
async_mock_device_turn_on,
)
from tests.common import MockConfigEntry, async_fire_time_changed
async def test_light_unique_id(hass: HomeAssistant) -> None:
"""Test a light unique id."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
entity_registry = er.async_get(hass)
assert entity_registry.async_get(entity_id).unique_id == MAC_ADDRESS
state = hass.states.get(entity_id)
assert state.state == STATE_ON
async def test_light_goes_unavailable_and_recovers(hass: HomeAssistant) -> None:
"""Test a light goes unavailable and then recovers."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
entity_registry = er.async_get(hass)
assert entity_registry.async_get(entity_id).unique_id == MAC_ADDRESS
state = hass.states.get(entity_id)
assert state.state == STATE_ON
now = utcnow()
bulb.async_update = AsyncMock(side_effect=RuntimeError)
for i in range(10, 50, 10):
async_fire_time_changed(hass, now + timedelta(seconds=i))
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == STATE_UNAVAILABLE
bulb.async_update = AsyncMock()
for i in range(60, 100, 10):
async_fire_time_changed(hass, now + timedelta(seconds=i))
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == STATE_ON
async def test_light_no_unique_id(hass: HomeAssistant) -> None:
"""Test a light without a unique id."""
config_entry = MockConfigEntry(
domain=DOMAIN, data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE}
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
with _patch_discovery(no_device=True), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
entity_registry = er.async_get(hass)
assert entity_registry.async_get(entity_id) is None
state = hass.states.get(entity_id)
assert state.state == STATE_ON
@pytest.mark.parametrize(
"protocol,sw_version,model_num,model",
[
("LEDENET_ORIGINAL", 1, 0x01, "Original LEDEDNET (0x35)"),
("LEDENET", 8, 0x33, "Magic Home Branded RGB Controller (0x33)"),
],
)
async def test_light_device_registry(
hass: HomeAssistant, protocol: str, sw_version: int, model_num: int, model: str
) -> None:
"""Test a light device registry entry."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.version_num = sw_version
bulb.protocol = protocol
bulb.model_num = model_num
bulb.model = model
with _patch_discovery(no_device=True), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
device_registry = dr.async_get(hass)
device = device_registry.async_get_device(
identifiers={}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}
)
assert device.sw_version == str(sw_version)
assert device.model == model
async def test_rgb_light(hass: HomeAssistant) -> None:
"""Test an rgb light."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.raw_state = bulb.raw_state._replace(model_num=0x33) # RGB only model
bulb.color_modes = {FLUX_COLOR_MODE_RGB}
bulb.color_mode = FLUX_COLOR_MODE_RGB
with _patch_discovery(no_device=True), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "rgb"
assert attributes[ATTR_EFFECT_LIST] == bulb.effect_list
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["rgb"]
assert attributes[ATTR_HS_COLOR] == (0, 100)
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
assert hass.states.get(entity_id).state == STATE_OFF
bulb.brightness = 0
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_RGB_COLOR: (10, 10, 30)},
blocking=True,
)
# If the bulb is off and we are using existing brightness,
# it has to be at least 1 or the bulb won't turn on
bulb.async_set_levels.assert_called_with(10, 10, 30, brightness=1)
bulb.async_set_levels.reset_mock()
bulb.async_turn_on.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100},
blocking=True,
)
# If it's off and the device requires the turn on
# command before setting brightness, we need to make sure it's called
bulb.async_turn_on.assert_called_once()
bulb.async_set_brightness.assert_called_with(100)
bulb.async_set_brightness.reset_mock()
await async_mock_device_turn_on(hass, bulb)
assert hass.states.get(entity_id).state == STATE_ON
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_RGB_COLOR: (10, 10, 30)},
blocking=True,
)
# If the bulb is on and we are using existing brightness,
# a brightness of 0 means we could not read it because an
# effect is in progress, so we use 255
bulb.async_set_levels.assert_called_with(10, 10, 30, brightness=255)
bulb.async_set_levels.reset_mock()
bulb.brightness = 128
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_HS_COLOR: (10, 30)},
blocking=True,
)
bulb.async_set_levels.assert_called_with(255, 191, 178, brightness=128)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "random"},
blocking=True,
)
bulb.async_set_effect.assert_called_once()
bulb.async_set_effect.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "purple_fade"},
blocking=True,
)
bulb.async_set_effect.assert_called_with("purple_fade", 50, 50)
bulb.async_set_effect.reset_mock()
async def test_rgb_light_auto_on(hass: HomeAssistant) -> None:
"""Test an rgb light that does not need the turn on command sent."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.requires_turn_on = False
bulb.raw_state = bulb.raw_state._replace(model_num=0x33) # RGB only model
bulb.color_modes = {FLUX_COLOR_MODE_RGB}
bulb.color_mode = FLUX_COLOR_MODE_RGB
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "rgb"
assert attributes[ATTR_EFFECT_LIST] == bulb.effect_list
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["rgb"]
assert attributes[ATTR_HS_COLOR] == (0, 100)
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
assert hass.states.get(entity_id).state == STATE_OFF
bulb.brightness = 0
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_RGB_COLOR: (10, 10, 30)},
blocking=True,
)
# If the bulb is off and we are using existing brightness,
# it has to be at least 1 or the bulb won't turn on
bulb.async_turn_on.assert_not_called()
bulb.async_set_levels.assert_called_with(10, 10, 30, brightness=1)
bulb.async_set_levels.reset_mock()
bulb.async_turn_on.reset_mock()
# Should still be called with no kwargs
await hass.services.async_call(
LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_on.assert_called_once()
await async_mock_device_turn_on(hass, bulb)
assert hass.states.get(entity_id).state == STATE_ON
bulb.async_turn_on.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100},
blocking=True,
)
bulb.async_turn_on.assert_not_called()
bulb.async_set_brightness.assert_called_with(100)
bulb.async_set_brightness.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_RGB_COLOR: (10, 10, 30)},
blocking=True,
)
# If the bulb is on and we are using existing brightness,
# a brightness of 0 means we could not read it because an
# effect is in progress, so we use 255
bulb.async_turn_on.assert_not_called()
bulb.async_set_levels.assert_called_with(10, 10, 30, brightness=255)
bulb.async_set_levels.reset_mock()
bulb.brightness = 128
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_HS_COLOR: (10, 30)},
blocking=True,
)
bulb.async_turn_on.assert_not_called()
bulb.async_set_levels.assert_called_with(255, 191, 178, brightness=128)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "random"},
blocking=True,
)
bulb.async_turn_on.assert_not_called()
bulb.async_set_effect.assert_called_once()
bulb.async_set_effect.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "purple_fade"},
blocking=True,
)
bulb.async_turn_on.assert_not_called()
bulb.async_set_effect.assert_called_with("purple_fade", 50, 50)
bulb.async_set_effect.reset_mock()
async def test_rgb_cct_light(hass: HomeAssistant) -> None:
"""Test an rgb cct light."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.raw_state = bulb.raw_state._replace(model_num=0x35) # RGB & CCT model
bulb.color_modes = {FLUX_COLOR_MODE_RGB, FLUX_COLOR_MODE_CCT}
bulb.color_mode = FLUX_COLOR_MODE_RGB
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "rgb"
assert attributes[ATTR_EFFECT_LIST] == bulb.effect_list
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "rgb"]
assert attributes[ATTR_HS_COLOR] == (0, 100)
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
assert hass.states.get(entity_id).state == STATE_OFF
await hass.services.async_call(
LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_on.assert_called_once()
bulb.async_turn_on.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100},
blocking=True,
)
bulb.async_set_brightness.assert_called_with(100)
bulb.async_set_brightness.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_HS_COLOR: (10, 30)},
blocking=True,
)
bulb.async_set_levels.assert_called_with(255, 191, 178, brightness=128)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "random"},
blocking=True,
)
bulb.async_set_effect.assert_called_once()
bulb.async_set_effect.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "purple_fade"},
blocking=True,
)
bulb.async_set_effect.assert_called_with("purple_fade", 50, 50)
bulb.async_set_effect.reset_mock()
bulb.color_mode = FLUX_COLOR_MODE_CCT
bulb.getWhiteTemperature = Mock(return_value=(5000, 128))
bulb.color_temp = 5000
bulb.raw_state = bulb.raw_state._replace(
red=0, green=0, blue=0, warm_white=1, cool_white=2
)
await async_mock_device_turn_on(hass, bulb)
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "color_temp"
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "rgb"]
assert attributes[ATTR_COLOR_TEMP] == 200
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 370},
blocking=True,
)
bulb.async_set_white_temp.assert_called_with(2702, 128)
bulb.async_set_white_temp.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 255},
blocking=True,
)
bulb.async_set_brightness.assert_called_with(255)
bulb.async_set_brightness.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 128},
blocking=True,
)
bulb.async_set_brightness.assert_called_with(128)
bulb.async_set_brightness.reset_mock()
async def test_rgbw_light(hass: HomeAssistant) -> None:
"""Test an rgbw light."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.color_modes = {FLUX_COLOR_MODE_RGBW}
bulb.color_mode = FLUX_COLOR_MODE_RGBW
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "rgbw"
assert attributes[ATTR_EFFECT_LIST] == bulb.effect_list
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["rgbw"]
assert attributes[ATTR_RGB_COLOR] == (255, 42, 42)
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
assert hass.states.get(entity_id).state == STATE_OFF
await hass.services.async_call(
LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_on.assert_called_once()
bulb.async_turn_on.reset_mock()
bulb.is_on = True
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100},
blocking=True,
)
bulb.async_set_brightness.assert_called_with(100)
bulb.async_set_brightness.reset_mock()
state = hass.states.get(entity_id)
assert state.state == STATE_ON
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{
ATTR_ENTITY_ID: entity_id,
ATTR_RGBW_COLOR: (255, 255, 255, 255),
ATTR_BRIGHTNESS: 128,
},
blocking=True,
)
bulb.async_set_levels.assert_called_with(128, 128, 128, 128)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_RGBW_COLOR: (255, 255, 255, 255)},
blocking=True,
)
bulb.async_set_levels.assert_called_with(255, 255, 255, 255)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_RGBW_COLOR: (255, 191, 178, 0)},
blocking=True,
)
bulb.async_set_levels.assert_called_with(255, 191, 178, 0)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "random"},
blocking=True,
)
bulb.async_set_effect.assert_called_once()
bulb.async_set_effect.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "purple_fade", ATTR_BRIGHTNESS: 255},
blocking=True,
)
bulb.async_set_effect.assert_called_with("purple_fade", 50, 100)
bulb.async_set_effect.reset_mock()
async def test_rgb_or_w_light(hass: HomeAssistant) -> None:
"""Test an rgb or w light."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.color_modes = FLUX_COLOR_MODES_RGB_W
bulb.color_mode = FLUX_COLOR_MODE_RGB
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "rgb"
assert attributes[ATTR_EFFECT_LIST] == bulb.effect_list
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["rgb", "white"]
assert attributes[ATTR_RGB_COLOR] == (255, 0, 0)
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
assert hass.states.get(entity_id).state == STATE_OFF
await hass.services.async_call(
LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_on.assert_called_once()
bulb.async_turn_on.reset_mock()
bulb.is_on = True
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100},
blocking=True,
)
bulb.async_set_brightness.assert_called_with(100)
bulb.async_set_brightness.reset_mock()
state = hass.states.get(entity_id)
assert state.state == STATE_ON
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{
ATTR_ENTITY_ID: entity_id,
ATTR_RGB_COLOR: (255, 255, 255),
ATTR_BRIGHTNESS: 128,
},
blocking=True,
)
bulb.async_set_levels.assert_called_with(255, 255, 255, brightness=128)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "random"},
blocking=True,
)
bulb.async_set_effect.assert_called_once()
bulb.async_set_effect.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "purple_fade", ATTR_BRIGHTNESS: 255},
blocking=True,
)
bulb.async_set_effect.assert_called_with("purple_fade", 50, 100)
bulb.async_set_effect.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{
ATTR_ENTITY_ID: entity_id,
ATTR_WHITE: 128,
},
blocking=True,
)
bulb.async_set_levels.assert_called_with(w=128)
bulb.async_set_levels.reset_mock()
bulb.color_mode = FLUX_COLOR_MODE_DIM
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{
ATTR_ENTITY_ID: entity_id,
ATTR_BRIGHTNESS: 100,
},
blocking=True,
)
bulb.async_set_brightness.assert_called_with(100)
bulb.async_set_brightness.reset_mock()
async def test_rgbcw_light(hass: HomeAssistant) -> None:
"""Test an rgbcw light."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.raw_state = bulb.raw_state._replace(warm_white=1, cool_white=2)
bulb.color_modes = {FLUX_COLOR_MODE_RGBWW, FLUX_COLOR_MODE_CCT}
bulb.color_mode = FLUX_COLOR_MODE_RGBWW
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "rgbww"
assert attributes[ATTR_EFFECT_LIST] == bulb.effect_list
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "rgbww"]
assert attributes[ATTR_HS_COLOR] == (3.237, 94.51)
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
assert hass.states.get(entity_id).state == STATE_OFF
await hass.services.async_call(
LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_on.assert_called_once()
bulb.async_turn_on.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100},
blocking=True,
)
bulb.async_set_brightness.assert_called_with(100)
bulb.async_set_brightness.reset_mock()
bulb.is_on = True
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{
ATTR_ENTITY_ID: entity_id,
ATTR_RGBWW_COLOR: (255, 255, 255, 0, 255),
ATTR_BRIGHTNESS: 128,
},
blocking=True,
)
bulb.async_set_levels.assert_called_with(192, 192, 192, 192, 0)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_RGBWW_COLOR: (255, 255, 255, 255, 50)},
blocking=True,
)
bulb.async_set_levels.assert_called_with(255, 255, 255, 50, 255)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154},
blocking=True,
)
bulb.async_set_levels.assert_called_with(r=0, b=0, g=0, w=0, w2=127)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154, ATTR_BRIGHTNESS: 255},
blocking=True,
)
bulb.async_set_levels.assert_called_with(r=0, b=0, g=0, w=0, w2=255)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 290},
blocking=True,
)
bulb.async_set_levels.assert_called_with(r=0, b=0, g=0, w=102, w2=25)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_RGBWW_COLOR: (255, 191, 178, 0, 0)},
blocking=True,
)
bulb.async_set_levels.assert_called_with(255, 191, 178, 0, 0)
bulb.async_set_levels.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "random"},
blocking=True,
)
bulb.async_set_effect.assert_called_once()
bulb.async_set_effect.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "purple_fade"},
blocking=True,
)
bulb.async_set_effect.assert_called_with("purple_fade", 50, 50)
bulb.async_set_effect.reset_mock()
bulb.effect = "purple_fade"
bulb.brightness = 128
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 255},
blocking=True,
)
bulb.async_set_brightness.assert_called_with(255)
bulb.async_set_brightness.reset_mock()
async def test_white_light(hass: HomeAssistant) -> None:
"""Test a white light."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.mode = "ww"
bulb.protocol = None
bulb.color_modes = {FLUX_COLOR_MODE_DIM}
bulb.color_mode = FLUX_COLOR_MODE_DIM
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "brightness"
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["brightness"]
assert ATTR_EFFECT_LIST in attributes # single channel now supports effects
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
assert hass.states.get(entity_id).state == STATE_OFF
await hass.services.async_call(
LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_on.assert_called_once()
bulb.async_turn_on.reset_mock()
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100},
blocking=True,
)
bulb.async_set_brightness.assert_called_with(100)
bulb.async_set_brightness.reset_mock()
async def test_no_color_modes(hass: HomeAssistant) -> None:
"""Test a light that has no color modes defined in the database."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.mode = "ww"
bulb.protocol = None
bulb.color_modes = set()
bulb.color_mode = None
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_COLOR_MODE] == "onoff"
assert ATTR_EFFECT_LIST in attributes # single channel now supports effects
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
assert hass.states.get(entity_id).state == STATE_OFF
await hass.services.async_call(
LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_on.assert_called_once()
bulb.async_turn_on.reset_mock()
async def test_rgb_light_custom_effects(hass: HomeAssistant) -> None:
"""Test an rgb light with a custom effect."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
options={
CONF_MODE: MODE_AUTO,
CONF_CUSTOM_EFFECT_COLORS: "[0,0,255], [255,0,0]",
CONF_CUSTOM_EFFECT_SPEED_PCT: 88,
CONF_CUSTOM_EFFECT_TRANSITION: TRANSITION_JUMP,
},
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.color_modes = {FLUX_COLOR_MODE_RGB}
bulb.color_mode = FLUX_COLOR_MODE_RGB
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "rgb"
assert attributes[ATTR_EFFECT_LIST] == [*bulb.effect_list, "custom"]
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["rgb"]
assert attributes[ATTR_HS_COLOR] == (0, 100)
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_OFF
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "custom"},
blocking=True,
)
bulb.effect = "custom"
bulb.async_set_custom_pattern.assert_called_with(
[[0, 0, 255], [255, 0, 0]], 88, "jump"
)
bulb.async_set_custom_pattern.reset_mock()
await async_mock_device_turn_on(hass, bulb)
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_EFFECT] == "custom"
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 55, ATTR_EFFECT: "custom"},
blocking=True,
)
bulb.effect = "custom"
bulb.async_set_custom_pattern.assert_called_with(
[[0, 0, 255], [255, 0, 0]], 88, "jump"
)
bulb.async_set_custom_pattern.reset_mock()
await async_mock_device_turn_on(hass, bulb)
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_EFFECT] == "custom"
@pytest.mark.parametrize("effect_colors", [":: CANNOT BE PARSED ::", None])
async def test_rgb_light_custom_effects_invalid_colors(
hass: HomeAssistant, effect_colors: str
) -> None:
"""Test an rgb light with a invalid effect."""
options = {
CONF_MODE: MODE_AUTO,
CONF_CUSTOM_EFFECT_SPEED_PCT: 88,
CONF_CUSTOM_EFFECT_TRANSITION: TRANSITION_JUMP,
}
if effect_colors:
options[CONF_CUSTOM_EFFECT_COLORS] = effect_colors
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
options=options,
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.color_modes = {FLUX_COLOR_MODE_RGB}
bulb.color_mode = FLUX_COLOR_MODE_RGB
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "rgb"
assert attributes[ATTR_EFFECT_LIST] == bulb.effect_list
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["rgb"]
assert attributes[ATTR_HS_COLOR] == (0, 100)
async def test_rgb_light_custom_effect_via_service(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Test an rgb light with a custom effect set via the service."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.color_modes = {FLUX_COLOR_MODE_RGB}
bulb.color_mode = FLUX_COLOR_MODE_RGB
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_BRIGHTNESS] == 128
assert attributes[ATTR_COLOR_MODE] == "rgb"
assert attributes[ATTR_EFFECT_LIST] == bulb.effect_list
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["rgb"]
assert attributes[ATTR_HS_COLOR] == (0, 100)
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
assert hass.states.get(entity_id).state == STATE_OFF
await hass.services.async_call(
DOMAIN,
"set_custom_effect",
{
ATTR_ENTITY_ID: entity_id,
CONF_COLORS: [[0, 0, 255], [255, 0, 0]],
CONF_SPEED_PCT: 30,
CONF_TRANSITION: "jump",
},
blocking=True,
)
bulb.async_set_custom_pattern.assert_called_with(
[(0, 0, 255), (255, 0, 0)], 30, "jump"
)
bulb.async_set_custom_pattern.reset_mock()
async def test_migrate_from_yaml_with_custom_effect(hass: HomeAssistant) -> None:
"""Test migrate from yaml."""
config = {
LIGHT_DOMAIN: [
{
CONF_PLATFORM: DOMAIN,
CONF_DEVICES: {
IP_ADDRESS: {
CONF_NAME: "flux_lamppost",
CONF_PROTOCOL: "ledenet",
CONF_CUSTOM_EFFECT: {
CONF_SPEED_PCT: 30,
CONF_TRANSITION: "strobe",
CONF_COLORS: [[255, 0, 0], [255, 255, 0], [0, 255, 0]],
},
}
},
}
],
}
with _patch_discovery(), _patch_wifibulb():
await async_setup_component(hass, LIGHT_DOMAIN, config)
await hass.async_block_till_done()
await hass.async_block_till_done()
await hass.async_block_till_done()
entries = hass.config_entries.async_entries(DOMAIN)
assert entries
migrated_entry = None
for entry in entries:
if entry.unique_id == MAC_ADDRESS:
migrated_entry = entry
break
assert migrated_entry is not None
assert migrated_entry.data == {
CONF_HOST: IP_ADDRESS,
CONF_NAME: "flux_lamppost",
CONF_PROTOCOL: "ledenet",
}
assert migrated_entry.options == {
CONF_MODE: "auto",
CONF_CUSTOM_EFFECT_COLORS: "[(255, 0, 0), (255, 255, 0), (0, 255, 0)]",
CONF_CUSTOM_EFFECT_SPEED_PCT: 30,
CONF_CUSTOM_EFFECT_TRANSITION: "strobe",
}
async def test_migrate_from_yaml_no_custom_effect(hass: HomeAssistant) -> None:
"""Test migrate from yaml."""
config = {
LIGHT_DOMAIN: [
{
CONF_PLATFORM: DOMAIN,
CONF_DEVICES: {
IP_ADDRESS: {
CONF_NAME: "flux_lamppost",
CONF_PROTOCOL: "ledenet",
}
},
}
],
}
with _patch_discovery(), _patch_wifibulb():
await async_setup_component(hass, LIGHT_DOMAIN, config)
await hass.async_block_till_done()
await hass.async_block_till_done()
await hass.async_block_till_done()
entries = hass.config_entries.async_entries(DOMAIN)
assert entries
migrated_entry = None
for entry in entries:
if entry.unique_id == MAC_ADDRESS:
migrated_entry = entry
break
assert migrated_entry is not None
assert migrated_entry.data == {
CONF_HOST: IP_ADDRESS,
CONF_NAME: "flux_lamppost",
CONF_PROTOCOL: "ledenet",
}
assert migrated_entry.options == {
CONF_MODE: "auto",
CONF_CUSTOM_EFFECT_COLORS: None,
CONF_CUSTOM_EFFECT_SPEED_PCT: 50,
CONF_CUSTOM_EFFECT_TRANSITION: "gradual",
}
async def test_addressable_light(hass: HomeAssistant) -> None:
"""Test an addressable light."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
bulb = _mocked_bulb()
bulb.raw_state = bulb.raw_state._replace(model_num=0x33) # RGB only model
bulb.color_modes = {FLUX_COLOR_MODE_ADDRESSABLE}
bulb.color_mode = FLUX_COLOR_MODE_ADDRESSABLE
with _patch_discovery(), _patch_wifibulb(device=bulb):
await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})
await hass.async_block_till_done()
entity_id = "light.bulb_rgbcw_ddeeff"
state = hass.states.get(entity_id)
assert state.state == STATE_ON
attributes = state.attributes
assert attributes[ATTR_COLOR_MODE] == "onoff"
assert ATTR_EFFECT_LIST in attributes
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["onoff"]
await hass.services.async_call(
LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_off.assert_called_once()
await async_mock_device_turn_off(hass, bulb)
assert hass.states.get(entity_id).state == STATE_OFF
await hass.services.async_call(
LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True
)
bulb.async_turn_on.assert_called_once()
bulb.async_turn_on.reset_mock()
await async_mock_device_turn_on(hass, bulb)
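# Usage note (an assumption, not part of the original test module): inside a
# Home Assistant development checkout these tests run under pytest, e.g.
#   pytest tests/components/flux_led/test_light.py -k test_rgb_light
# with _mocked_bulb/_patch_wifibulb supplied by the component's test helpers.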
| 33.67619
| 86
| 0.685544
| 5,669
| 42,432
| 4.75463
| 0.046393
| 0.054018
| 0.040068
| 0.052237
| 0.890666
| 0.879536
| 0.856793
| 0.844364
| 0.832381
| 0.824701
| 0
| 0.020799
| 0.217053
| 42,432
| 1,259
| 87
| 33.702939
| 0.79053
| 0.019514
| 0
| 0.71575
| 0
| 0.000932
| 0.037256
| 0.009296
| 0
| 0
| 0.000783
| 0
| 0.166822
| 1
| 0
| false
| 0
| 0.013048
| 0
| 0.013048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13778d2d21da08d87b91b464c984b02c17c117ac
| 12,350
|
py
|
Python
|
tides.py
|
imikerussell/TideTimes
|
a0b167bca26247aa892d23d9bb04bb8330fd11c5
|
[
"MIT"
] | 17
|
2017-01-07T10:56:39.000Z
|
2021-09-07T10:19:52.000Z
|
tides.py
|
imikerussell/TideTimes
|
a0b167bca26247aa892d23d9bb04bb8330fd11c5
|
[
"MIT"
] | null | null | null |
tides.py
|
imikerussell/TideTimes
|
a0b167bca26247aa892d23d9bb04bb8330fd11c5
|
[
"MIT"
] | 4
|
2017-01-13T06:23:29.000Z
|
2019-07-07T19:35:54.000Z
|
import requests
import time
from config import URL
from config import LOCATION
from bs4 import BeautifulSoup
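# What follows scrapes a fixed-layout tide-table page (URL from config),
# classifies each <td> cell as a High/Low label, a time, or a height, and
# writes today's and tomorrow's values into tidestore.py / tidestorex.py as
# plain Python assignments for another process to import. The hard-coded td
# indices below assume that page layout does not change.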
raw_html = requests.get(URL).text
data = BeautifulSoup(raw_html, 'html.parser')
tl = data.select('h2')[1].encode_contents().decode("utf-8")
# Strip the trailing placeholder span explicitly; rstrip() removes a
# character *set*, not a suffix, so it could also eat legitimate trailing text.
tl = tl.replace('<span id="displayDate"></span>', '').rstrip()
if (data.select('td')[0].text[0:5]) == "High" or (data.select('td')[0].text[0:5]) == "Low":
hl0 = (data.select('td')[0].text[0:5])
hl = 1
else:
print ("ERROR 0")
if (data.select('td')[1].text[0:5]) == "High" or (data.select('td')[1].text[0:5]) == "Low":
hl1 = (data.select('td')[1].text[0:5])
hl = 2
else:
print ("ERROR 1")
if (data.select('td')[2].text[0:5]) == "High" or (data.select('td')[2].text[0:5]) == "Low":
hl2 = (data.select('td')[2].text[0:5])
hl = 3
else:
print ("ERROR 2")
if (data.select('td')[3].text[0:5]) == "High" or (data.select('td')[3].text[0:5]) == "Low":
hl3 = (data.select('td')[3].text[0:5])
hl = 4
else:
if (data.select('td')[15].text[0:5]) == "High" or (data.select('td')[15].text[0:5]) == "Low":
# 3 tide times today and 4 tide times tomorrow
hl3 = (data.select('td')[12].text[0:5])
hl4 = (data.select('td')[13].text[0:5])
hl5 = (data.select('td')[14].text[0:5])
tt0 = (data.select('td')[3].text[0:5])
tt1 = (data.select('td')[4].text[0:5])
tt2 = (data.select('td')[5].text[0:5])
tt3 = (data.select('td')[16].text[0:5])
tt4 = (data.select('td')[17].text[0:5])
tt5 = (data.select('td')[18].text[0:5])
th0 = (data.select('td')[6].text[0:4])
th0 = th0.rstrip()
th1 = (data.select('td')[7].text[0:4])
th1 = th1.rstrip()
th2 = (data.select('td')[8].text[0:4])
th2 = th2.rstrip()
th3 = (data.select('td')[20].text[0:4])
th3 = th3.rstrip()
th4 = (data.select('td')[21].text[0:4])
th4 = th4.rstrip()
th5 = (data.select('td')[22].text[0:4])
th5 = th5.rstrip()
text_file = open("%s/tidestore.py" % LOCATION, "w")
text_file.write("tl = '%s'" % tl)
text_file.write("\nhl = '%s'" % hl)
text_file.write("\nhl0 = '%s'" % hl0)
text_file.write("\nhl1 = '%s'" % hl1)
text_file.write("\nhl2 = '%s'" % hl2)
text_file.write("\nhl3 = '%s'" % hl3)
text_file.write("\nhl4 = '%s'" % hl4)
text_file.write("\nhl5 = '%s'" % hl5)
text_file.write("\ntt0 = '%s'" % tt0)
text_file.write("\ntt1 = '%s'" % tt1)
text_file.write("\ntt2 = '%s'" % tt2)
text_file.write("\ntt3 = '%s'" % tt3)
text_file.write("\ntt4 = '%s'" % tt4)
text_file.write("\ntt5 = '%s'" % tt5)
text_file.write("\nth0 = '%s'" % th0)
text_file.write("\nth1 = '%s'" % th1)
text_file.write("\nth2 = '%s'" % th2)
text_file.write("\nth3 = '%s'" % th3)
text_file.write("\nth4 = '%s'" % th4)
text_file.write("\nth5 = '%s'" % th5)
text_file.close()
time.sleep(150)
text_file = open("%s/tidestorex.py" % LOCATION, "w")
text_file.write("tl = '%s'" % tl)
text_file.write("\nhl = '%s'" % hl)
text_file.write("\nhlx0 = '%s'" % hl0)
text_file.write("\nhlx1 = '%s'" % hl1)
text_file.write("\nhlx2 = '%s'" % hl2)
text_file.write("\nhlx3 = '%s'" % hl3)
text_file.write("\nhlx4 = '%s'" % hl4)
text_file.write("\nhlx5 = '%s'" % hl5)
text_file.write("\nttx0 = '%s'" % tt0)
text_file.write("\nttx1 = '%s'" % tt1)
text_file.write("\nttx2 = '%s'" % tt2)
text_file.write("\nttx3 = '%s'" % tt3)
text_file.write("\nttx4 = '%s'" % tt4)
text_file.write("\nttx5 = '%s'" % tt5)
text_file.write("\nthx0 = '%s'" % th0)
text_file.write("\nthx1 = '%s'" % th1)
text_file.write("\nthx2 = '%s'" % th2)
text_file.write("\nthx3 = '%s'" % th3)
text_file.write("\nthx4 = '%s'" % th4)
text_file.write("\nthx5 = '%s'" % th5)
text_file.close()
quit()
else:
# 3 tide times today and 3 tide times tomorrow
hl3 = (data.select('td')[12].text[0:5])
hl4 = (data.select('td')[13].text[0:5])
hl5 = (data.select('td')[14].text[0:5])
tt0 = (data.select('td')[3].text[0:5])
tt1 = (data.select('td')[4].text[0:5])
tt2 = (data.select('td')[5].text[0:5])
tt3 = (data.select('td')[15].text[0:5])
tt4 = (data.select('td')[16].text[0:5])
tt5 = (data.select('td')[17].text[0:5])
th0 = (data.select('td')[6].text[0:4])
th0 = th0.rstrip()
th1 = (data.select('td')[7].text[0:4])
th1 = th1.rstrip()
th2 = (data.select('td')[8].text[0:4])
th2 = th2.rstrip()
th3 = (data.select('td')[18].text[0:4])
th3 = th3.rstrip()
th4 = (data.select('td')[19].text[0:4])
th4 = th4.rstrip()
th5 = (data.select('td')[20].text[0:4])
th5 = th5.rstrip()
text_file = open("%s/tidestore.py" % LOCATION, "w")
text_file.write("tl = '%s'" % tl)
text_file.write("\nhl = '%s'" % hl)
text_file.write("\nhl0 = '%s'" % hl0)
text_file.write("\nhl1 = '%s'" % hl1)
text_file.write("\nhl2 = '%s'" % hl2)
text_file.write("\nhl3 = '%s'" % hl3)
text_file.write("\nhl4 = '%s'" % hl4)
text_file.write("\nhl5 = '%s'" % hl5)
text_file.write("\ntt0 = '%s'" % tt0)
text_file.write("\ntt1 = '%s'" % tt1)
text_file.write("\ntt2 = '%s'" % tt2)
text_file.write("\ntt3 = '%s'" % tt3)
text_file.write("\ntt4 = '%s'" % tt4)
text_file.write("\ntt5 = '%s'" % tt5)
text_file.write("\nth0 = '%s'" % th0)
text_file.write("\nth1 = '%s'" % th1)
text_file.write("\nth2 = '%s'" % th2)
text_file.write("\nth3 = '%s'" % th3)
text_file.write("\nth4 = '%s'" % th4)
text_file.write("\nth5 = '%s'" % th5)
text_file.close()
time.sleep(150)
text_file = open("%s/tidestorex.py" % LOCATION, "w")
text_file.write("tl = '%s'" % tl)
text_file.write("\nhl = '%s'" % hl)
text_file.write("\nhlx0 = '%s'" % hl0)
text_file.write("\nhlx1 = '%s'" % hl1)
text_file.write("\nhlx2 = '%s'" % hl2)
text_file.write("\nhlx3 = '%s'" % hl3)
text_file.write("\nhlx4 = '%s'" % hl4)
text_file.write("\nhlx5 = '%s'" % hl5)
text_file.write("\nttx0 = '%s'" % tt0)
text_file.write("\nttx1 = '%s'" % tt1)
text_file.write("\nttx2 = '%s'" % tt2)
text_file.write("\nttx3 = '%s'" % tt3)
text_file.write("\nttx4 = '%s'" % tt4)
text_file.write("\nttx5 = '%s'" % tt5)
text_file.write("\nthx0 = '%s'" % th0)
text_file.write("\nthx1 = '%s'" % th1)
text_file.write("\nthx2 = '%s'" % th2)
text_file.write("\nthx3 = '%s'" % th3)
text_file.write("\nthx4 = '%s'" % th4)
text_file.write("\nthx5 = '%s'" % th5)
text_file.close()
quit()
if (data.select('td')[4].text[0:5]) == "High" or (data.select('td')[4].text[0:5]) == "Low":
hl4 = (data.select('td')[4].text[0:5])
hl = 5
else:
if (data.select('td')[18].text[0:5]) == "High" or (data.select('td')[18].text[0:5]) == "Low":
# 4 tide times today and 4 tide times tomorrow
hl4 = (data.select('td')[15].text[0:5])
hl5 = (data.select('td')[16].text[0:5])
hl6 = (data.select('td')[17].text[0:5])
tt0 = (data.select('td')[4].text[0:5])
tt1 = (data.select('td')[5].text[0:5])
tt2 = (data.select('td')[6].text[0:5])
tt3 = (data.select('td')[7].text[0:5])
tt4 = (data.select('td')[19].text[0:5])
tt5 = (data.select('td')[20].text[0:5])
tt6 = (data.select('td')[21].text[0:5])
th0 = (data.select('td')[8].text[0:4])
th0 = th0.rstrip()
th1 = (data.select('td')[9].text[0:4])
th1 = th1.rstrip()
th2 = (data.select('td')[10].text[0:4])
th2 = th2.rstrip()
th3 = (data.select('td')[11].text[0:4])
th3 = th3.rstrip()
th4 = (data.select('td')[23].text[0:4])
th4 = th4.rstrip()
th5 = (data.select('td')[24].text[0:4])
th5 = th5.rstrip()
th6 = (data.select('td')[25].text[0:4])
th6 = th6.rstrip()
text_file = open("%s/tidestore.py" % LOCATION, "w")
text_file.write("tl = '%s'" % tl)
text_file.write("\nhl = '%s'" % hl)
text_file.write("\nhl0 = '%s'" % hl0)
text_file.write("\nhl1 = '%s'" % hl1)
text_file.write("\nhl2 = '%s'" % hl2)
text_file.write("\nhl3 = '%s'" % hl3)
text_file.write("\nhl4 = '%s'" % hl4)
text_file.write("\nhl5 = '%s'" % hl5)
text_file.write("\nhl6 = '%s'" % hl6)
text_file.write("\ntt0 = '%s'" % tt0)
text_file.write("\ntt1 = '%s'" % tt1)
text_file.write("\ntt2 = '%s'" % tt2)
text_file.write("\ntt3 = '%s'" % tt3)
text_file.write("\ntt4 = '%s'" % tt4)
text_file.write("\ntt5 = '%s'" % tt5)
text_file.write("\ntt6 = '%s'" % tt6)
text_file.write("\nth0 = '%s'" % th0)
text_file.write("\nth1 = '%s'" % th1)
text_file.write("\nth2 = '%s'" % th2)
text_file.write("\nth3 = '%s'" % th3)
text_file.write("\nth4 = '%s'" % th4)
text_file.write("\nth5 = '%s'" % th5)
text_file.write("\nth6 = '%s'" % th6)
text_file.close()
time.sleep(150)
text_file = open("%s/tidestorex.py" % LOCATION, "w")
text_file.write("tl = '%s'" % tl)
text_file.write("\nhl = '%s'" % hl)
text_file.write("\nhlx0 = '%s'" % hl0)
text_file.write("\nhlx1 = '%s'" % hl1)
text_file.write("\nhlx2 = '%s'" % hl2)
text_file.write("\nhlx3 = '%s'" % hl3)
text_file.write("\nhlx4 = '%s'" % hl4)
text_file.write("\nhlx5 = '%s'" % hl5)
text_file.write("\nhlx6 = '%s'" % hl6)
text_file.write("\nttx0 = '%s'" % tt0)
text_file.write("\nttx1 = '%s'" % tt1)
text_file.write("\nttx2 = '%s'" % tt2)
text_file.write("\nttx3 = '%s'" % tt3)
text_file.write("\nttx4 = '%s'" % tt4)
text_file.write("\nttx5 = '%s'" % tt5)
text_file.write("\nttx6 = '%s'" % tt6)
text_file.write("\nthx0 = '%s'" % th0)
text_file.write("\nthx1 = '%s'" % th1)
text_file.write("\nthx2 = '%s'" % th2)
text_file.write("\nthx3 = '%s'" % th3)
text_file.write("\nthx4 = '%s'" % th4)
text_file.write("\nthx5 = '%s'" % th5)
text_file.write("\nthx6 = '%s'" % th6)
text_file.close()
else:
# 4 tide times today and 3 tide times tomorrow
hl4 = (data.select('td')[15].text[0:5])
hl5 = (data.select('td')[16].text[0:5])
hl6 = (data.select('td')[17].text[0:5])
tt0 = (data.select('td')[4].text[0:5])
tt1 = (data.select('td')[5].text[0:5])
tt2 = (data.select('td')[6].text[0:5])
tt3 = (data.select('td')[7].text[0:5])
tt4 = (data.select('td')[18].text[0:5])
tt5 = (data.select('td')[19].text[0:5])
tt6 = (data.select('td')[20].text[0:5])
th0 = (data.select('td')[8].text[0:4])
th0 = th0.rstrip()
th1 = (data.select('td')[9].text[0:4])
th1 = th1.rstrip()
th2 = (data.select('td')[10].text[0:4])
th2 = th2.rstrip()
th3 = (data.select('td')[11].text[0:4])
th3 = th3.rstrip()
th4 = (data.select('td')[21].text[0:4])
th4 = th4.rstrip()
th5 = (data.select('td')[22].text[0:4])
th5 = th5.rstrip()
th6 = (data.select('td')[23].text[0:4])
th6 = th6.rstrip()
text_file = open("%s/tidestore.py" % LOCATION, "w")
text_file.write("tl = '%s'" % tl)
text_file.write("\nhl = '%s'" % hl)
text_file.write("\nhl0 = '%s'" % hl0)
text_file.write("\nhl1 = '%s'" % hl1)
text_file.write("\nhl2 = '%s'" % hl2)
text_file.write("\nhl3 = '%s'" % hl3)
text_file.write("\nhl4 = '%s'" % hl4)
text_file.write("\nhl5 = '%s'" % hl5)
text_file.write("\nhl6 = '%s'" % hl6)
text_file.write("\ntt0 = '%s'" % tt0)
text_file.write("\ntt1 = '%s'" % tt1)
text_file.write("\ntt2 = '%s'" % tt2)
text_file.write("\ntt3 = '%s'" % tt3)
text_file.write("\ntt4 = '%s'" % tt4)
text_file.write("\ntt5 = '%s'" % tt5)
text_file.write("\ntt6 = '%s'" % tt6)
text_file.write("\nth0 = '%s'" % th0)
text_file.write("\nth1 = '%s'" % th1)
text_file.write("\nth2 = '%s'" % th2)
text_file.write("\nth3 = '%s'" % th3)
text_file.write("\nth4 = '%s'" % th4)
text_file.write("\nth5 = '%s'" % th5)
text_file.write("\nth6 = '%s'" % th6)
text_file.close()
time.sleep(150)
text_file = open("%s/tidestorex.py" % LOCATION, "w")
text_file.write("tl = '%s'" % tl)
text_file.write("\nhl = '%s'" % hl)
text_file.write("\nhlx0 = '%s'" % hl0)
text_file.write("\nhlx1 = '%s'" % hl1)
text_file.write("\nhlx2 = '%s'" % hl2)
text_file.write("\nhlx3 = '%s'" % hl3)
text_file.write("\nhlx4 = '%s'" % hl4)
text_file.write("\nhlx5 = '%s'" % hl5)
text_file.write("\nhlx6 = '%s'" % hl6)
text_file.write("\nttx0 = '%s'" % tt0)
text_file.write("\nttx1 = '%s'" % tt1)
text_file.write("\nttx2 = '%s'" % tt2)
text_file.write("\nttx3 = '%s'" % tt3)
text_file.write("\nttx4 = '%s'" % tt4)
text_file.write("\nttx5 = '%s'" % tt5)
text_file.write("\nttx6 = '%s'" % tt6)
text_file.write("\nthx0 = '%s'" % th0)
text_file.write("\nthx1 = '%s'" % th1)
text_file.write("\nthx2 = '%s'" % th2)
text_file.write("\nthx3 = '%s'" % th3)
text_file.write("\nthx4 = '%s'" % th4)
text_file.write("\nthx5 = '%s'" % th5)
text_file.write("\nthx6 = '%s'" % th6)
text_file.close()
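# The six near-identical write-out blocks above could be collapsed into one
# helper. A minimal sketch, assuming the same tl / hl / hl0..hl6 / tt0..tt6 /
# th0..th6 variables; write_store() is a hypothetical helper, not part of the
# original script:
#
# def write_store(path, suffix, tl, hl, hls, tts, ths):
#     with open(path, "w") as f:
#         f.write("tl = '%s'" % tl)
#         f.write("\nhl = '%s'" % hl)
#         for name, values in (("hl", hls), ("tt", tts), ("th", ths)):
#             for i, v in enumerate(values):
#                 f.write("\n%s%s%d = '%s'" % (name, suffix, i, v))
#
# write_store("%s/tidestore.py" % LOCATION, "", tl, hl,
#             [hl0, hl1, hl2, hl3, hl4, hl5, hl6],
#             [tt0, tt1, tt2, tt3, tt4, tt5, tt6],
#             [th0, th1, th2, th3, th4, th5, th6])
# write_store("%s/tidestorex.py" % LOCATION, "x", tl, hl, ...)  # same values, x-prefixed names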
| 37.087087
| 94
| 0.5617
| 2,002
| 12,350
| 3.36963
| 0.071429
| 0.222947
| 0.331456
| 0.015417
| 0.948266
| 0.945894
| 0.945004
| 0.880225
| 0.844649
| 0.844649
| 0
| 0.073244
| 0.162024
| 12,350
| 332
| 95
| 37.198795
| 0.578607
| 0.014494
| 0
| 0.841615
| 0
| 0
| 0.20771
| 0.001973
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015528
| 0
| 0.015528
| 0.009317
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
137cfa95469799a7b17faf667b2420ca34f8d8a6
| 19,034
|
py
|
Python
|
club_crm/api/shop.py
|
VivekChamp/clubcrm
|
82036360d867d3dc5406bc71445a98841b5bffbf
|
[
"MIT"
] | null | null | null |
club_crm/api/shop.py
|
VivekChamp/clubcrm
|
82036360d867d3dc5406bc71445a98841b5bffbf
|
[
"MIT"
] | null | null | null |
club_crm/api/shop.py
|
VivekChamp/clubcrm
|
82036360d867d3dc5406bc71445a98841b5bffbf
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
import re
from datetime import date
import frappe
from club_crm.club_crm.doctype.cart.cart import add_cart_from_shop_online
from club_crm.api.wallet import get_balance
# Get product category list
@frappe.whitelist()
def get_category():
shop_category = frappe.get_all('Item Group', filters={'parent_item_group': "Retail Inventory", 'show_on_app':1}, fields=['name','image'], order_by="item_group_name asc")
frappe.response["message"] = {
"Shop Categories": shop_category
}
# Get product list
@frappe.whitelist()
def get_products(category,count):
today = date.today()
client = frappe.db.get("Client", {"email": frappe.session.user})
item_group = frappe.get_doc('Item Group', category)
product = []
if client.membership_status == "Member" and item_group.no_member_discount == 0:
discount = 0.0
memberships = frappe.get_all('Memberships', filters={'membership_id': client.membership_id, 'membership_status':'Active'}, fields=['*'])
if memberships:
for mem in memberships:
discount = mem.retail_discount
items = frappe.get_all('Item', filters={'item_group':category, 'disabled': 0}, fields=['*'], order_by="item_name asc")
if items:
for item in items:
price = frappe.get_all('Item Price', filters={'item_code':item.item_code, 'price_list':'Standard Selling'}, fields=['*'])
if price:
price_1 = price[0]
description = re.sub("<.*?>", "", price_1.item_description)
reg_price = price_1.price_list_rate
mem_price = reg_price - (reg_price * discount/100.0)
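                    # floor the discounted price down to the nearest 0.5 (x // 0.5 * 0.5)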
member_price = mem_price//0.5*0.5
product.append({
"item_code": item.item_code,
"item_name": item.item_name,
"item_group": item.item_group,
"image": item.image,
"description": description,
"currency": price_1.currency,
"regular_price": format(reg_price, '.2f'),
"member_price": format(member_price, '.2f')
})
elif client.membership_status == "Member" and item_group.no_member_discount == 1:
items = frappe.get_all('Item', filters={'item_group':category, 'disabled': 0}, fields=['*'], order_by="item_name asc")
if items:
for item in items:
price = frappe.get_all('Item Price', filters={'item_code':item.item_code, 'price_list':'Standard Selling'}, fields=['*'])
if price:
price_1 = price[0]
description = re.sub("<.*?>", "", price_1.item_description)
reg_price= price_1.price_list_rate
product.append({
"item_code": item.item_code,
"item_name": item.item_name,
"item_group": item.item_group,
"image": item.image,
"description": description,
"currency": price_1.currency,
"regular_price": format(reg_price, '.2f'),
"member_price": format(reg_price, '.2f')
})
else:
items = frappe.get_all('Item', filters={'item_group':category, 'disabled': 0}, fields=['*'], order_by="item_name asc")
if items:
for item in items:
price = frappe.get_all('Item Price', filters={'item_code':item.item_code, 'price_list':'Standard Selling'}, fields=['*'])
if price:
price_1 = price[0]
description = re.sub("<.*?>", "", price_1.item_description)
reg_price= price_1.price_list_rate
product.append({
"item_code": item.item_code,
"item_name": item.item_name,
"item_group": item.item_group,
"image": item.image,
"description": description,
"currency": price_1.currency,
"regular_price": format(reg_price, '.2f')
})
if product:
total_count = len(product)
carts = frappe.get_all('Online Order', filters={'client_id':client.name, 'created_date': today, 'cart_status': 'Cart'})
if carts:
for cart in carts:
doc = frappe.get_doc('Online Order', cart.name)
frappe.response["message"] = {
"status": 1,
"status_message": "Product Details",
"total_quantity": doc.total_quantity,
"total_count": total_count,
"item": product[int(count):int(count)+16]
}
else:
frappe.response["message"] = {
"status": 1,
"status_message": "Product Details",
"total_quantity": 0,
"total_count": total_count,
"item": product[int(count):int(count)+16]
}
else:
frappe.response["message"] = {
"status": 0,
"status_message": "No products available for this category"
}
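# The three branches of get_products() above differ only in the discount
# applied and in whether member_price is emitted. A minimal sketch of the
# shared listing loop, assuming the same frappe schema; list_products() is a
# hypothetical helper, not part of the original module (discount=0.0
# reproduces the no-member-discount branch up to the 0.5 rounding):
def list_products(category, discount=None):
    # discount is None for non-members, a percentage (possibly 0.0) for members
    product = []
    items = frappe.get_all('Item', filters={'item_group': category, 'disabled': 0},
                           fields=['*'], order_by="item_name asc")
    for item in items:
        price = frappe.get_all('Item Price',
                               filters={'item_code': item.item_code,
                                        'price_list': 'Standard Selling'},
                               fields=['*'])
        if not price:
            continue
        price_1 = price[0]
        reg_price = price_1.price_list_rate
        entry = {
            "item_code": item.item_code,
            "item_name": item.item_name,
            "item_group": item.item_group,
            "image": item.image,
            "description": re.sub("<.*?>", "", price_1.item_description),
            "currency": price_1.currency,
            "regular_price": format(reg_price, '.2f'),
        }
        if discount is not None:
            member_price = (reg_price - reg_price * discount / 100.0) // 0.5 * 0.5
            entry["member_price"] = format(member_price, '.2f')
        product.append(entry)
    return product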
@frappe.whitelist()
def get_product(client_id,category):
client = frappe.db.get("Client", {"email": frappe.session.user})
# client = frappe.get_doc('Client', client_id)
if client.membership_status == "Member":
discount = 0.0
memberships = frappe.get_all('Memberships', filters={'membership_id': client.membership_id, 'membership_status':'Active'}, fields=['*'])
if memberships:
for mem in memberships:
discount = mem.retail_discount
items = frappe.get_all('Item', filters={'item_group':category, 'disabled': 0}, fields=['*'])
if items:
product = []
for item in items:
price = frappe.get_all('Item Price', filters={'item_code':item.item_code, 'price_list':'Standard Selling'}, fields=['*'])
if price:
price_1 = price[0]
description = re.sub("<.*?>", "", price_1.item_description)
reg_price = float(price_1.price_list_rate)
member_price = float(reg_price) - float(reg_price * discount/100.0)
product.append({
"item_code": item.item_code,
"item_name": item.item_name,
"item_group": item.item_group,
"image": item.image,
"description": description,
"currency": price_1.currency,
"regular_price": reg_price,
"member_price": member_price
})
frappe.response["message"] = {
"status": 1,
"status_message": "Product Details",
"item": product
}
else:
frappe.response["message"] = {
"status": 0,
"status_message": "No products available for this category"
}
else:
items = frappe.get_all('Item', filters={'item_group':category, 'disabled': 0}, fields=['*'])
if items:
product=[]
for item in items:
price = frappe.get_all('Item Price', filters={'item_code':item.item_code, 'price_list':'Standard Selling'}, fields=['*'])
if price:
price_1 = price[0]
description = re.sub("<.*?>", "", price_1.item_description)
reg_price= price_1.price_list_rate
product.append({
"item_code": item.item_code,
"item_name": item.item_name,
"item_group": item.item_group,
"image": item.image,
"description": description,
"currency": price_1.currency,
"regular_price": format(reg_price, '.2f')
})
frappe.response["message"] = {
"status": 1,
"status_message": "Product Details",
"item": product
}
else:
frappe.response["message"] = {
"status": 0,
"status_message": "No products available for this category"
}
@frappe.whitelist()
def add_to_cart(client_id, item_code, qty):
today = date.today()
discount = 0.0
client = frappe.db.get("Client", {"email": frappe.session.user})
doc = frappe.get_doc('Client', client.name)
if doc.membership_status == "Member":
if doc.membership_history:
for row in doc.membership_history:
if row.status == "Active":
mem = frappe.get_doc('Memberships', row.membership)
discount = mem.retail_discount
price_list = frappe.get_all('Item Price', filters={'item_code':item_code, 'price_list':'Standard Selling'}, fields=['*'])
if price_list:
for price in price_list:
item_price = price.price_list_rate
carts = frappe.get_all('Online Order', filters={'client_id':client.name, 'created_date': today, 'cart_status': 'Cart'})
if carts:
for cart in carts:
doc = frappe.get_doc('Online Order', cart.name)
doc.append('item', {
'item_code': item_code,
'quantity':qty,
'rate': item_price,
'discount': discount
})
doc.save()
frappe.response["message"] = {
'name': doc.name,
'client_id': doc.client_id,
'client_name': doc.client_name,
'mobile_number': doc.mobile_number,
'membership_status': doc.membership_status,
'cart_status': doc.cart_status,
'payment_status': doc.payment_status,
'payment_method': doc.payment_method,
'total_quantity': int(doc.total_quantity),
'total_amount': doc.total_amount,
'naming_series': doc.naming_series,
'doctype': doc.doctype,
'item': doc.item
}
else:
doc = frappe.get_doc({
'doctype':'Online Order',
'client_id': client.name,
'item': [{
'item_code': item_code,
'quantity':qty,
'rate': item_price,
'discount': discount
}]
})
doc.save()
frappe.response["message"] = {
'name': doc.name,
'client_id': doc.client_id,
'client_name': doc.client_name,
'mobile_number': doc.mobile_number,
'membership_status': doc.membership_status,
'cart_status': doc.cart_status,
'payment_status': doc.payment_status,
'payment_method': doc.payment_method,
'total_quantity': int(doc.total_quantity),
'total_amount': doc.total_amount,
'naming_series': doc.naming_series,
'doctype': doc.doctype,
'item': doc.item
}
# TODO: remove; near-duplicate of add_to_cart that returns items serialised as plain dicts
@frappe.whitelist()
def add_to_carts(client_id, item_code, qty):
today = date.today()
discount = 0.0
client = frappe.db.get("Client", {"email": frappe.session.user})
doc = frappe.get_doc('Client', client.name)
if doc.membership_status == "Member":
if doc.membership_history:
for row in doc.membership_history:
if row.status == "Active":
mem = frappe.get_doc('Memberships', row.membership)
discount = mem.retail_discount
price_list = frappe.get_all('Item Price', filters={'item_code':item_code, 'price_list':'Standard Selling'}, fields=['*'])
if price_list:
for price in price_list:
item_price = price.price_list_rate
carts = frappe.get_all('Online Order', filters={'client_id':client.name, 'created_date': today, 'cart_status': 'Cart'})
if carts:
for cart in carts:
doc = frappe.get_doc('Online Order', cart.name)
doc.append('item', {
'item_code': item_code,
'quantity':qty,
'rate': item_price,
'discount': discount
})
doc.save()
items = []
for item in doc.item:
items.append({
'name': item.name,
'parent': item.parent,
'item_code': item.item_code,
'item_name': item.item_name,
'quantity': int(item.quantity),
'rate': int(item.rate),
'discount': item.discount,
'amount': int(item.amount)
})
frappe.response["message"] = {
'name': doc.name,
'client_id': doc.client_id,
'client_name': doc.client_name,
'mobile_number': doc.mobile_number,
'membership_status': doc.membership_status,
'cart_status': doc.cart_status,
'payment_status': doc.payment_status,
'payment_method': doc.payment_method,
'total_quantity': int(doc.total_quantity),
'total_amount': int(doc.total_amount),
'naming_series': doc.naming_series,
'doctype': doc.doctype,
'item': items
}
else:
doc = frappe.get_doc({
'doctype':'Online Order',
'client_id': client.name,
'item': [{
'item_code': item_code,
'quantity':qty,
'rate': item_price,
'discount': discount
}]
})
doc.save()
items = []
for item in doc.item:
items.append({
'name': item.name,
'parent': item.parent,
'item_code': item.item_code,
'item_name': item.item_name,
'quantity': item.quantity,
'rate': int(item.rate),
'discount': item.discount,
'amount': int(item.amount)
})
frappe.response["message"] = {
'name': doc.name,
'client_id': doc.client_id,
'client_name': doc.client_name,
'mobile_number': doc.mobile_number,
'membership_status': doc.membership_status,
'cart_status': doc.cart_status,
'payment_status': doc.payment_status,
'payment_method': doc.payment_method,
'total_quantity': int(doc.total_quantity),
'total_amount': int(doc.total_amount),
'naming_series': doc.naming_series,
'doctype': doc.doctype,
'item': items
}
@frappe.whitelist()
def delete_from_cart(document_name, item_document_name):
    cart = frappe.get_doc('Online Order', document_name)
    row = None
    for d in cart.item:
        if d.name == item_document_name:
            row = d
    # guard against an unknown item_document_name so cart.remove(None) is never called
    if row is not None:
        cart.remove(row)
    cart.save()
    frappe.db.commit()
if cart.item:
frappe.response["message"] = {
"status": 1,
"document_name": cart.name,
"date": cart.created_date,
"payment_status": cart.payment_status,
"client_id": cart.client_id,
"total_quantity": cart.total_quantity,
"total_amount": cart.total_amount,
"items": cart.item
}
else:
frappe.db.set_value('Online Order', cart.name, {
'docstatus':2,
'cart_status': 'Cancelled'
})
frappe.db.commit()
frappe.response["message"] = {
"status": 0
}
@frappe.whitelist()
def get_cart(client_id):
today = date.today()
client = frappe.db.get("Client", {"email": frappe.session.user})
cart= frappe.get_list('Online Order', filters={'client_id': client.name, 'created_date': today, 'cart_status': 'Cart'}, fields=['*'])
if cart:
cart_1=cart[0]
doc= frappe.get_doc('Online Order', cart_1.name)
items = []
for item in doc.item:
items.append({
'name': item.name,
'parent': item.parent,
'parentfield': item.parentfield,
'item_code': item.item_code,
'item_name': item.item_name,
'quantity': item.quantity,
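                # note: 'rate' below is populated from item.amount (the line total), not item.rate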
'rate': item.amount
})
frappe.response["message"] = {
"status": 1,
"document_name": doc.name,
"date": doc.created_date,
"payment_status": doc.payment_status,
"client_id": doc.client_id,
"total_quantity": doc.total_quantity,
"total_amount": doc.total_amount,
"items": items
}
else:
frappe.response["message"] = {
"status": 0
}
@frappe.whitelist()
def checkout(client_id, payment_method):
client = frappe.db.get("Client", {"email": frappe.session.user})
cart = frappe.get_list('Online Order', filters={'client_id': client.name, 'cart_status': 'Cart'}, fields=['*'])
if cart:
cart_1=cart[0]
doc = frappe.get_doc('Online Order', cart_1.name)
doc.cart_status = 'Check-out'
doc.payment_method = payment_method
doc.save()
to_cart = add_cart_from_shop_online(doc.client_id, doc.name)
wallet = get_balance()
frappe.response["message"] = {
"status": 1,
"document_name": to_cart.name,
"cart_status": doc.cart_status,
"payment_status": doc.payment_status,
"client_name": doc.client_name,
"total_quantity": to_cart.total_quantity,
"total_amount": to_cart.grand_total,
"wallet_balance": wallet
}
| 41.558952
| 173
| 0.507828
| 1,895
| 19,034
| 4.872823
| 0.07124
| 0.034655
| 0.035088
| 0.022525
| 0.850227
| 0.810808
| 0.802794
| 0.789799
| 0.775395
| 0.770522
| 0
| 0.006937
| 0.371441
| 19,034
| 458
| 174
| 41.558952
| 0.764878
| 0.005096
| 0
| 0.786885
| 0
| 0
| 0.172133
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018735
| false
| 0
| 0.025761
| 0
| 0.044496
| 0.002342
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
139a3184a64ac60f3c238cfe1803c091d8e4d01e
| 118
|
py
|
Python
|
LakeShore/__init__.py
|
StevenSiegl/Cryostat-GUI
|
afc96705078336285555b3e2fdddd8921c7ca3f7
|
[
"MIT"
] | null | null | null |
LakeShore/__init__.py
|
StevenSiegl/Cryostat-GUI
|
afc96705078336285555b3e2fdddd8921c7ca3f7
|
[
"MIT"
] | 1
|
2018-10-02T21:32:55.000Z
|
2018-10-02T21:32:55.000Z
|
LakeShore/__init__.py
|
StevenSiegl/Cryostat-GUI
|
afc96705078336285555b3e2fdddd8921c7ca3f7
|
[
"MIT"
] | 3
|
2018-08-27T12:50:48.000Z
|
2018-09-28T09:08:42.000Z
|
"""initialisation for package and importing purposes"""
from . import LakeShore350_Control
from . import LakeShore350
| 29.5
| 55
| 0.813559
| 13
| 118
| 7.307692
| 0.769231
| 0.210526
| 0.463158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057692
| 0.118644
| 118
| 3
| 56
| 39.333333
| 0.855769
| 0.415254
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
13dfc934d460fe6def3d45501fc5c44a3825abad
| 3,041
|
py
|
Python
|
tests/test_archive_parcels.py
|
Stanley-Okwii/send-it-api
|
c86654e828e64b5f39db4ed5fad1e8889c14c6a2
|
[
"Apache-2.0"
] | null | null | null |
tests/test_archive_parcels.py
|
Stanley-Okwii/send-it-api
|
c86654e828e64b5f39db4ed5fad1e8889c14c6a2
|
[
"Apache-2.0"
] | 1
|
2018-11-11T11:35:43.000Z
|
2018-11-11T11:35:43.000Z
|
tests/test_archive_parcels.py
|
Stanley-Okwii/send-it-api
|
c86654e828e64b5f39db4ed5fad1e8889c14c6a2
|
[
"Apache-2.0"
] | 1
|
2018-11-11T11:32:06.000Z
|
2018-11-11T11:32:06.000Z
|
from tests.base import BaseTestCase
import json
import pytest
class TestArchiveParcels(BaseTestCase):
def test_admin_can_view_archived_orders(self):
"""
Test that admin can get parcels that belong to all deleted users
:return:
"""
with self.client:
admin_token = self.get_token("admin", "admin@gmail.com",
"000000", "admin")
user_token = self.get_token("user", "user@gmail.com",
"000000", "user")
self.create_new_parcel_delivery_order(
"Big money",
'3',
"950",
"Diana",
"Wandegeya",
"Kikoni",
user_token
)
self.create_new_parcel_delivery_order(
"Big Pig",
'3',
"650",
"Diana",
"Wandegeya",
"Kampala",
user_token
)
response = self.client.delete(
'api/v1/user',
content_type='application/json',
headers=dict(Authorization='Bearer ' + admin_token),
data=json.dumps(dict(
email="user@gmail.com"
))
)
response = self.client.get(
'api/v1/archive',
headers=dict(Authorization='Bearer ' + admin_token),
)
self.assertEqual(response.status_code, 200)
def test_user_can_not_view_archived_orders(self):
"""
Test that user can not get parcels that belong to all deleted users
:return:
"""
with self.client:
admin_token = self.get_token("admin", "admin@gmail.com",
"000000", "admin")
user_token = self.get_token("user", "user@gmail.com",
"000000", "user")
self.create_new_parcel_delivery_order(
"Big money",
'3',
"950",
"Diana",
"Wandegeya",
"Kikoni",
user_token
)
self.create_new_parcel_delivery_order(
"Big Pig",
'3',
"650",
"Diana",
"Wandegeya",
"Kampala",
user_token
)
response = self.client.delete(
'api/v1/user',
content_type='application/json',
headers=dict(Authorization='Bearer ' + admin_token),
data=json.dumps(dict(
email="user@gmail.com"
))
)
response = self.client.get(
'api/v1/archive',
headers=dict(Authorization='Bearer ' + user_token),
)
self.assertEqual(response.status_code, 404)
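    # Both tests above share the same setup: two orders created for a user who
    # is then deleted by the admin. A minimal sketch of a shared helper,
    # assuming the same BaseTestCase API; _archive_fixture() is hypothetical,
    # not part of the original suite:
    def _archive_fixture(self):
        admin_token = self.get_token("admin", "admin@gmail.com",
                                     "000000", "admin")
        user_token = self.get_token("user", "user@gmail.com",
                                    "000000", "user")
        for name, value, dest in (("Big money", "950", "Kikoni"),
                                  ("Big Pig", "650", "Kampala")):
            self.create_new_parcel_delivery_order(
                name, '3', value, "Diana", "Wandegeya", dest, user_token)
        self.client.delete(
            'api/v1/user',
            content_type='application/json',
            headers=dict(Authorization='Bearer ' + admin_token),
            data=json.dumps(dict(email="user@gmail.com")))
        return admin_token, user_token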
| 32.698925
| 75
| 0.436698
| 254
| 3,041
| 5.043307
| 0.271654
| 0.056206
| 0.050742
| 0.053084
| 0.884465
| 0.884465
| 0.774395
| 0.774395
| 0.774395
| 0.774395
| 0
| 0.030788
| 0.465965
| 3,041
| 92
| 76
| 33.054348
| 0.758005
| 0.049326
| 0
| 0.782051
| 0
| 0
| 0.136492
| 0
| 0
| 0
| 0
| 0
| 0.025641
| 1
| 0.025641
| false
| 0
| 0.038462
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b92b0f5c43bc5ea6fa0847368962cd85a5012f45
| 127
|
py
|
Python
|
core/jedi/routes/home.py
|
arunmathaisk/Jedi
|
81b4078bd710963c079b398b5efcc070a20295d1
|
[
"MIT"
] | null | null | null |
core/jedi/routes/home.py
|
arunmathaisk/Jedi
|
81b4078bd710963c079b398b5efcc070a20295d1
|
[
"MIT"
] | null | null | null |
core/jedi/routes/home.py
|
arunmathaisk/Jedi
|
81b4078bd710963c079b398b5efcc070a20295d1
|
[
"MIT"
] | 2
|
2021-09-27T15:20:04.000Z
|
2022-02-22T01:41:18.000Z
|
from flask import render_template
from jedi import app
@app.get('/')
def home():
return render_template('home.html')
| 21.166667
| 39
| 0.740157
| 19
| 127
| 4.842105
| 0.631579
| 0.304348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133858
| 127
| 6
| 40
| 21.166667
| 0.836364
| 0
| 0
| 0
| 0
| 0
| 0.078125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
b957470967608c53a5b77cac8552c18a7152c40c
| 9,685
|
py
|
Python
|
tests/test_actions.py
|
sneJ-/chaostoolkit-kubernetes
|
9c7886dae8633b35e0622df6f02389f8a5728829
|
[
"Apache-2.0"
] | null | null | null |
tests/test_actions.py
|
sneJ-/chaostoolkit-kubernetes
|
9c7886dae8633b35e0622df6f02389f8a5728829
|
[
"Apache-2.0"
] | null | null | null |
tests/test_actions.py
|
sneJ-/chaostoolkit-kubernetes
|
9c7886dae8633b35e0622df6f02389f8a5728829
|
[
"Apache-2.0"
] | 3
|
2019-06-18T14:23:35.000Z
|
2021-02-21T12:37:43.000Z
|
# -*- coding: utf-8 -*-
from unittest.mock import ANY, MagicMock, patch
import pytest
from chaoslib.exceptions import ActivityFailed
from kubernetes.client.rest import ApiException
from chaosk8s.actions import start_microservice
from chaosk8s.node.actions import cordon_node, create_node, delete_nodes, \
uncordon_node, drain_nodes
@patch('chaosk8s.has_local_config_file', autospec=True)
def test_cannot_process_other_than_yaml_and_json(has_conf):
has_conf.return_value = False
path = "./tests/fixtures/invalid-k8s.txt"
with pytest.raises(ActivityFailed) as excinfo:
start_microservice(spec_path=path)
assert "cannot process {path}".format(path=path) in str(excinfo)
@patch('chaosk8s.has_local_config_file', autospec=True)
@patch('chaosk8s.node.actions.client', autospec=True)
@patch('chaosk8s.client')
def test_create_node(cl, client, has_conf):
has_conf.return_value = False
meta = {
"cluster_name": "somevalue"
}
spec = {
"external_id": "somemetavalue"
}
node = MagicMock()
node.metadata.name = "mynode"
v1 = MagicMock()
v1.create_node.return_value = node
client.CoreV1Api.return_value = v1
res = create_node(meta, spec)
assert res.metadata.name == "mynode"
@patch('chaosk8s.has_local_config_file', autospec=True)
@patch('chaosk8s.node.actions.client', autospec=True)
@patch('chaosk8s.client')
def test_create_node_may_fail(cl, client, has_conf):
has_conf.return_value = False
meta = {
"cluster_name": "somevalue"
}
spec = {
"external_id": "somemetavalue"
}
v1 = MagicMock()
v1.create_node.side_effect = ApiException()
client.CoreV1Api.return_value = v1
with pytest.raises(ActivityFailed) as x:
create_node(meta, spec)
assert "Creating new node failed" in str(x)
@patch('chaosk8s.has_local_config_file', autospec=True)
@patch('chaosk8s.node.actions.client', autospec=True)
@patch('chaosk8s.client')
def test_delete_nodes(cl, client, has_conf):
has_conf.return_value = False
v1 = MagicMock()
client.CoreV1Api.return_value = v1
node = MagicMock()
node.metadata.name = "mynode"
result = MagicMock()
result.items = [node]
v1.list_node.return_value = result
res = MagicMock()
res.status = "Success"
v1.delete_node.return_value = res
delete_nodes(label_selector="k=mynode")
v1.delete_node.assert_called_with("mynode", ANY, grace_period_seconds=None)
@patch('chaosk8s.has_local_config_file', autospec=True)
@patch('chaosk8s.node.actions.client', autospec=True)
@patch('chaosk8s.client')
def test_cordon_node_by_name(cl, client, has_conf):
has_conf.return_value = False
v1 = MagicMock()
client.CoreV1Api.return_value = v1
node = MagicMock()
node.metadata.name = "mynode"
result = MagicMock()
result.items = [node]
v1.list_node.return_value = result
cordon_node(name="mynode")
body = {
"spec": {
"unschedulable": True
}
}
v1.patch_node.assert_called_with("mynode", body)
@patch('chaosk8s.has_local_config_file', autospec=True)
@patch('chaosk8s.node.actions.client', autospec=True)
@patch('chaosk8s.client')
def test_uncordon_node_by_name(cl, client, has_conf):
has_conf.return_value = False
v1 = MagicMock()
client.CoreV1Api.return_value = v1
node = MagicMock()
node.metadata.name = "mynode"
result = MagicMock()
result.items = [node]
v1.list_node.return_value = result
uncordon_node(name="mynode")
body = {
"spec": {
"unschedulable": False
}
}
v1.patch_node.assert_called_with("mynode", body)
@patch('chaosk8s.has_local_config_file', autospec=True)
@patch('chaosk8s.node.actions.client', autospec=True)
@patch('chaosk8s.client')
def test_drain_nodes_by_name(cl, client, has_conf):
has_conf.return_value = False
v1 = MagicMock()
client.CoreV1Api.return_value = v1
node = MagicMock()
node.metadata.name = "mynode"
result = MagicMock()
result.items = [node]
v1.list_node.return_value = result
owner = MagicMock()
owner.controller = True
owner.kind = "ReplicationSet"
pod = MagicMock()
pod.metadata.uid = "1"
pod.metadata.name = "apod"
pod.metadata.namespace = "default"
pod.metadata.owner_references = [owner]
pods = MagicMock()
pods.items = [pod]
v1.list_pod_for_all_namespaces.return_value = pods
new_pod = MagicMock()
new_pod.metadata.uid = "2"
new_pod.metadata.name = "apod"
new_pod.metadata.namespace = "default"
v1.read_namespaced_pod.side_effect = [
pod, new_pod
]
drain_nodes(name="mynode")
v1.create_namespaced_pod_eviction.assert_called_with(
"apod", "default", body=ANY)
@patch('chaosk8s.has_local_config_file', autospec=True)
@patch('chaosk8s.node.actions.client', autospec=True)
@patch('chaosk8s.client')
def test_daemonsets_cannot_be_drained(cl, client, has_conf):
has_conf.return_value = False
v1 = MagicMock()
client.CoreV1Api.return_value = v1
node = MagicMock()
node.metadata.name = "mynode"
result = MagicMock()
result.items = [node]
v1.list_node.return_value = result
owner = MagicMock()
owner.controller = True
owner.kind = "DaemonSet"
pod = MagicMock()
pod.metadata.uid = "1"
pod.metadata.name = "apod"
pod.metadata.namespace = "default"
pod.metadata.owner_references = [owner]
pods = MagicMock()
pods.items = [pod]
v1.list_pod_for_all_namespaces.return_value = pods
drain_nodes(name="mynode")
v1.read_namespaced_pod.assert_not_called()
v1.create_namespaced_pod_eviction.assert_not_called()
@patch('chaosk8s.has_local_config_file', autospec=True)
@patch('chaosk8s.node.actions.client', autospec=True)
@patch('chaosk8s.client')
def test_pod_with_local_volume_cannot_be_drained(cl, client, has_conf):
has_conf.return_value = False
v1 = MagicMock()
client.CoreV1Api.return_value = v1
node = MagicMock()
node.metadata.name = "mynode"
result = MagicMock()
result.items = [node]
v1.list_node.return_value = result
owner = MagicMock()
owner.controller = True
owner.kind = "ReplicationSet"
pod = MagicMock()
pod.metadata.uid = "1"
pod.metadata.name = "apod"
pod.metadata.namespace = "default"
pod.metadata.owner_references = [owner]
volume = MagicMock()
volume.empty_dir = True
pod.spec.volumes = [volume]
pods = MagicMock()
pods.items = [pod]
v1.list_pod_for_all_namespaces.return_value = pods
drain_nodes(name="mynode")
v1.read_namespaced_pod.assert_not_called()
v1.create_namespaced_pod_eviction.assert_not_called()
@patch('chaosk8s.has_local_config_file', autospec=True)
@patch('chaosk8s.node.actions.client', autospec=True)
@patch('chaosk8s.client')
def test_pod_with_local_volume_cannot_be_drained_unless_forced(cl, client,
has_conf):
has_conf.return_value = False
v1 = MagicMock()
client.CoreV1Api.return_value = v1
node = MagicMock()
node.metadata.name = "mynode"
result = MagicMock()
result.items = [node]
v1.list_node.return_value = result
owner = MagicMock()
owner.controller = True
owner.kind = "ReplicationSet"
pod = MagicMock()
pod.metadata.uid = "1"
pod.metadata.name = "apod"
pod.metadata.namespace = "default"
pod.metadata.owner_references = [owner]
pods = MagicMock()
pods.items = [pod]
v1.list_pod_for_all_namespaces.return_value = pods
new_pod = MagicMock()
new_pod.metadata.uid = "2"
new_pod.metadata.name = "apod"
new_pod.metadata.namespace = "default"
v1.read_namespaced_pod.side_effect = [
pod, new_pod
]
drain_nodes(name="mynode", delete_pods_with_local_storage=True)
v1.create_namespaced_pod_eviction.assert_called_with(
"apod", "default", body=ANY)
@patch('chaosk8s.has_local_config_file', autospec=True)
@patch('chaosk8s.node.actions.client', autospec=True)
@patch('chaosk8s.client')
def test_mirror_pod_cannot_be_drained(cl, client, has_conf):
has_conf.return_value = False
v1 = MagicMock()
client.CoreV1Api.return_value = v1
node = MagicMock()
node.metadata.name = "mynode"
result = MagicMock()
result.items = [node]
v1.list_node.return_value = result
owner = MagicMock()
owner.controller = True
owner.kind = "ReplicationSet"
pod = MagicMock()
pod.metadata.uid = "1"
pod.metadata.name = "apod"
pod.metadata.namespace = "default"
pod.metadata.owner_references = [owner]
pod.metadata.annotations = {
"kubernetes.io/config.mirror": "..."
}
pods = MagicMock()
pods.items = [pod]
v1.list_pod_for_all_namespaces.return_value = pods
drain_nodes(name="mynode")
v1.read_namespaced_pod.assert_not_called()
v1.create_namespaced_pod_eviction.assert_not_called()
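# The node tests above rebuild the same mocked cluster by hand each time. A
# minimal sketch of a shared factory, assuming the same MagicMock usage;
# fake_node_api() is a hypothetical helper, not part of the original module:
def fake_node_api(client, node_name="mynode"):
    # Returns a CoreV1Api mock whose list_node() reports a single named node.
    v1 = MagicMock()
    client.CoreV1Api.return_value = v1
    node = MagicMock()
    node.metadata.name = node_name
    result = MagicMock()
    result.items = [node]
    v1.list_node.return_value = result
    return v1, node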
| 25.689655
| 79
| 0.690036
| 1,214
| 9,685
| 5.264415
| 0.110379
| 0.068847
| 0.05852
| 0.086059
| 0.884838
| 0.851353
| 0.834924
| 0.83023
| 0.823502
| 0.823502
| 0
| 0.014038
| 0.190914
| 9,685
| 376
| 80
| 25.757979
| 0.801557
| 0.002168
| 0
| 0.8125
| 0
| 0
| 0.144587
| 0.075243
| 0
| 0
| 0
| 0
| 0.055147
| 1
| 0.044118
| false
| 0
| 0.022059
| 0
| 0.066176
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b96114b2cc833bf0c90561a213f9c4a37b508148
| 102
|
py
|
Python
|
trac_captcha/test_util/__init__.py
|
FelixSchwarz/trac-captcha
|
90eb4d3b4dae297e23f09a99a91bcfabcd099dc6
|
[
"MIT"
] | 1
|
2020-10-23T14:59:42.000Z
|
2020-10-23T14:59:42.000Z
|
trac_captcha/test_util/__init__.py
|
FelixSchwarz/trac-captcha
|
90eb4d3b4dae297e23f09a99a91bcfabcd099dc6
|
[
"MIT"
] | null | null | null |
trac_captcha/test_util/__init__.py
|
FelixSchwarz/trac-captcha
|
90eb4d3b4dae297e23f09a99a91bcfabcd099dc6
|
[
"MIT"
] | null | null | null |
from trac_captcha.test_util.captcha_test import *
from trac_captcha.test_util.fake_captcha import *
| 20.4
| 49
| 0.843137
| 16
| 102
| 5
| 0.4375
| 0.4125
| 0.375
| 0.475
| 0.575
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098039
| 102
| 4
| 50
| 25.5
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
b9e80e5586d24452c04e69229039d7dbfa4d9bff
| 433
|
py
|
Python
|
titan/project_pkg/__init__.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
titan/project_pkg/__init__.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
titan/project_pkg/__init__.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
from . import (
dockercompose,
dockercompose_and_project,
dockercompose_and_service,
dockerfile,
pkg,
project,
project_and_service,
service,
service_and_docker,
vscodeproject,
)
modules = [
dockercompose,
dockercompose_and_project,
dockercompose_and_service,
dockerfile,
pkg,
project,
project_and_service,
service,
service_and_docker,
vscodeproject,
]
| 16.653846
| 30
| 0.685912
| 39
| 433
| 7.205128
| 0.282051
| 0.227758
| 0.206406
| 0.256228
| 0.939502
| 0.939502
| 0.939502
| 0.939502
| 0.939502
| 0.939502
| 0
| 0
| 0.254042
| 433
| 25
| 31
| 17.32
| 0.869969
| 0
| 0
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
6a02f0d2b002415b3a349cb4ec71023c79d18da6
| 5,561
|
py
|
Python
|
Progressbar.py
|
Ernyoke/Artistic_video_GUI
|
c8b0208e1a91bf3ce37ad3bdac8a953b2ec2fad7
|
[
"MIT"
] | null | null | null |
Progressbar.py
|
Ernyoke/Artistic_video_GUI
|
c8b0208e1a91bf3ce37ad3bdac8a953b2ec2fad7
|
[
"MIT"
] | null | null | null |
Progressbar.py
|
Ernyoke/Artistic_video_GUI
|
c8b0208e1a91bf3ce37ad3bdac8a953b2ec2fad7
|
[
"MIT"
] | null | null | null |
from PyQt5.QtWidgets import QDialog
from PyQt5.Qt import pyqtSlot, pyqtSignal, Qt
from gui.Ui_ProgressbarImage import Ui_ProgressDialogImage
from gui.Ui_ProgressbarVideo import Ui_ProgressDialogVideo
from gui.Ui_ProgressbarVideoOpticalFlow import Ui_ProgressDialogVideoOpticalFlow
class ProgressBar(QDialog):
def __init__(self, parent):
super().__init__(parent)
cancel_progress = pyqtSignal()
display_stylized_image = pyqtSignal(str)
def show(self):
super().show()
self._reset()
def _reset(self):
raise NotImplementedError
def hook_up(self, artistic):
raise NotImplementedError
def unhook(self, artistic):
raise NotImplementedError
@pyqtSlot()
def cancel_btn_pressed(self):
self.cancel_progress.emit()
def set_to_ok(self):
self.ui.cancelButton.setText("OK")
self.ui.cancelButton.clicked.connect(self.close)
@pyqtSlot(str)
def set_status(self, status):
self.ui.statusLabel.setText(status)
class ProgressbarImage(ProgressBar):
def __init__(self, parent):
super().__init__(parent)
self.ui = Ui_ProgressDialogImage()
self.ui.setupUi(self)
self.ui.cancelButton.clicked.connect(self.cancel_btn_pressed)
def _reset(self):
self.ui.iterationsBar.setValue(0)
self.ui.cancelButton.setText("Cancel")
self.ui.statusLabel.setText("")
def hook_up(self, artistic):
artistic.iter_changed.connect(self.update_iter_bar)
artistic.set_status.connect(self.set_status)
artistic.frame_changed.connect(self.update_frame_bar)
self.cancel_progress.connect(artistic.stop_running, Qt.DirectConnection)
def unhook(self, artistic):
artistic.iter_changed.disconnect(self.update_iter_bar)
artistic.set_status.disconnect(self.set_status)
self.cancel_progress.disconnect(artistic.stop_running)
@pyqtSlot(int, int)
def update_iter_bar(self, current, maximum):
self.ui.iterationsBar.setMaximum(maximum)
self.ui.iterationsBar.setValue(current)
@pyqtSlot(int, int, str)
def update_frame_bar(self, current, maximum, stylized_image_path):
self.display_stylized_image.emit(stylized_image_path)
class ProgressbarVideo(ProgressBar):
def __init__(self, parent):
super().__init__(parent)
self.ui = Ui_ProgressDialogVideo()
self.ui.setupUi(self)
self.ui.cancelButton.clicked.connect(self.cancel_btn_pressed)
def _reset(self):
self.ui.iterationsBar.setValue(0)
self.ui.framesBar.setValue(0)
self.ui.cancelButton.setText("Cancel")
self.ui.statusLabel.setText("")
def hook_up(self, artistic):
artistic.iter_changed.connect(self.update_iter_bar)
artistic.frame_changed.connect(self.update_frame_bar)
artistic.set_status.connect(self.set_status)
self.cancel_progress.connect(artistic.stop_running, Qt.DirectConnection)
def unhook(self, artistic):
artistic.iter_changed.disconnect(self.update_iter_bar)
artistic.frame_changed.disconnect(self.update_frame_bar)
artistic.set_status.disconnect(self.set_status)
self.cancel_progress.disconnect(artistic.stop_running)
@pyqtSlot(int, int)
    def update_iter_bar(self, current, maximum):
self.ui.iterationsBar.setMaximum(maximum)
self.ui.iterationsBar.setValue(current)
@pyqtSlot(int, int, str)
def update_frame_bar(self, current, maximum, stylized_image_path):
self.ui.framesBar.setMaximum(maximum)
self.ui.framesBar.setValue(current)
self.display_stylized_image.emit(stylized_image_path)
class ProgressbarVideoOpticalFlow(ProgressBar):
def __init__(self, parent):
super().__init__(parent)
self.ui = Ui_ProgressDialogVideoOpticalFlow()
self.ui.setupUi(self)
self.ui.cancelButton.clicked.connect(self.cancel_btn_pressed)
def show(self):
super().show()
self._reset()
def _reset(self):
self.ui.iterationsBar.setValue(0)
self.ui.framesBar.setValue(0)
self.ui.opticalFlowBar.setValue(0)
self.ui.cancelButton.setText("Cancel")
self.ui.statusLabel.setText("")
def hook_up(self, artistic):
artistic.iter_changed.connect(self.update_iter_bar)
artistic.frame_changed.connect(self.update_frame_bar)
artistic.flow_created.connect(self.update_flow_bar)
artistic.set_status.connect(self.set_status)
self.cancel_progress.connect(artistic.stop_running, Qt.DirectConnection)
def unhook(self, artistic):
artistic.iter_changed.disconnect(self.update_iter_bar)
artistic.frame_changed.disconnect(self.update_frame_bar)
artistic.flow_created.disconnect(self.update_flow_bar)
artistic.set_status.disconnect(self.set_status)
self.cancel_progress.disconnect(artistic.stop_running)
@pyqtSlot(int, int)
def update_iter_bar(self, current, maximum):
self.ui.iterationsBar.setMaximum(maximum)
self.ui.iterationsBar.setValue(current)
@pyqtSlot(int, int, str)
def update_frame_bar(self, current, maximum, stylized_image_path):
self.ui.framesBar.setMaximum(maximum)
self.ui.framesBar.setValue(current)
self.display_stylized_image.emit(stylized_image_path)
@pyqtSlot(int, int)
def update_flow_bar(self, current, maximum):
self.ui.opticalFlowBar.setMaximum(maximum)
self.ui.opticalFlowBar.setValue(current)
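# The hook_up/unhook pairs in the three subclasses above are mirror images of
# one another. A minimal sketch of a table-driven alternative, assuming the
# same signal names; TableDrivenProgressBar and _signal_pairs() are
# hypothetical, not part of the original module:
class TableDrivenProgressBar(ProgressBar):
    def _signal_pairs(self, artistic):
        # (signal, slot) pairs; subclasses extend this list with their extra
        # bars (iterations, frames, optical flow) and provide the slots.
        return [(artistic.set_status, self.set_status)]

    def hook_up(self, artistic):
        for signal, slot in self._signal_pairs(artistic):
            signal.connect(slot)
        self.cancel_progress.connect(artistic.stop_running, Qt.DirectConnection)

    def unhook(self, artistic):
        for signal, slot in self._signal_pairs(artistic):
            signal.disconnect(slot)
        self.cancel_progress.disconnect(artistic.stop_running)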
| 34.75625
| 80
| 0.714979
| 656
| 5,561
| 5.815549
| 0.109756
| 0.056619
| 0.034076
| 0.038532
| 0.786632
| 0.775098
| 0.758585
| 0.73709
| 0.717693
| 0.704325
| 0
| 0.001765
| 0.185039
| 5,561
| 160
| 81
| 34.75625
| 0.840026
| 0
| 0
| 0.758065
| 0
| 0
| 0.003596
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.225806
| false
| 0
| 0.040323
| 0
| 0.314516
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a30199e56267f2c2a7065ee0cce29603666143b
| 6,603
|
py
|
Python
|
example_cycles/N+3ref/N3_LPT_map.py
|
naylor-b/pyCycle
|
787743b39b17443631debb145a976b0ccdee43ab
|
[
"Apache-2.0"
] | null | null | null |
example_cycles/N+3ref/N3_LPT_map.py
|
naylor-b/pyCycle
|
787743b39b17443631debb145a976b0ccdee43ab
|
[
"Apache-2.0"
] | null | null | null |
example_cycles/N+3ref/N3_LPT_map.py
|
naylor-b/pyCycle
|
787743b39b17443631debb145a976b0ccdee43ab
|
[
"Apache-2.0"
] | 2
|
2020-06-26T16:54:56.000Z
|
2020-06-26T16:58:04.000Z
|
import numpy as np
from pycycle.maps.map_data import MapData
"""Python version of CFM56 LPT map from NPSS"""
LPTMap = MapData()
# Map design point values
LPTMap.defaults = {}
LPTMap.defaults['alphaMap'] = 1.0
LPTMap.defaults['NpMap'] = 100.0
LPTMap.defaults['PRmap'] = 7.5
#effMapDes = 0.9276 # = effMap for no scaling
LPTMap.alphaMap = np.array([1.0, 2.0])
LPTMap.NpMap = np.array([60.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0])
LPTMap.PRmap = np.array([3.000, 3.250, 3.500, 3.750, 4.000, 4.250, 4.500, 4.750, 5.000, 5.250, 5.500, 5.750, 6.000, 6.250, 6.500, 6.750, 7.000, 7.250, 7.500, 8.000])
LPTMap.effMap= np.array([[[0.8388, 0.8309, 0.8234, 0.8159, 0.8091, 0.8030, 0.7975, 0.7924, 0.7876, 0.7832, 0.7791, 0.7753, 0.7717, 0.7684, 0.7652, 0.7623, 0.7595, 0.7568, 0.7542, 0.7495],
[0.8878, 0.8813, 0.8745, 0.8685, 0.8629, 0.8577, 0.8528, 0.8484, 0.8443, 0.8404, 0.8368, 0.8334, 0.8302, 0.8272, 0.8242, 0.8210, 0.8179, 0.8150, 0.8122, 0.8071],
[0.9201, 0.9152, 0.9105, 0.9061, 0.9018, 0.8978, 0.8940, 0.8905, 0.8872, 0.8840, 0.8810, 0.8776, 0.8741, 0.8707, 0.8676, 0.8646, 0.8618, 0.8590, 0.8565, 0.8516],
[0.9381, 0.9360, 0.9336, 0.9310, 0.9283, 0.9257, 0.9231, 0.9206, 0.9182, 0.9153, 0.9119, 0.9087, 0.9056, 0.9027, 0.8999, 0.8973, 0.8948, 0.8924, 0.8901, 0.8858],
[0.9447, 0.9455, 0.9456, 0.9450, 0.9440, 0.9429, 0.9417, 0.9404, 0.9383, 0.9355, 0.9327, 0.9301, 0.9276, 0.9252, 0.9229, 0.9207, 0.9186, 0.9165, 0.9146, 0.9099],
[0.9415, 0.9454, 0.9479, 0.9495, 0.9504, 0.9510, 0.9512, 0.9511, 0.9492, 0.9472, 0.9452, 0.9433, 0.9414, 0.9396, 0.9378, 0.9361, 0.9344, 0.9326, 0.9304, 0.9262],
[0.9295, 0.9366, 0.9419, 0.9458, 0.9487, 0.9509, 0.9526, 0.9538, 0.9528, 0.9517, 0.9505, 0.9493, 0.9481, 0.9468, 0.9456, 0.9444, 0.9432, 0.9413, 0.9395, 0.9360]],
[[0.8388, 0.8309, 0.8234, 0.8159, 0.8091, 0.8030, 0.7975, 0.7924, 0.7876, 0.7832, 0.7791, 0.7753, 0.7717, 0.7684, 0.7652, 0.7623, 0.7595, 0.7568, 0.7542, 0.7495],
[0.8878, 0.8813, 0.8745, 0.8685, 0.8629, 0.8577, 0.8528, 0.8484, 0.8443, 0.8404, 0.8368, 0.8334, 0.8302, 0.8272, 0.8242, 0.8210, 0.8179, 0.8150, 0.8122, 0.8071],
[0.9201, 0.9152, 0.9105, 0.9061, 0.9018, 0.8978, 0.8940, 0.8905, 0.8872, 0.8840, 0.8810, 0.8776, 0.8741, 0.8707, 0.8676, 0.8646, 0.8618, 0.8590, 0.8565, 0.8516],
[0.9381, 0.9360, 0.9336, 0.9310, 0.9283, 0.9257, 0.9231, 0.9206, 0.9182, 0.9153, 0.9119, 0.9087, 0.9056, 0.9027, 0.8999, 0.8973, 0.8948, 0.8924, 0.8901, 0.8858],
[0.9447, 0.9455, 0.9456, 0.9450, 0.9440, 0.9429, 0.9417, 0.9404, 0.9383, 0.9355, 0.9327, 0.9301, 0.9276, 0.9252, 0.9229, 0.9207, 0.9186, 0.9165, 0.9146, 0.9099],
[0.9415, 0.9454, 0.9479, 0.9495, 0.9504, 0.9510, 0.9512, 0.9511, 0.9492, 0.9472, 0.9452, 0.9433, 0.9414, 0.9396, 0.9378, 0.9361, 0.9344, 0.9326, 0.9304, 0.9262],
[0.9295, 0.9366, 0.9419, 0.9458, 0.9487, 0.9509, 0.9526, 0.9538, 0.9528, 0.9517, 0.9505, 0.9493, 0.9481, 0.9468, 0.9456, 0.9444, 0.9432, 0.9413, 0.9395, 0.9360]]])
LPTMap.WpMap= np.array([[[153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812],
[153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511],
[152.799, 152.982, 153.052, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061],
[150.995, 151.316, 151.518, 151.647, 151.729, 151.781, 151.814, 151.834, 151.846, 151.852, 151.856, 151.858, 151.859, 151.859, 151.859, 151.859, 151.859, 151.859, 151.859, 151.859],
[148.751, 149.107, 149.349, 149.517, 149.635, 149.719, 149.779, 149.822, 149.852, 149.872, 149.885, 149.894, 149.898, 149.899, 149.899, 149.899, 149.899, 149.899, 149.899, 149.899],
[145.352, 145.680, 145.905, 146.061, 146.169, 146.244, 146.293, 146.324, 146.339, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344],
[140.863, 141.131, 141.310, 141.428, 141.503, 141.547, 141.567, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569]],
[[153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812, 153.812],
[153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511, 153.511],
[152.799, 152.982, 153.052, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061, 153.061],
[150.995, 151.316, 151.518, 151.647, 151.729, 151.781, 151.814, 151.834, 151.846, 151.852, 151.856, 151.858, 151.859, 151.859, 151.859, 151.859, 151.859, 151.859, 151.859, 151.859],
[148.751, 149.107, 149.349, 149.517, 149.635, 149.719, 149.779, 149.822, 149.852, 149.872, 149.885, 149.894, 149.898, 149.899, 149.899, 149.899, 149.899, 149.899, 149.899, 149.899],
[145.352, 145.680, 145.905, 146.061, 146.169, 146.244, 146.293, 146.324, 146.339, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344, 146.344],
[140.863, 141.131, 141.310, 141.428, 141.503, 141.547, 141.567, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569, 141.569]]])
#LPTMap.Np_data, LPTMap.alpha_data, LPTMap.PR_data = np.meshgrid(LPTMap.Nc_vals, LPTMap.alpha_vals, LPTMap.PR_vals, sparse=False)
LPTMap.Npts = LPTMap.NpMap.size
LPTMap.units = {}
LPTMap.units['NpMap'] = 'rpm'
LPTMap.units['WpMap'] = 'lbm/s'
# format for new regular grid interpolator:
LPTMap.param_data = []
LPTMap.output_data = []
LPTMap.param_data.append({'name': 'alphaMap', 'values': LPTMap.alphaMap,
'default': 1.0, 'units': None})
LPTMap.param_data.append({'name': 'NpMap', 'values': LPTMap.NpMap,
'default': 100.0, 'units': 'rpm'})
LPTMap.param_data.append({'name': 'PRmap', 'values': LPTMap.PRmap,
'default': 7.5, 'units': None})
LPTMap.output_data.append({'name': 'WpMap', 'values': LPTMap.WpMap,
'default': np.mean(LPTMap.WpMap), 'units': 'lbm/s'})
LPTMap.output_data.append({'name': 'effMap', 'values': LPTMap.effMap,
'default': np.mean(LPTMap.effMap), 'units': None})
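# A quick sanity check of the tables above, outside pyCycle: interpolate the
# alpha=1 efficiency sheet on the (NpMap, PRmap) grid with scipy. Illustrative
# only; this is not how pyCycle itself consumes MapData:
#
#   from scipy.interpolate import RegularGridInterpolator
#   eff = RegularGridInterpolator((LPTMap.NpMap, LPTMap.PRmap),
#                                 LPTMap.effMap[0])
#   print(eff([[100.0, 7.5]]))  # efficiency near the map design point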
| 89.22973
| 206
| 0.626079
| 1,372
| 6,603
| 3.002915
| 0.19898
| 0.058252
| 0.087379
| 0.11068
| 0.778398
| 0.747573
| 0.747573
| 0.747573
| 0.747573
| 0.747573
| 0
| 0.56973
| 0.146449
| 6,603
| 73
| 207
| 90.452055
| 0.161285
| 0.036044
| 0
| 0.37037
| 0
| 0
| 0.028992
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.037037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbe5fb403b08c084a3aad5219a6608c2203092c3
| 16,927
|
py
|
Python
|
Mosaic_1.py
|
lah3/USGS-Phase-6
|
e4a9b3fadf00fcbe8889182a75d794cd6592eede
|
[
"MIT"
] | null | null | null |
Mosaic_1.py
|
lah3/USGS-Phase-6
|
e4a9b3fadf00fcbe8889182a75d794cd6592eede
|
[
"MIT"
] | null | null | null |
Mosaic_1.py
|
lah3/USGS-Phase-6
|
e4a9b3fadf00fcbe8889182a75d794cd6592eede
|
[
"MIT"
] | null | null | null |
import arcpy,os
from arcpy import env
from arcpy.sa import *
arcpy.CheckOutExtension("Spatial")
arcpy.env.overwriteOutput = True
mos_final_1 = "D:/mos_final_1.gdb"
mos_final_2 = "D:/mos_final_2.gdb"
mos_final_3 = "D:/mos_final_3.gdb"
mos_final_4 = "D:/mos_final_4.gdb"
mos_final_5 = "D:/mos_final_5.gdb"
mos_final_6 = "D:/mos_final_6.gdb"
mos_final_7 = "D:/mos_final_7.gdb"
mos_final_8 = "D:/mos_final_8.gdb"
mos_final_9 = "D:/mos_final_9.gdb"
mos_final_10 = "D:/mos_final_10.gdb"
mos_final_11 = "D:/mos_final_11.gdb"
mos_final_12 = "D:/mos_final_12.gdb"
mos_final_13 = "D:/mos_final_13.gdb"
final_1 = "D:/final_1.gdb"
final_2 = "D:/final_2.gdb"
final_3 = "D:/final_3.gdb"
final_4 = "D:/final_4.gdb"
final_5 = "D:/final_5.gdb"
final_6 = "D:/final_6.gdb"
final_7 = "D:/final_7.gdb"
final_8 = "D:/final_8.gdb"
final_9 = "D:/final_9.gdb"
final_10 = "D:/final_10.gdb"
final_11 = "D:/final_11.gdb"
final_12 = "D:/final_12.gdb"
final_13 = "D:/final_13.gdb"
#Lists
md_list = [mos_final_1, mos_final_2, mos_final_3, mos_final_4, mos_final_5, mos_final_6, mos_final_7, mos_final_8, mos_final_9, mos_final_10, mos_final_11, mos_final_12, mos_final_13]
final_list = [final_1, final_2, final_3, final_4, final_5, final_6, final_7, final_8, final_9, final_10, final_11, final_12, final_13]
class_list = ['CRP', 'FOREST', 'INR', 'IR', 'MO', 'PAS', 'TCI', 'TCT', 'TG', 'WAT', 'WLF', 'WLO', 'WLT']
# Creates mosaic datasets
for i in md_list:
if arcpy.Exists(i):
print i + ": Exists (MD)"
else:
gdb_name = os.path.basename(i)
gdb = os.path.splitext(gdb_name)[0]
arcpy.CreateFileGDB_management("D:/", gdb )
# Creates output datasets
for i in final_list:
if arcpy.Exists(i):
print i + ": Exists (Final)"
else:
gdb_name = os.path.basename(i)
gdb = os.path.splitext(gdb_name)[0]
arcpy.CreateFileGDB_management("D:/", gdb )
for f, c in zip(md_list, class_list):
coord = arcpy.SpatialReference(3857)
arcpy.CreateMosaicDataset_management(f, c, coord, "1","8_BIT_UNSIGNED")
print "---Mosaic Datasets Created---"
#Raster lists, one per land-use class; the FOR pattern has no underscore so it
#matches both "_FOR.tif" and plain "FOR.tif" names.
CLASS_PATTERNS = [("*_CRP.tif", "CRP"), ("*FOR.tif", "FOREST"), ("*_INR.tif", "INR"),
                  ("*_IR.tif", "IR"), ("*_MO.tif", "MO"), ("*_PAS.tif", "PAS"),
                  ("*_TCI.tif", "TCI"), ("*_TCT.tif", "TCT"), ("*_TG.tif", "TG"),
                  ("*_WAT.tif", "WAT"), ("*_WLF.tif", "WLF"), ("*_WLO.tif", "WLO"),
                  ("*_WLT.tif", "WLT")]
raster_lists = dict((name, []) for _, name in CLASS_PATTERNS)

def collect_rasters(county_dir):
    """Append the full path of every matching TIFF under county_dir to its class list."""
    env.workspace = county_dir
    for pattern, name in CLASS_PATTERNS:
        for raster in arcpy.ListRasters(pattern):
            if arcpy.Exists(raster):
                raster_lists[name].append(county_dir + "/" + raster)

#Non-VA counties live in D:/A__P6_FINAL_TIFFs/<CoName>_FINAL/
arcpy.env.workspace = "D:/A__P6_FINAL_TIFFs"
for folder in arcpy.ListWorkspaces("*"):
    CoName = os.path.basename(folder).rsplit('_', 1)[0]
    collect_rasters("D:/A__P6_FINAL_TIFFs/" + CoName + "_FINAL")
print "---Raster lists compiled for Non-VA Counties---"
#VA counties live in D:/A__P6_FINAL_TIFFs_VA/<CoName>/Final_Tiffs/
arcpy.env.workspace = "D:/A__P6_FINAL_TIFFs_VA"
for folder in arcpy.ListWorkspaces("*"):
    CoName = os.path.basename(folder)
    collect_rasters("D:/A__P6_FINAL_TIFFs_VA/" + CoName + "/Final_Tiffs")
print "---Raster lists compiled for VA Counties---"
#Add each class's rasters to its own mosaic dataset (one class per GDB, mos_final_1..13)
for i, (pattern, name) in enumerate(CLASS_PATTERNS, start=1):
    input_path = ";".join(raster_lists[name])
    arcpy.AddRastersToMosaicDataset_management(
        "D:/mos_final_%d.gdb/%s" % (i, name), "Raster Dataset", input_path,
        "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "NO_OVERVIEWS", "", "", "", "", "#",
        "SUBFOLDERS", "ALLOW_DUPLICATES", "NO_PYRAMIDS", "NO_STATISTICS",
        "NO_THUMBNAILS", "#", "NO_FORCE_SPATIAL_REFERENCE", "NO_STATISTICS", "")
print "---All Rasters Added---"
"""
env.snapRaster = r'G:\ImageryServer\A__Snap\Phase6_Snap.tif'
arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(3857)
output_dir = "D:/final.gdb/"
# Copy each class mosaic (one class per GDB, mos_final_1..13, matching the
# layout created above) into the final GDB as 8-bit unsigned, timing each copy.
for i, (pattern, name) in enumerate(CLASS_PATTERNS, start=1):
    start_time = time.time()
    arcpy.CopyRaster_management(
        "D:/mos_final_%d.gdb/%s" % (i, name), output_dir + name,
        "", "", "", "NONE", "NONE", "8_BIT_UNSIGNED", "NONE", "NONE", "", "NONE")
    print("--- %s Complete %s seconds ---" % (name, time.time() - start_time))
"""
| 53.062696 | 317 | 0.667632 | 2,321 | 16,927 | 4.537268 | 0.054718 | 0.059254 | 0.04444 | 0.070364 | 0.864495 | 0.838572 | 0.832115 | 0.758617 | 0.747033 | 0.739151 | 0 | 0.013739 | 0.122822 | 16,927 | 319 | 318 | 53.062696 | 0.695515 | 0.006203 | 0 | 0.353591 | 0 | 0 | 0.377364 | 0.113715 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.016575 | null | null | 0.033149 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e0337fa42eea3eafa9bd77900d0b4cab93c12ab8 | 129 | py | Python | flash/video/__init__.py | Site-Command/lightning-flash | bfff08ded9cf193cce1cd16e7034d8005de172ae | ["Apache-2.0"] | 1 | 2021-06-01T09:59:03.000Z | 2021-06-01T09:59:03.000Z | flash/video/__init__.py | Site-Command/lightning-flash | bfff08ded9cf193cce1cd16e7034d8005de172ae | ["Apache-2.0"] | null | null | null | flash/video/__init__.py | Site-Command/lightning-flash | bfff08ded9cf193cce1cd16e7034d8005de172ae | ["Apache-2.0"] | null | null | null |
from flash.video.classification.data import VideoClassificationData
from flash.video.classification.model import VideoClassifier
| 43 | 67 | 0.891473 | 14 | 129 | 8.214286 | 0.642857 | 0.156522 | 0.243478 | 0.486957 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.062016 | 129 | 2 | 68 | 64.5 | 0.950413 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e05771846584399f5321ca420fe00b224af19cbb | 12,367 | py | Python | fscli/machinelearning.py | fbalak/fscli | 85ffe9834e8ddc32c392fafb82fd76c943fc5f23 | ["Apache-2.0"] | null | null | null | fscli/machinelearning.py | fbalak/fscli | 85ffe9834e8ddc32c392fafb82fd76c943fc5f23 | ["Apache-2.0"] | null | null | null | fscli/machinelearning.py | fbalak/fscli | 85ffe9834e8ddc32c392fafb82fd76c943fc5f23 | ["Apache-2.0"] | null | null | null |
from sklearn import metrics as mx
from sklearn.model_selection import KFold
import pandas as pd
try:
from fscli import featureselection
except ImportError:
import featureselection
def classification(source, model, target_att, test_source="", fs_task=False):
"""Performs classification on given data.
Params:
source -- Path to the file that is used to train.
model -- Object loaded from file with trained model.
target_att -- Name of attribute in source that is considered as target.
test_source -- Path to the file that is used to test.
fs_task -- String with name of used feature selection algorithm.
"""
results = dict.fromkeys([
"score",
"model",
"removed_features",
"selected_features",
"feature_importances",
"measures"])
# Basic metrics used for classification and feature selection evaluation.
metrics = dict.fromkeys(["accuracy", "recall", "precision", "f_measure"])
metrics["accuracy"] = []
metrics["recall"] = []
metrics["precision"] = []
metrics["f_measure"] = []
results["removed_features"] = []
results["feature_importances"] = []
cfr = model
# Object for reading train data and test data
csv = pd.read_csv(source)
# Numpy array with values from source path without feature names and
# target values.
train = csv.loc[:, csv.columns != target_att].values
# List of feature names
features = csv.columns.tolist()
# Numpy array with target values
target = csv[target_att].values
if fs_task:
# Pipeline with fitted model and feature selection filter or only
# fitted model.
cfr = featureselection.get_fs_model(cfr, fs_task, train, target)
if test_source:
# Numpy array with values from test_source path without feature names
# and target values.
test_csv = pd.read_csv(test_source)
test = test_csv.loc[:, csv.columns != target_att].values
# Numpy array with test target values
test_target = test_csv[target_att].values
cfr.fit(train, target)
prediction = cfr.predict(test)
metrics["accuracy"].append(mx.accuracy_score(test_target, prediction))
metrics["precision"].append(
mx.precision_score(test_target, prediction, average="macro"))
metrics["recall"].append(
mx.recall_score(test_target, prediction, average="macro"))
metrics["f_measure"].append(
mx.f1_score(test_target, prediction, average="macro"))
else:
cv = KFold(n_splits=4, shuffle=True)
for train_idx, test_idx in cv.split(train):
cfr.fit(train[train_idx], target[train_idx])
prediction = cfr.predict(train[test_idx])
metrics["accuracy"].append(
mx.accuracy_score(target[test_idx], prediction))
metrics["precision"].append(
mx.precision_score(
target[test_idx],
prediction,
average="macro"))
metrics["recall"].append(
mx.recall_score(target[test_idx], prediction, average="macro"))
metrics["f_measure"].append(
mx.f1_score(target[test_idx], prediction, average="macro"))
# results["score"] = cfr.score(test, test_target)
if fs_task:
original_features = features[:]
if fs_task == "RFE":
selected_features = []
elif fs_task == "fromModel":
selected_features = featureselection.get_selected_features(
cfr,
original_features)
else:
selected_features = featureselection.get_selected_features(
cfr.named_steps["feature_selection"], original_features)
removed_features = [i for i in features if i not in selected_features]
results["removed_features"].append(removed_features)
results["model"] = cfr
results["metrics"] = metrics
return results
def clustering(source, model, target_att, test_source="", fs_task=False):
"""Performs clustering on given data.
Params:
source -- Path to the file that is used to train.
model -- Object loaded from file with trained model.
target_att -- Name of attribute in source that is considered as target.
test_source -- Path to the file that is used to test.
fs_task -- String with name of used feature selection algorithm.
"""
results = dict.fromkeys([
"score",
"model",
"removed_features",
"selected_features",
"feature_importances",
"measures"])
# Basic metrics used for clustering and feature selection evaluation.
metrics = dict.fromkeys(["homogeneity", "completeness", "fowlkes",
"v_measure"])
metrics["homogeneity"] = []
metrics["completeness"] = []
metrics["fowlkes"] = []
metrics["v_measure"] = []
results["removed_features"] = []
results["feature_importances"] = []
cfr = model
# Object for reading train data and test data
csv = pd.read_csv(source)
# Numpy array with values from source path without feature names and
# target values.
train = csv.loc[:, csv.columns != target_att].values
# List of feature names
features = csv.columns.tolist()
# Numpy array with target values
target = csv[target_att].values
if fs_task:
# Pipeline with fitted model and feature selection filter or only
# fitted model.
cfr = featureselection.get_fs_model(cfr, fs_task, train, target)
if test_source:
# Numpy array with values from test_source path without feature names
# and target values.
test_csv = pd.read_csv(test_source)
test = test_csv.loc[:, csv.columns != target_att].values
# Numpy array with test target values
test_target = test_csv[target_att].values
cfr.fit(train, target)
prediction = cfr.predict(test)
metrics["homogeneity"].append(
mx.homogeneity_score(test_target, prediction))
metrics["completeness"].append(
mx.completeness_score(test_target, prediction))
metrics["fowlkes"].append(
mx.fowlkes_mallows_score(test_target, prediction))
metrics["v_measure"].append(
mx.v_measure_score(test_target, prediction))
else:
cv = KFold(n_splits=4, shuffle=True)
for train_idx, test_idx in cv.split(train):
cfr.fit(train[train_idx], target[train_idx])
prediction = cfr.predict(train[test_idx])
metrics["homogeneity"].append(
mx.homogeneity_score(target[test_idx], prediction))
metrics["completeness"].append(
mx.completeness_score(target[test_idx], prediction))
metrics["fowlkes"].append(
mx.fowlkes_mallows_score(target[test_idx], prediction))
metrics["v_measure"].append(
mx.v_measure_score(target[test_idx], prediction))
# results["score"] = cfr.score(test, test_target)
if fs_task:
original_features = features[:]
if fs_task == "RFE":
selected_features = []
elif fs_task == "fromModel":
selected_features = featureselection.get_selected_features(
cfr,
original_features)
else:
selected_features = featureselection.get_selected_features(
cfr.named_steps["feature_selection"], original_features)
removed_features = [i for i in features if i not in selected_features]
results["removed_features"].append(removed_features)
results["model"] = cfr
results["metrics"] = metrics
return results
def regression(source, model, target_att, test_source="", fs_task=False):
"""Performs regression on given data.
Params:
source -- Path to the file that is used to train.
model -- Object loaded from file with trained model.
target_att -- Name of attribute in source that is considered as target.
test_source -- Path to the file that is used to test.
fs_task -- String with name of used feature selection algorithm.
"""
results = dict.fromkeys([
"score",
"model",
"removed_features",
"selected_features",
"feature_importances",
"measures"])
# Basic metrics used for regression and feature selection evaluation.
metrics = dict.fromkeys(
["explained_variance", "neg_mean_absolute_error",
"neg_mean_squared_error", "neg_mean_squared_log_error", "r2",
"neg_median_absolute_error"])
metrics["explained_variance"] = []
metrics["neg_mean_absolute_error"] = []
metrics["neg_mean_squared_error"] = []
metrics["neg_mean_squared_log_error"] = []
metrics["r2"] = []
metrics["neg_median_absolute_error"] = []
results["removed_features"] = []
results["feature_importances"] = []
cfr = model
# Object for reading train data and test data
csv = pd.read_csv(source)
# Numpy array with values from source path without feature names and
# target values.
train = csv.loc[:, csv.columns != target_att].values
# List of feature names
features = csv.columns.tolist()
# Numpy array with target values
target = csv[target_att].values
if fs_task:
# Pipeline with fitted model and feature selection filter or only
# fitted model.
cfr = featureselection.get_fs_model(cfr, fs_task, train, target)
if test_source:
# Numpy array with values from test_source path without feature names
# and target values.
test_csv = pd.read_csv(test_source)
test = test_csv.loc[:, csv.columns != target_att].values
# Numpy array with test target values
test_target = test_csv[target_att].values
cfr.fit(train, target)
prediction = cfr.predict(test)
metrics["explained_variance"].append(
mx.explained_variance_score(test_target, prediction))
metrics["neg_mean_absolute_error"].append(
mx.mean_absolute_error(test_target, prediction))
metrics["neg_mean_squared_error"].append(
mx.mean_squared_error(test_target, prediction))
metrics["neg_mean_squared_log_error"].append(
mx.mean_squared_log_error(test_target, prediction))
metrics["r2"].append(
mx.r2_score(test_target, prediction))
metrics["neg_median_absolute_error"].append(
mx.median_absolute_error(test_target, prediction))
else:
cv = KFold(n_splits=4, shuffle=True)
for train_idx, test_idx in cv.split(train):
cfr.fit(train[train_idx], target[train_idx])
prediction = cfr.predict(train[test_idx])
metrics["explained_variance"].append(
mx.explained_variance_score(target[test_idx], prediction))
metrics["neg_mean_absolute_error"].append(
mx.mean_absolute_error(target[test_idx], prediction))
metrics["neg_mean_squared_error"].append(
mx.mean_squared_error(target[test_idx], prediction))
metrics["neg_mean_squared_log_error"].append(
mx.mean_squared_log_error(target[test_idx], prediction))
metrics["r2"].append(
mx.r2_score(target[test_idx], prediction))
metrics["neg_median_absolute_error"].append(
mx.median_absolute_error(target[test_idx], prediction))
# results["score"] = cfr.score(test, target[test_idx])
if fs_task:
original_features = features[:]
if fs_task == "RFE":
selected_features = []
elif fs_task == "fromModel":
selected_features = featureselection.get_selected_features(
cfr,
original_features)
else:
selected_features = featureselection.get_selected_features(
cfr.named_steps["feature_selection"], original_features)
removed_features = [i for i in features if i not in selected_features]
results["removed_features"].append(removed_features)
results["model"] = cfr
results["metrics"] = metrics
return results
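A minimal usage sketch for the classification() helper above, assuming a training CSV with a target column; "data.csv", "label", and the choice of DecisionTreeClassifier are illustrative placeholders, not part of the fscli source:
from sklearn.tree import DecisionTreeClassifier
# With no test_source, classification() runs 4-fold cross-validation and
# returns per-fold scores under results["metrics"].
results = classification("data.csv", DecisionTreeClassifier(), "label")
print(results["metrics"]["accuracy"])  # list of four per-fold accuracy scores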
| 37.475758 | 79 | 0.637907 | 1,433 | 12,367 | 5.296581 | 0.091417 | 0.029513 | 0.025692 | 0.042424 | 0.925033 | 0.904743 | 0.868643 | 0.803821 | 0.771278 | 0.732279 | 0 | 0.001207 | 0.263039 | 12,367 | 329 | 80 | 37.589666 | 0.831578 | 0.208215 | 0 | 0.711009 | 0 | 0 | 0.13093 | 0.039776 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013761 | false | 0 | 0.055046 | 0 | 0.082569 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0ec7e5b2436c09e48f77bb762f554197393bd9e2 | 1,596 | py | Python | tests/unit/management/test_resource.py | stone-payments/flask-management-blueprint | eafec0e1560726705990ac8a37c120006011047c | ["Apache-2.0"] | 3 | 2018-03-23T21:55:38.000Z | 2020-03-11T10:22:29.000Z | tests/unit/management/test_resource.py | stone-payments/flask-management-blueprint | eafec0e1560726705990ac8a37c120006011047c | ["Apache-2.0"] | 8 | 2018-03-16T17:40:28.000Z | 2021-06-11T17:45:37.000Z | tests/unit/management/test_resource.py | stone-payments/flask-management-blueprint | eafec0e1560726705990ac8a37c120006011047c | ["Apache-2.0"] | 4 | 2018-06-06T20:00:34.000Z | 2020-03-10T14:51:08.000Z |
from http import HTTPStatus
from unittest.mock import patch
from flask_management_blueprint.management import health_status
from flask_management_blueprint.management import management_resource
@patch('flask_management_blueprint.management.management_resource.HealthCheck.check_resources_health')
@patch('flask_management_blueprint.management.management_resource.AppInfo.app_info')
def test_health_check_is_ok(mock_app_info, mock_health_check, test_app):
with test_app.test_request_context():
mock_health_check.return_value = []
mock_app_info.return_value = {
"ApplicationName": "mock1",
"ApplicationType": "mock2",
"BuildDate": "mock3",
"Version": "mock4",
"Status": health_status.OK[0]
}
response = management_resource.health_check()
assert response[1] is HTTPStatus.OK
@patch('flask_management_blueprint.management.management_resource.HealthCheck.check_resources_health')
@patch('flask_management_blueprint.management.management_resource.AppInfo.app_info')
def test_health_check_is_not_ok(mock_app_info, mock_health_check, test_app):
with test_app.test_request_context():
mock_health_check.return_value = []
mock_app_info.return_value = {
"ApplicationName": "mock1",
"ApplicationType": "mock2",
"BuildDate": "mock3",
"Version": "mock4",
"Status": health_status.CRITICAL[0]
}
response = management_resource.health_check()
assert response[1] is HTTPStatus.INTERNAL_SERVER_ERROR
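These tests depend on a test_app pytest fixture that is not defined in this file; a minimal conftest.py sketch, under the assumption that a bare Flask app in testing mode is sufficient (the real project fixture may configure more):
import pytest
from flask import Flask

@pytest.fixture
def test_app():
    # Bare-bones app; enough to provide test_request_context() for the tests above.
    app = Flask(__name__)
    app.config["TESTING"] = True
    return app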
| 38 | 102 | 0.721805 | 178 | 1,596 | 6.08427 | 0.252809 | 0.081256 | 0.132964 | 0.188366 | 0.893813 | 0.893813 | 0.812558 | 0.812558 | 0.812558 | 0.812558 | 0 | 0.009238 | 0.18609 | 1,596 | 41 | 103 | 38.926829 | 0.82448 | 0 | 0 | 0.625 | 0 | 0 | 0.298246 | 0.20802 | 0 | 0 | 0 | 0 | 0.0625 | 1 | 0.0625 | false | 0 | 0.125 | 0 | 0.1875 | 0.1875 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
161f3f30db996383bea9062b39c50132f94b29af | 6,614 | py | Python | tests/NMT_architectures/shallow_LSTM_GRU.py | davidwilby/nmt-keras | 2feac8e7452e2a249bde03135e9230e087e4367f | ["MIT"] | null | null | null | tests/NMT_architectures/shallow_LSTM_GRU.py | davidwilby/nmt-keras | 2feac8e7452e2a249bde03135e9230e087e4367f | ["MIT"] | null | null | null | tests/NMT_architectures/shallow_LSTM_GRU.py | davidwilby/nmt-keras | 2feac8e7452e2a249bde03135e9230e087e4367f | ["MIT"] | 1 | 2021-01-08T22:14:20.000Z | 2021-01-08T22:14:20.000Z |
import argparse
import pytest
from keras import backend as K
from config import load_parameters
from data_engine.prepare_data import build_dataset
from nmt_keras.training import train_model
from nmt_keras.apply_model import sample_ensemble, score_corpus
def load_tests_params():
params = load_parameters()
params['BATCH_SIZE'] = 10
params['WEIGHT_DECAY'] = 1e-4
params['RECURRENT_WEIGHT_DECAY'] = 1e-4
params['DROPOUT_P'] = 0.01
params['RECURRENT_INPUT_DROPOUT_P'] = 0.01
params['RECURRENT_DROPOUT_P'] = 0.01
params['USE_NOISE'] = True
params['NOISE_AMOUNT'] = 0.01
params['USE_BATCH_NORMALIZATION'] = True
params['BATCH_NORMALIZATION_MODE'] = 1
params['SOURCE_TEXT_EMBEDDING_SIZE'] = 8
params['TARGET_TEXT_EMBEDDING_SIZE'] = 8
params['DECODER_HIDDEN_SIZE'] = 4
params['ENCODER_HIDDEN_SIZE'] = 4
params['ATTENTION_SIZE'] = params['DECODER_HIDDEN_SIZE']
params['SKIP_VECTORS_HIDDEN_SIZE'] = params['DECODER_HIDDEN_SIZE']
params['DOUBLE_STOCHASTIC_ATTENTION_REG'] = 0.7
params['RELOAD'] = 0
params['MAX_EPOCH'] = 1
params['USE_CUDNN'] = False
return params
def test_NMT_Bidir_LSTM_GRU():
params = load_tests_params()
# Current test params: Single layered BLSTM - GRU
params['BIDIRECTIONAL_ENCODER'] = True
params['N_LAYERS_ENCODER'] = 1
params['BIDIRECTIONAL_DEEP_ENCODER'] = False
params['ENCODER_RNN_TYPE'] = 'LSTM'
params['DECODER_RNN_TYPE'] = 'GRU'
params['N_LAYERS_DECODER'] = 1
params['REBUILD_DATASET'] = True
dataset = build_dataset(params)
params['INPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['INPUTS_IDS_DATASET'][0]]
params['OUTPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['OUTPUTS_IDS_DATASET'][0]]
params['MODEL_NAME'] = \
params['TASK_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '_' + params['MODEL_TYPE'] + \
'_src_emb_' + str(params['SOURCE_TEXT_EMBEDDING_SIZE']) + \
'_bidir_' + str(params['BIDIRECTIONAL_ENCODER']) + \
'_enc_' + params['ENCODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_ENCODER']) + '_' + str(
params['ENCODER_HIDDEN_SIZE']) + \
'_dec_' + params['DECODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_DECODER']) + '_' + str(
params['DECODER_HIDDEN_SIZE']) + \
'_deepout_' + '_'.join([layer[0] for layer in params['DEEP_OUTPUT_LAYERS']]) + \
'_trg_emb_' + str(params['TARGET_TEXT_EMBEDDING_SIZE']) + \
'_' + params['OPTIMIZER'] + '_' + str(params['LR'])
params['STORE_PATH'] = K.backend() + '_test_train_models/' + params['MODEL_NAME'] + '/'
# Test several NMT-Keras utilities: train, sample, sample_ensemble, score_corpus...
print ("Training model")
train_model(params)
params['RELOAD'] = 1
print ("Done")
parser = argparse.ArgumentParser('Parser for unit testing')
parser.dataset = params['DATASET_STORE_PATH'] + '/Dataset_' + params['DATASET_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '.pkl'
parser.text = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['SRC_LAN']
parser.splits = ['val']
parser.config = params['STORE_PATH'] + '/config.pkl'
parser.models = [params['STORE_PATH'] + '/epoch_' + str(1)]
parser.verbose = 0
parser.dest = None
parser.source = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['SRC_LAN']
parser.target = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['TRG_LAN']
parser.weights = []
parser.glossary = None
for n_best in [True, False]:
parser.n_best = n_best
print ("Sampling with n_best = %s " % str(n_best))
sample_ensemble(parser, params)
print ("Done")
print ("Scoring corpus")
score_corpus(parser, params)
print ("Done")
def test_NMT_Unidir_LSTM_GRU():
params = load_tests_params()
# Current test params: Single layered LSTM - GRU
params['BIDIRECTIONAL_ENCODER'] = False
params['N_LAYERS_ENCODER'] = 1
params['BIDIRECTIONAL_DEEP_ENCODER'] = False
params['ENCODER_RNN_TYPE'] = 'LSTM'
params['DECODER_RNN_TYPE'] = 'GRU'
params['N_LAYERS_DECODER'] = 1
params['REBUILD_DATASET'] = True
dataset = build_dataset(params)
params['INPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['INPUTS_IDS_DATASET'][0]]
params['OUTPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['OUTPUTS_IDS_DATASET'][0]]
params['MODEL_NAME'] = \
params['TASK_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '_' + params['MODEL_TYPE'] + \
'_src_emb_' + str(params['SOURCE_TEXT_EMBEDDING_SIZE']) + \
'_bidir_' + str(params['BIDIRECTIONAL_ENCODER']) + \
'_enc_' + params['ENCODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_ENCODER']) + '_' + str(
params['ENCODER_HIDDEN_SIZE']) + \
'_dec_' + params['DECODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_DECODER']) + '_' + str(
params['DECODER_HIDDEN_SIZE']) + \
'_deepout_' + '_'.join([layer[0] for layer in params['DEEP_OUTPUT_LAYERS']]) + \
'_trg_emb_' + str(params['TARGET_TEXT_EMBEDDING_SIZE']) + \
'_' + params['OPTIMIZER'] + '_' + str(params['LR'])
params['STORE_PATH'] = K.backend() + '_test_train_models/' + params['MODEL_NAME'] + '/'
# Test several NMT-Keras utilities: train, sample, sample_ensemble, score_corpus...
print ("Training model")
train_model(params)
params['RELOAD'] = 1
print ("Done")
parser = argparse.ArgumentParser('Parser for unit testing')
parser.dataset = params['DATASET_STORE_PATH'] + '/Dataset_' + params['DATASET_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '.pkl'
parser.text = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['SRC_LAN']
parser.splits = ['val']
parser.config = params['STORE_PATH'] + '/config.pkl'
parser.models = [params['STORE_PATH'] + '/epoch_' + str(1)]
parser.verbose = 0
parser.dest = None
parser.source = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['SRC_LAN']
parser.target = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['TRG_LAN']
parser.weights = []
parser.glossary = None
for n_best in [True, False]:
parser.n_best = n_best
print ("Sampling with n_best = %s " % str(n_best))
sample_ensemble(parser, params)
print ("Done")
print ("Scoring corpus")
score_corpus(parser, params)
print ("Done")
if __name__ == '__main__':
pytest.main([__file__])
| 41.860759 | 143 | 0.650892 | 803 | 6,614 | 4.995019 | 0.169365 | 0.035901 | 0.025929 | 0.026926 | 0.831962 | 0.801296 | 0.771877 | 0.771877 | 0.771877 | 0.771877 | 0 | 0.007993 | 0.186574 | 6,614 | 157 | 144 | 42.127389 | 0.737546 | 0.039008 | 0 | 0.71875 | 0 | 0 | 0.330972 | 0.082979 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023438 | false | 0 | 0.054688 | 0 | 0.085938 | 0.09375 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1680c162547ac8db59f6854ede9a86bed3bf6979 | 95 | py | Python | tests/test_colors.py | eng-tools/bwplot | bc3646390fbe3fb2e99f9720e6b3f723a23fd71a | ["MIT"] | null | null | null | tests/test_colors.py | eng-tools/bwplot | bc3646390fbe3fb2e99f9720e6b3f723a23fd71a | ["MIT"] | null | null | null | tests/test_colors.py | eng-tools/bwplot | bc3646390fbe3fb2e99f9720e6b3f723a23fd71a | ["MIT"] | null | null | null |
from bwplot import colors
def test_a_color():
assert colors.cbox(0) == (0.0, 0.0, 0.55)
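The test pins cbox(0) to a dark blue RGB tuple; a hedged sketch of cbox as an indexed colour cycle for matplotlib, illustrative usage rather than part of this test file:
import matplotlib.pyplot as plt
from bwplot import colors

# Assumes cbox(i) returns an RGB tuple for small indices i, as it does for 0.
for i in range(3):
    plt.plot([0, 1], [i, i + 1], color=colors.cbox(i), label="series %d" % i)
plt.legend()
plt.show()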
| 13.571429 | 45 | 0.642105 | 18 | 95 | 3.277778 | 0.666667 | 0.169492 | 0.20339 | 0.20339 | 0.101695 | 0 | 0 | 0 | 0 | 0 | 0 | 0.105263 | 0.2 | 95 | 6 | 46 | 15.833333 | 0.671053 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
168e8c718c4164befb786357b1537952567d9d87 | 92 | py | Python | parameters_80.py | wasuaje/web2py5 | 02f310b9526f92c4ec62ab5b0271069a1c101e9f | ["BSD-3-Clause"] | null | null | null | parameters_80.py | wasuaje/web2py5 | 02f310b9526f92c4ec62ab5b0271069a1c101e9f | ["BSD-3-Clause"] | null | null | null | parameters_80.py | wasuaje/web2py5 | 02f310b9526f92c4ec62ab5b0271069a1c101e9f | ["BSD-3-Clause"] | null | null | null |
password="pbkdf2(1000,20,sha512)$b61e255fa07bb6a3$2d240cac08e666996562d0b053b802b8eac5425f"
| 46 | 91 | 0.891304 | 7 | 92 | 11.714286 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.505495 | 0.01087 | 92 | 1 | 92 | 92 | 0.395604 | 0 | 0 | 0 | 0 | 0 | 0.869565 | 0.869565 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
16964ed13f1015f5b53e649d1afae397a5cd2a9b | 17,468 | py | Python | examples/rough_translated1/osgwidgetmessagebox.py | JaneliaSciComp/osgpyplusplus | a5ae3f69c7e9101a32d8cc95fe680dab292f75ac | ["BSD-3-Clause"] | 17 | 2015-06-01T12:19:46.000Z | 2022-02-12T02:37:48.000Z | examples/rough_translated1/osgwidgetmessagebox.py | cmbruns/osgpyplusplus | f8bfca2cf841e15f6ddb41c958f3ad0d0b9e4b75 | ["BSD-3-Clause"] | 7 | 2015-07-04T14:36:49.000Z | 2015-07-23T18:09:49.000Z | examples/rough_translated1/osgwidgetmessagebox.py | cmbruns/osgpyplusplus | f8bfca2cf841e15f6ddb41c958f3ad0d0b9e4b75 | ["BSD-3-Clause"] | 7 | 2015-11-28T17:00:31.000Z | 2020-01-08T07:00:59.000Z |
#!/bin/env python
# Automatically translated python version of
# OpenSceneGraph example program "osgwidgetmessagebox"
# !!! This program will need manual tuning before it will work. !!!
import sys
from osgpypp import osg
from osgpypp import osgAnimation
from osgpypp import osgDB
from osgpypp import osgWidget
# Translated from file 'osgwidgetmessagebox.cpp'
# -*-c++-*- osgWidget - Copyright Cedric Pinson 2008
#include <osgWidget/Util>
#include <osgWidget/WindowManager>
#include <osgWidget/Frame>
#include <osgWidget/Box>
#include <osgWidget/Widget>
#include <osgWidget/Types>
#include <osgDB/ReadFile>
#include <osgAnimation/EaseMotion>
#include <osg/io_utils>
#include <iostream>
MASK_2D = 0xF0000000
class MessageBox :
createButtonOk = osgWidget.Frame*( str theme, str text, str font, int fontSize)
createLabel = osgWidget.Label*( str string, str font, int size, osgWidget.Color color)
_window = osgWidget.Frame()
_button = osgWidget.Frame()
getButton = osgWidget.Frame*()
getWindow = osgWidget.Frame*()
create = bool( str themeMessage,
str themeButton,
str titleText,
str messageText,
str buttonText,
str font,
int fontSize)
osgWidget.Frame* MessageBox.getButton() return _button
osgWidget.Frame* MessageBox.getWindow() return _window
class AlphaSetterVisitor (osg.NodeVisitor) :
_alpha = float()
AlphaSetterVisitor( float alpha = 1.0):osg.NodeVisitor(TRAVERSE_ALL_CHILDREN) _alpha = alpha
def apply(node):
win = dynamic_cast<osgWidget.Window*>(node)
if win :
# osgWidget.warn(), "I am in Window: ", win.getName()
for (osgWidget.Window.Iterator it = win.begin() it not = win.end() it++)
# osgWidget.warn(), " I am operating on Widget: ", it.getName()
color = it.getColor()
color[3] = color[3] *_alpha
it.setColor(color)
color = win.getBackground().getColor()
color[3] = color[3] *_alpha
win.getBackground().setColor(color)
traverse(node)
class ColorSetterVisitor (osg.NodeVisitor) :
_color = osgWidget.Color()
ColorSetterVisitor( osgWidget.Color color):osg.NodeVisitor(TRAVERSE_ALL_CHILDREN) _color = color
def apply(node):
win = dynamic_cast<osgWidget.Window*>(node)
if win :
# osgWidget.warn(), "I am in Window: ", win.getName()
for (osgWidget.Window.Iterator it = win.begin() it not = win.end() it++)
# osgWidget.warn(), " I am operating on Widget: ", it.getName()
# color = it.getColor()
# color[3] = color[3] *_alpha
it.setColor(_color)
# color = win.getBackground().getColor()
# color[3] = color[3] *_alpha
win.getBackground().setColor(osgWidget.Color(0,0,0,0))
traverse(node)
struct EventOK : public osgWidget.Callback, osg.NodeCallback
typedef osgAnimation.OutCubicMotion WidgetMotion
# typedef osgAnimation.OutQuartMotion WidgetMotion
_motionOver = WidgetMotion()
_motionLeave = WidgetMotion()
_lastUpdate = double()
_defaultColor = osgWidget.Color()
_overColor = osgWidget.Color()
_over = bool()
_frame = osgWidget.Frame()
_width = float()
_height = float()
_matrix = osg.Matrix()
EventOK(osgWidget.Frame* frame) : osgWidget.Callback(osgWidget.EVENT_ALL), _frame(frame)
_motionOver = WidgetMotion(0.0, 0.4)
_motionLeave = WidgetMotion(0.0, 0.5)
_defaultColor = _frame.getEmbeddedWindow().getColor()
_overColor = osgWidget.Color(229.0/255.0,
103.0/255.0,
17.0/255,
_defaultColor[3])
_over = False
bool operator()(osgWidget.Event ev)
if ev.type == osgWidget.EVENT_MOUSE_ENTER :
_over = True
_width = _frame.getWidth()
_height = _frame.getHeight()
_motionOver.reset()
_matrix = _frame.getMatrix()
#_frame.setMatrix(osg.Matrix.scale(2, 2, 1) * _frame.getMatrix())
_frame.setScale(1.1) #osg.Matrix.scale(2, 2, 1) * _frame.getMatrix())
_frame.update() #osg.Matrix.scale(2, 2, 1) * _frame.getMatrix())
print "enter"
return True
elif ev.type == osgWidget.EVENT_MOUSE_LEAVE :
_over = False
_motionLeave.reset()
#_frame.setMatrix(_matrix)
_frame.setScale(1.0)
_frame.update()
print "leave"
return True
return False
void operator()(osg.Node* node, osg.NodeVisitor* nv)
if nv.getVisitorType() == osg.NodeVisitor.UPDATE_VISITOR :
fs = nv.getFrameStamp()
dt = fs.getSimulationTime() - _lastUpdate
_lastUpdate = fs.getSimulationTime()
if _frame.valid() :
value = float()
if _over :
_motionOver.update(dt)
value = _motionOver.getValue()
else:
_motionLeave.update(dt)
value = 1.0 - _motionLeave.getValue()
c = _defaultColor + ((_overColor - _defaultColor) * value)
colorSetter = ColorSetterVisitor(c)
_frame.accept(colorSetter)
node.traverse(*nv)
osgWidget.Label* MessageBox.createLabel( str string, str font, int size, osgWidget.Color color)
label = osgWidget.Label("", "")
label.setFont(font)
label.setFontSize(size)
label.setFontColor(color)
label.setColor(osgWidget.Color(0,0,0,0))
label.setLabel(string)
label.setCanFill(True)
return label
osgWidget.Frame* MessageBox.createButtonOk( str theme,
str text,
str font,
int fontSize)
frame = osgWidget.Frame.createSimpleFrameFromTheme(
"ButtonOK",
osgDB.readImageFile(theme),
300.0,
50.0,
osgWidget.Frame.FRAME_TEXTURE
)
frame.getBackground().setColor(0.0, 0.0, 0.0, 0.0)
label = createLabel(text, font, fontSize, osgWidget.Color(0,0,0,1))
box = osgWidget.Box("HBOX", osgWidget.Box.HORIZONTAL)
box.addWidget(label)
box.resize()
colorBack = frame.getEmbeddedWindow().getColor()
box.getBackground().setColor(colorBack)
frame.getEmbeddedWindow().setWindow(box)
box.setVisibilityMode(osgWidget.Window.VM_ENTIRE)
box.setEventMask(osgWidget.EVENT_NONE)
frame.setVisibilityMode(osgWidget.Window.VM_ENTIRE)
frame.resizeFrame(box.getWidth(), box.getHeight())
frame.resizeAdd(0, 0)
event = EventOK(frame)
frame.setUpdateCallback(event)
frame.addCallback(event)
return frame.release()
bool MessageBox.create( str themeMessage,
str themeButton,
str titleText,
str messageText,
str buttonText,
str font,
int fontSize)
frame = osgWidget.Frame.createSimpleFrameFromTheme(
"error",
osgDB.readImageFile(themeMessage),
300.0,
50.0,
osgWidget.Frame.FRAME_ALL
)
frame.getBackground().setColor(0.0, 0.0, 0.0, 0.0)
labelText = createLabel(messageText, font, fontSize, osgWidget.Color(0,0,0,1))
labelTitle = createLabel(titleText, font, fontSize+5, osgWidget.Color(0.4,0,0,1))
box = osgWidget.Box("VBOX", osgWidget.Box.VERTICAL)
_button = createButtonOk(themeButton, buttonText, font, fontSize)
buttonOK = _button.embed()
_button.setVisibilityMode(osgWidget.Window.VM_ENTIRE)
buttonOK.setColor(osgWidget.Color(0,0,0,0))
buttonOK.setCanFill(False)
labelTitle.setPadBottom(30.0)
labelText.setPadBottom(30.0)
box.addWidget(buttonOK)
box.addWidget(labelText)
box.addWidget(labelTitle)
colorBack = frame.getEmbeddedWindow().getColor()
box.getBackground().setColor(colorBack)
frame.setWindow(box)
box.resize()
frame.resizeFrame(box.getWidth(), box.getHeight())
_window = frame
return True
LABEL1 = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed\n"
"do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n"
"Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris\n"
"nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in..."
def main(argv):
viewer = osgViewer.Viewer()
wm = osgWidget.WindowManager(
viewer,
1280.0,
1024.0,
MASK_2D,
osgWidget.WindowManager.WM_PICK_DEBUG
)
fontSize = 20
font = "fonts/arial.ttf"
buttonTheme = "osgWidget/theme-8-shadow.png"
borderTheme = "osgWidget/theme-8.png"
message = MessageBox()
message.create(borderTheme,
buttonTheme,
"Error - Critical",
LABEL1,
"Quit",
font,
fontSize)
alpha = AlphaSetterVisitor(.8)
message.getWindow().accept(alpha)
wm.addChild(message.getWindow())
# center
w = wm.getWidth()
h = wm.getHeight()
ww = message.getWindow().getWidth()
hw = message.getWindow().getHeight()
ox = (w - ww) / 2
oy = (h - hw) / 2
message.getWindow().setPosition(osgWidget.Point(
osg.round(ox), osg.round(oy), message.getWindow().getPosition()[2])
)
# frame.resizeAdd(30, 30)
# alpha = AlphaSetterVisitor(.8)
# frame.accept(alpha)
return osgWidget.createExample(viewer, wm) #osgDB.readNodeFile("cow.osgt"))
#if 0
class AlphaSetterVisitor (osg.NodeVisitor) :
_alpha = float()
AlphaSetterVisitor( float alpha = 1.0):osg.NodeVisitor(TRAVERSE_ALL_CHILDREN) _alpha = alpha
def apply(node):
win = dynamic_cast<osgWidget.Window*>(node)
if win :
# osgWidget.warn(), "I am in Window: ", win.getName()
for (osgWidget.Window.Iterator it = win.begin() it not = win.end() it++)
# osgWidget.warn(), " I am operating on Widget: ", it.getName()
color = it.getColor()
color[3] = color[3] *_alpha
it.setColor(color)
color = win.getBackground().getColor()
color[3] = color[3] *_alpha
win.getBackground().setColor(color)
traverse(node)
class ColorSetterVisitor (osg.NodeVisitor) :
_color = osgWidget.Color()
ColorSetterVisitor( osgWidget.Color color):osg.NodeVisitor(TRAVERSE_ALL_CHILDREN) _color = color
def apply(node):
win = dynamic_cast<osgWidget.Window*>(node)
if win :
# osgWidget.warn(), "I am in Window: ", win.getName()
for (osgWidget.Window.Iterator it = win.begin() it not = win.end() it++)
# osgWidget.warn(), " I am operating on Widget: ", it.getName()
# osgWidget.Color color = it.getColor()
# color[3] = color[3] *_alpha
it.setColor(_color)
# osgWidget.Color color = win.getBackground().getColor()
# color[3] = color[3] *_alpha
win.getBackground().setColor(osgWidget.Color(0,0,0,0))
traverse(node)
struct EventOK : public osgWidget.Callback, osg.NodeCallback
typedef osgAnimation.OutQuartMotion WidgetMotion
_motionOver = WidgetMotion()
_motionLeave = WidgetMotion()
_lastUpdate = double()
_defaultColor = osgWidget.Color()
_overColor = osgWidget.Color()
_over = bool()
_frame = osgWidget.Frame()
_width = float()
_height = float()
EventOK(osgWidget.Frame* frame) : osgWidget.Callback(osgWidget.EVENT_ALL), _frame(frame)
_motionOver = WidgetMotion(0.0, 0.4)
_motionLeave = WidgetMotion(0.0, 0.5)
_defaultColor = _frame.getEmbeddedWindow().getColor()
_overColor = osgWidget.Color(229.0/255.0,
103.0/255.0,
17.0/255,
_defaultColor[3])
_over = False
bool operator()(osgWidget.Event ev)
if ev.type == osgWidget.EVENT_MOUSE_ENTER :
_over = True
# print "Enter"
_width = _frame.getWidth()
_height = _frame.getHeight()
_motionOver.reset()
# _frame.resize(_width * 1.2, _height * 1.2)
return True
elif ev.type == osgWidget.EVENT_MOUSE_LEAVE :
_over = False
# print "Leave"
# _frame.resize(_width, _height)
_motionLeave.reset()
return True
return False
void operator()(osg.Node* node, osg.NodeVisitor* nv)
if nv.getVisitorType() == osg.NodeVisitor.UPDATE_VISITOR :
fs = nv.getFrameStamp()
dt = fs.getSimulationTime() - _lastUpdate
_lastUpdate = fs.getSimulationTime()
if _frame.valid() :
value = float()
if _over :
_motionOver.update(dt)
value = _motionOver.getValue()
else:
_motionLeave.update(dt)
value = 1.0 - _motionLeave.getValue()
c = _defaultColor + ((_overColor - _defaultColor) * value)
colorSetter = ColorSetterVisitor(c)
_frame.accept(colorSetter)
node.traverse(*nv)
def createLabel(string, font, size, color):
label = osgWidget.Label("", "")
label.setFont(font)
label.setFontSize(size)
label.setFontColor(color)
label.setColor(osgWidget.Color(0,0,0,0))
label.setLabel(string)
label.setCanFill(True)
return label
def createButtonOk(theme, text, fontSize):
frame = osgWidget.Frame.createSimpleFrameFromTheme(
"ButtonOK",
osgDB.readImageFile(theme),
300.0,
50.0,
osgWidget.Frame.FRAME_TEXTURE
)
frame.getBackground().setColor(0.0, 0.0, 0.0, 0.0)
label = createLabel(text, "fonts/Vera.ttf", fontSize, osgWidget.Color(0,0,0,1))
box = osgWidget.Box("HBOX", osgWidget.Box.HORIZONTAL)
box.addWidget(label)
box.resize()
colorBack = frame.getEmbeddedWindow().getColor()
box.getBackground().setColor(colorBack)
frame.getEmbeddedWindow().setWindow(box)
box.setVisibilityMode(osgWidget.Window.VM_ENTIRE)
box.setEventMask(osgWidget.EVENT_NONE)
frame.resizeFrame(box.getWidth(), box.getHeight())
frame.resizeAdd(0, 0)
event = EventOK(frame)
frame.setUpdateCallback(event)
frame.addCallback(event)
return frame.release()
def createErrorMessage(themeMessage, themeButton, titleText, messageText, buttonText, font, fontSize):
frame = osgWidget.Frame.createSimpleFrameFromTheme(
"error",
osgDB.readImageFile(themeMessage),
300.0,
50.0,
osgWidget.Frame.FRAME_ALL
)
frame.getBackground().setColor(0.0, 0.0, 0.0, 0.0)
labelText = createLabel(messageText, font, fontSize, osgWidget.Color(0,0,0,1))
labelTitle = createLabel(titleText, font, fontSize+5, osgWidget.Color(0.4,0,0,1))
box = osgWidget.Box("VBOX", osgWidget.Box.VERTICAL)
buttonOK = createButtonOk(themeButton, buttonText, fontSize).embed()
buttonOK.setColor(osgWidget.Color(0,0,0,0))
buttonOK.setCanFill(False)
box.addWidget(buttonOK)
box.addWidget(labelText)
box.addWidget(labelTitle)
colorBack = frame.getEmbeddedWindow().getColor()
box.getBackground().setColor(colorBack)
frame.setWindow(box)
box.resize()
frame.resizeFrame(box.getWidth(), box.getHeight())
return frame.release()
LABEL1 = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed\n"
"do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n"
"Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris\n"
"nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in..."
def main(argv):
theme = "osgWidget/theme-1.png"
if len(argv) > 1:
theme = str(argv[1])
viewer = osgViewer.Viewer()
wm = osgWidget.WindowManager(
viewer,
1280.0,
1024.0,
MASK_2D,
osgWidget.WindowManager.WM_PICK_DEBUG
)
frame = createErrorMessage(theme,
"osgWidget/theme-8-shadow.png",
"Error - Critical",
LABEL1,
"Ok",
"fonts/Vera.ttf",
20)
# Add everything to the WindowManager.
wm.addChild(frame)
frame.resizeAdd(30, 30)
alpha = AlphaSetterVisitor(.8)
frame.accept(alpha)
return osgWidget.createExample(viewer, wm, osgDB.readNodeFile("cow.osgt"))
#endif
if __name__ == "__main__":
main(sys.argv)
| 28.083601 | 102 | 0.590337 | 1,763 | 17,468 | 5.753261 | 0.163925 | 0.013014 | 0.013014 | 0.010253 | 0.78882 | 0.7762 | 0.7762 | 0.7762 | 0.756581 | 0.741694 | 0 | 0.023648 | 0.300378 | 17,468 | 621 | 103 | 28.128824 | 0.806317 | 0.117186 | 0 | 0.77748 | 0 | 0 | 0.050286 | 0.006375 | 0 | 0 | 0.000651 | 0 | 0 | 0 | null | null | 0 | 0.013405 | null | null | 0.005362 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bc4f1e1afb9a365834ed1fa5dec17f580929a14f | 55,610 | py | Python | fireant/tests/queries/test_data_blending_integration.py | mikeengland/fireant | 63c12728c11f1fb252265459f8b8f384d20414b9 | ["Apache-2.0"] | 122 | 2016-08-05T13:34:52.000Z | 2022-03-15T13:21:13.000Z | fireant/tests/queries/test_data_blending_integration.py | mikeengland/fireant | 63c12728c11f1fb252265459f8b8f384d20414b9 | ["Apache-2.0"] | 321 | 2016-08-10T08:48:15.000Z | 2021-07-28T13:08:18.000Z | fireant/tests/queries/test_data_blending_integration.py | mikeengland/fireant | 63c12728c11f1fb252265459f8b8f384d20414b9 | ["Apache-2.0"] | 27 | 2016-08-10T08:11:08.000Z | 2021-08-23T08:14:37.000Z |
from unittest import TestCase
from pypika import Tables, functions as fn
import fireant as f
from fireant import DataSet, DataType, Database, Field, ReactTable
from fireant.tests.database.mock_database import TestDatabase
class DataSetBlenderIntegrationTests(TestCase):
maxDiff = None
def test_select_only_a_metric_from_primary_dataset(
self,
):
db = Database()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (blend_ds.query().dimension(blend_ds.fields.timestamp).widget(ReactTable(blend_ds.fields.metric0))).sql
(query,) = sql
self.assertEqual(
'SELECT "sq0"."$timestamp" "$timestamp","sq0"."$metric0" "$metric0" '
'FROM (SELECT "timestamp" "$timestamp",SUM("metric") "$metric0" FROM "test0" GROUP BY "$timestamp") "sq0" '
'ORDER BY "$timestamp" LIMIT 200000',
str(query),
)
def test_use_metric_from_primary_dataset_when_alias_conflicts_with_metric_from_secondary(
self,
):
db = Database()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric",
label="Metric",
definition=t0.metric,
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric",
label="Metric",
definition=t1.metric,
data_type=DataType.number,
),
],
)
blend_ds = (
primary_ds.blend(secondary_ds)
.on_dimensions()
.extra_fields(
Field(
"metric_share",
label="Metric Share",
definition=primary_ds.fields.metric / secondary_ds.fields.metric,
data_type=DataType.number,
)
)
)
sql = (
blend_ds.query().dimension(blend_ds.fields.timestamp).widget(ReactTable(blend_ds.fields.metric_share))
).sql
(query,) = sql
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric"/"sq1"."$metric" "$metric_share" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'"metric" "$metric" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp",'
'"metric" "$metric" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query),
)
def test_produce_a_sql_with_multiple_subqueries_in_from_clause_when_blender_not_mapped_on_any_fields(
self,
):
db = Database()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=t0.metric,
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"metric2",
label="Metric2",
definition=t1.metric,
data_type=DataType.number,
)
],
)
blend_ds = primary_ds.blend(secondary_ds).on({})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp)
.widget(ReactTable(blend_ds.fields.metric1, blend_ds.fields.metric2))
).sql
(query,) = sql
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric1" "$metric1",'
'"sq1"."$metric2" "$metric2" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'"metric" "$metric1" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0",'
"("
"SELECT "
'"metric" "$metric2" '
'FROM "test1"'
') "sq1" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query),
)
def test_select_unmapped_dimension_from_secondary_but_only_metric_from_primary(
self,
):
db = Database()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"account",
label="Account",
definition=t1.account,
data_type=DataType.number,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp, blend_ds.fields.account)
.widget(ReactTable(blend_ds.fields.metric0))
).sql
(query,) = sql
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq1"."$account" "$account",'
'"sq0"."$metric0" "$metric0" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'SUM("metric") "$metric0" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp",'
'"account" "$account" '
'FROM "test1" '
'GROUP BY "$timestamp","$account"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp","$account" '
'LIMIT 200000',
str(query),
)
def test_select_unmapped_dimension_from_primary_but_only_metric_from_secondary(
self,
):
db = Database()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"account",
label="Account",
definition=t0.account,
data_type=DataType.number,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp, blend_ds.fields.account)
.widget(ReactTable(blend_ds.fields.metric1))
).sql
(query,) = sql
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$account" "$account",'
'"sq1"."$metric1" "$metric1" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'"account" "$account" '
'FROM "test0" '
'GROUP BY "$timestamp","$account"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp",'
'SUM("metric") "$metric1" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp","$account" '
'LIMIT 200000',
str(query),
)
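# A filter on a dimension known only to the primary dataset is pushed into the primary subquery's WHERE clause.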
def test_filter_unmapped_dimension_from_primary_with_only_metric_selected_from_secondary(
self,
):
db = Database()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"account",
label="Account",
definition=t0.account,
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp)
.widget(ReactTable(blend_ds.fields.metric1))
.filter(blend_ds.fields.account.isin(["123"]))
).sql
(query,) = sql
self.assertEqual(
'SELECT "sq0"."$timestamp" "$timestamp","sq1"."$metric1" "$metric1" '
'FROM ('
'SELECT "timestamp" "$timestamp" FROM "test0" '
'WHERE "account" IN (\'123\') '
'GROUP BY "$timestamp") "sq0" '
'LEFT JOIN ('
'SELECT "timestamp" "$timestamp",SUM("metric") "$metric1" FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query),
)
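# Metrics from both datasets are stitched together through the timestamp join even when the extra dimension is primary-only.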
def test_select_unmapped_dimension_from_primary_and_metrics_from_both_datasets(
self,
):
db = Database()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"account",
label="Account",
definition=t0.account,
data_type=DataType.number,
),
Field(
"metric0",
label="Metric0",
definition=t0.metric,
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=t1.metric,
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp, blend_ds.fields.account)
.widget(ReactTable(blend_ds.fields.metric0, blend_ds.fields.metric1))
).sql
(query,) = sql
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$account" "$account",'
'"sq0"."$metric0" "$metric0",'
'"sq1"."$metric1" "$metric1" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'"account" "$account",'
'"metric" "$metric0" '
'FROM "test0" '
'GROUP BY "$timestamp","$account"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp",'
'"metric" "$metric1" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp","$account" '
'LIMIT 200000',
str(query),
)
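# Fields that share an alias across datasets are resolved from the primary dataset alone, so no join is emitted.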
def test_do_not_include_fields_with_conflicting_aliases_in_subqueries_unless_mapped(
self,
):
db = Database()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (blend_ds.query().dimension(blend_ds.fields.timestamp).widget(ReactTable(blend_ds.fields.metric0))).sql
(query,) = sql
self.assertEqual(
'SELECT "sq0"."$timestamp" "$timestamp","sq0"."$metric0" "$metric0" FROM '
'(SELECT "timestamp" "$timestamp",SUM("metric") "$metric0" FROM "test0" GROUP BY "$timestamp") "sq0" '
'ORDER BY "$timestamp" LIMIT 200000',
str(query),
)
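# Mapped fields with different aliases ("a" vs "b") keep their own aliases in each subquery and join across them.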
def test_include_mapped_field_in_subqueries_when_the_aliases_are_different(self):
db = Database()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field("a", label="A", definition=t0.a, data_type=DataType.number),
Field(
"metric0",
label="Metric0",
definition=t0.metric,
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field("b", label="B", definition=t1.b, data_type=DataType.number),
Field(
"metric1",
label="Metric1",
definition=t1.metric,
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.a: secondary_ds.fields.b})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.a)
.widget(ReactTable(blend_ds.fields.metric0, blend_ds.fields.metric1))
).sql
(query,) = sql
self.assertEqual(
"SELECT "
'"sq0"."$a" "$a",'
'"sq0"."$metric0" "$metric0",'
'"sq1"."$metric1" "$metric1" '
"FROM ("
"SELECT "
'"a" "$a",'
'"metric" "$metric0" '
'FROM "test0" '
'GROUP BY "$a"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"b" "$b",'
'"metric" "$metric1" '
'FROM "test1" '
'GROUP BY "$b"'
') "sq1" ON "sq0"."$a"="sq1"."$b" '
'ORDER BY "$a" '
'LIMIT 200000',
str(query),
)
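# A DayOverDay reference adds a second query whose timestamp is shifted with TIMESTAMPADD(day,1,...) in every subquery.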
def test_blended_references(self):
db = TestDatabase()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=t0.metric,
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=t1.metric,
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp)
.widget(ReactTable(blend_ds.fields.metric0), ReactTable(blend_ds.fields.metric1))
.reference(f.DayOverDay(blend_ds.fields.timestamp))
).sql
(query_1, query_2) = sql
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric0" "$metric0",'
'"sq1"."$metric1" "$metric1" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'"metric" "$metric0" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp",'
'"metric" "$metric1" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query_1),
)
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric0_dod" "$metric0_dod",'
'"sq1"."$metric1_dod" "$metric1_dod" '
"FROM ("
'SELECT TIMESTAMPADD(day,1,"timestamp") "$timestamp",'
'"metric" "$metric0_dod" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
'SELECT TIMESTAMPADD(day,1,"timestamp") "$timestamp",'
'"metric" "$metric1_dod" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query_2),
)
def test_blended_references_with_order_by_on_metric(self):
db = TestDatabase()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp)
.widget(ReactTable(blend_ds.fields.metric0, blend_ds.fields.metric1))
.reference(f.DayOverDay(blend_ds.fields.timestamp))
.orderby(blend_ds.fields.metric1)
).sql
(query_1, query_2) = sql
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric0" "$metric0",'
'"sq1"."$metric1" "$metric1" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'SUM("metric") "$metric0" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp",'
'SUM("metric") "$metric1" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$metric1" '
'LIMIT 200000',
str(query_1),
)
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric0_dod" "$metric0_dod",'
'"sq1"."$metric1_dod" "$metric1_dod" '
"FROM ("
'SELECT TIMESTAMPADD(day,1,"timestamp") "$timestamp",'
'SUM("metric") "$metric0_dod" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
'SELECT TIMESTAMPADD(day,1,"timestamp") "$timestamp",'
'SUM("metric") "$metric1_dod" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$metric1_dod" '
'LIMIT 200000',
str(query_2),
)
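# Ordering by a metric no widget selects still pulls that metric into both the base and reference queries.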
def test_blended_references_with_order_by_on_unused_metric(self):
db = TestDatabase()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp)
.widget(ReactTable(blend_ds.fields.metric1))
.reference(f.DayOverDay(blend_ds.fields.timestamp))
.orderby(blend_ds.fields.metric0)
).sql
(query_1, query_2) = sql
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq1"."$metric1" "$metric1",'
'"sq0"."$metric0" "$metric0" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'SUM("metric") "$metric0" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp",'
'SUM("metric") "$metric1" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$metric0" '
'LIMIT 200000',
str(query_1),
)
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq1"."$metric1_dod" "$metric1_dod",'
'"sq0"."$metric0_dod" "$metric0_dod" '
"FROM ("
'SELECT TIMESTAMPADD(day,1,"timestamp") "$timestamp",'
'SUM("metric") "$metric0_dod" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
'SELECT TIMESTAMPADD(day,1,"timestamp") "$timestamp",'
'SUM("metric") "$metric1_dod" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$metric0_dod" '
'LIMIT 200000',
str(query_2),
)
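# When every field a query needs lives in one dataset, the blender drops the other dataset's subquery entirely.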
def test_optimization_with_complex_blended_metric(self):
db = TestDatabase()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"other_metric_name",
label="Metric",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
Field(
"metric_2",
label="Metric 2",
definition=fn.Sum(t1.metric_2),
data_type=DataType.number,
),
],
)
blend_ds = (
primary_ds.blend(secondary_ds)
.on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
.extra_fields(
Field(
"blended_metric",
label="Blended Metric",
definition=secondary_ds.fields.other_metric_name / secondary_ds.fields.metric_2,
data_type=DataType.number,
)
)
)
query = (
blend_ds.query().dimension(blend_ds.fields.timestamp).widget(f.Widget(blend_ds.fields.blended_metric))
).sql[0]
self.assertEqual(
'SELECT "sq0"."$timestamp" "$timestamp","sq0"."$other_metric_name"/"sq0"."$metric_2" "$blended_metric" '
'FROM ('
'SELECT "timestamp" "$timestamp",SUM("metric") "$other_metric_name",SUM("metric_2") "$metric_2" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq0" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query),
)
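# A metric filter against the secondary dataset becomes a HAVING clause inside that dataset's subquery.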
def test_blending_with_only_metric_filter_selected_in_secondary_dataset(self):
db = TestDatabase()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
query = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp)
.widget(f.Widget(blend_ds.fields.metric0))
.filter(blend_ds.fields.metric1.between(10, 20))
).sql[0]
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric0" "$metric0" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'SUM("metric") "$metric0" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp" '
'FROM "test1" '
'GROUP BY "$timestamp" '
'HAVING SUM("metric") BETWEEN 10 AND 20'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query),
)
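# OmitFromRollup keeps the metric filter in the base query but removes it from the rollup query.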
def test_blending_with_omit_from_rollup_filter_of_blended_field(self):
db = TestDatabase()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp)
.widget(f.Widget(f.Share(blend_ds.fields.metric0, over=blend_ds.fields.timestamp)))
.filter(f.OmitFromRollup(blend_ds.fields.metric1.between(10, 20)))
).sql
(query_1, query_2) = sql
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric0" "$metric0" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'SUM("metric") "$metric0" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp" '
'FROM "test1" '
'GROUP BY "$timestamp" '
'HAVING SUM("metric") BETWEEN 10 AND 20'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query_1),
)
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric0" "$metric0" '
"FROM ("
"SELECT "
"'_FIREANT_ROLLUP_VALUE_' \"$timestamp\","
'SUM("metric") "$metric0" '
'FROM "test0"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
"'_FIREANT_ROLLUP_VALUE_' \"$timestamp\" "
'FROM "test1"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query_2),
)
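# Share over a dimension emits a companion rollup query where the dimension is replaced by the '_FIREANT_ROLLUP_VALUE_' literal.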
def test_blending_with_share_operation_on_primary_metric(self):
db = TestDatabase()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp)
.widget(f.Widget(f.Share(blend_ds.fields.metric0, over=blend_ds.fields.timestamp)))
).sql
(query_1, query_2) = sql
self.assertEqual(
'SELECT "sq0"."$timestamp" "$timestamp","sq0"."$metric0" "$metric0" FROM '
'(SELECT "timestamp" "$timestamp",SUM("metric") "$metric0" FROM "test0" GROUP BY "$timestamp") "sq0" '
'ORDER BY "$timestamp" LIMIT 200000',
str(query_1),
)
self.assertEqual(
'SELECT "sq0"."$timestamp" "$timestamp","sq0"."$metric0" "$metric0" FROM '
'(SELECT \'_FIREANT_ROLLUP_VALUE_\' "$timestamp",SUM("metric") "$metric0" FROM "test0") "sq0" '
'ORDER BY "$timestamp" LIMIT 200000',
str(query_2),
)
def test_blending_with_share_operation_on_secondary_metric(self):
db = TestDatabase()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"account",
label="Account",
definition=t0.account,
data_type=DataType.number,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
],
)
blend_ds = primary_ds.blend(secondary_ds).on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp)
.widget(f.Widget(f.Share(blend_ds.fields.metric1, over=blend_ds.fields.timestamp)))
).sql
(query_1, query_2) = sql
self.assertEqual(
'SELECT "sq0"."$timestamp" "$timestamp","sq0"."$metric1" "$metric1" FROM '
'(SELECT "timestamp" "$timestamp",SUM("metric") "$metric1" FROM "test1" GROUP BY "$timestamp") "sq0" '
'ORDER BY "$timestamp" LIMIT 200000',
str(query_1),
)
self.assertEqual(
'SELECT "sq0"."$timestamp" "$timestamp","sq0"."$metric1" "$metric1" FROM '
'(SELECT \'_FIREANT_ROLLUP_VALUE_\' "$timestamp",SUM("metric") "$metric1" FROM "test1") "sq0" '
'ORDER BY "$timestamp" LIMIT 200000',
str(query_2),
)
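# Share over a blended metric keeps both datasets' subqueries in the rollup query as well.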
def test_share_on_blended_metric(self):
db = TestDatabase()
t0, t1 = Tables("test0", "test1")
primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=t0.metric,
data_type=DataType.number,
),
],
)
secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=t1.metric,
data_type=DataType.number,
),
],
)
blend_ds = (
primary_ds.blend(secondary_ds)
.on({primary_ds.fields.timestamp: secondary_ds.fields.timestamp})
.extra_fields(
Field(
"sum",
label="sum of two metrics in different datasets",
definition=(primary_ds.fields["metric0"] + secondary_ds.fields["metric1"]),
data_type=DataType.number,
)
)
)
sql = (
blend_ds.query()
.dimension(blend_ds.fields.timestamp)
.widget(f.Widget(f.Share(blend_ds.fields.sum, over=blend_ds.fields.timestamp)))
).sql
(query_1, query_2) = sql
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric0"+"sq1"."$metric1" "$sum" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'"metric" "$metric0" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp",'
'"metric" "$metric1" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query_1),
)
self.assertEqual(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric0"+"sq1"."$metric1" "$sum" '
"FROM ("
"SELECT "
"'_FIREANT_ROLLUP_VALUE_' \"$timestamp\","
'"metric" "$metric0" '
'FROM "test0"'
') "sq0" '
"LEFT JOIN ("
"SELECT "
"'_FIREANT_ROLLUP_VALUE_' \"$timestamp\","
'"metric" "$metric1" '
'FROM "test1"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query_2),
)
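# Edge cases with three blended datasets: duplicate field names and dimensions shared by only two of the three.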
class MultipleDatasetsBlendedEdgeCaseTests(TestCase):
@classmethod
def setUpClass(cls):
db = Database()
t0, t1, t2 = Tables("test0", "test1", "test2")
cls.primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=fn.Sum(t0.metric),
data_type=DataType.number,
),
Field(
"duplicate_metric",
label="DuplicateMetricSet0",
definition=fn.Sum(t0.duplicate),
data_type=DataType.number,
),
Field(
"another_dimension",
label="Another Dimension",
definition=t0.dim,
data_type=DataType.number,
),
],
)
cls.primary_ds.id = 0
cls.secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=fn.Sum(t1.metric),
data_type=DataType.number,
),
Field(
"another_dimension",
label="Another Dimension",
definition=t1.dim,
data_type=DataType.number,
),
],
)
cls.secondary_ds.id = 1
cls.tertiary_ds = DataSet(
table=t2,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t2.timestamp,
data_type=DataType.date,
),
Field(
"metric2",
label="Metric2",
definition=fn.Sum(t2.metric),
data_type=DataType.number,
),
Field(
"duplicate_metric",
label="DuplicateMetricSet2",
definition=fn.Sum(t2.duplicate),
data_type=DataType.number,
),
],
)
cls.tertiary_ds.id = 2
cls.blend_ds = (
cls.primary_ds.blend(cls.secondary_ds)
.on(
{
cls.primary_ds.fields.timestamp: cls.secondary_ds.fields.timestamp,
cls.primary_ds.fields.another_dimension: cls.secondary_ds.fields.another_dimension,
}
)
.blend(cls.tertiary_ds)
.on({cls.primary_ds.fields.timestamp: cls.tertiary_ds.fields.timestamp})
)
def test_selecting_just_one_metric_in_non_primary_dataset(self):
blender = self.blend_ds.extra_fields(
Field(
"only_metric2",
label="Metric Two",
definition=self.tertiary_ds.fields.metric2,
data_type=DataType.number,
)
)
(query,) = blender.query().widget(ReactTable(blender.fields.only_metric2)).sql
self.assertEqual(
'SELECT "sq0"."$metric2" "$only_metric2" '
'FROM (SELECT SUM("metric") "$metric2" FROM "test2") "sq0" '
'ORDER BY 1 LIMIT 200000',
str(query),
)
def test_selecting_metric_with_duplicate_name_throws_error(self):
with self.assertRaises(ValueError):
self.blend_ds.extra_fields(
Field(
"duplicate_metric",
label="BlendedDuplicateMetric",
definition=self.primary_ds.fields.duplicate_metric + self.tertiary_ds.fields.metric2,
data_type=DataType.number,
),
)
def test_select_dimension_that_is_only_in_two_out_of_three_datasets(self):
(query,) = (
self.blend_ds.query()
.dimension(self.blend_ds.fields.another_dimension)
.widget(ReactTable(self.blend_ds.fields.metric2))
).sql
self.assertEqual(
'SELECT "sq0"."$another_dimension" "$another_dimension","sq1"."$metric2" "$metric2" '
'FROM (SELECT "dim" "$another_dimension" FROM "test0" GROUP BY "$another_dimension") "sq0",'
'(SELECT SUM("metric") "$metric2" FROM "test2") "sq1" '
'ORDER BY "$another_dimension" '
'LIMIT 200000',
str(query),
)
def test_select_dimension_in_third_dataset(self):
(query,) = (
self.blend_ds.query()
.dimension(self.blend_ds.fields.timestamp)
.widget(ReactTable(self.blend_ds.fields.metric2))
).sql
self.assertEqual(
'SELECT "sq0"."$timestamp" "$timestamp","sq0"."$metric2" "$metric2" FROM '
'(SELECT "timestamp" "$timestamp",SUM("metric") "$metric2" FROM "test2" GROUP BY "$timestamp") "sq0" '
'ORDER BY "$timestamp" '
'LIMIT 200000',
str(query),
)
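# Four-way blend chained with .on_dimensions(); the generated SQL joins on the shared "timestamp" dimension.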
class DataSetBlenderMultipleDatasetsTests(TestCase):
@classmethod
def setUpClass(cls):
db = Database()
t0, t1, t2, t3 = Tables("test0", "test1", "test2", "test3")
cls.primary_ds = DataSet(
table=t0,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t0.timestamp,
data_type=DataType.date,
),
Field(
"metric0",
label="Metric0",
definition=t0.metric,
data_type=DataType.number,
),
],
)
cls.primary_ds.id = 0
cls.secondary_ds = DataSet(
table=t1,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t1.timestamp,
data_type=DataType.date,
),
Field(
"metric1",
label="Metric1",
definition=t1.metric,
data_type=DataType.number,
),
],
)
cls.secondary_ds.id = 1
cls.tertiary_ds = DataSet(
table=t2,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t2.timestamp,
data_type=DataType.date,
),
Field(
"metric2",
label="Metric2",
definition=t2.metric,
data_type=DataType.number,
),
],
)
cls.tertiary_ds.id = 2
cls.quaternary_ds = DataSet(
table=t3,
database=db,
fields=[
Field(
"timestamp",
label="Timestamp",
definition=t3.timestamp,
data_type=DataType.date,
),
Field(
"metric3",
label="Metric3",
definition=t3.metric,
data_type=DataType.number,
),
],
)
cls.quaternary_ds.id = 3
cls.blend_ds = (
cls.primary_ds.blend(cls.secondary_ds)
.on_dimensions()
.blend(cls.tertiary_ds)
.on_dimensions()
.blend(cls.quaternary_ds)
.on_dimensions()
)
def _do_test(self, blender):
self.maxDiff = None
(query,) = (
blender.query().dimension(blender.fields.timestamp).widget(ReactTable(blender.fields.metric_share))
).sql
self.assertEqual(
(
"SELECT "
'"sq0"."$timestamp" "$timestamp",'
'"sq0"."$metric0"/"sq1"."$metric1"/"sq2"."$metric2"/"sq3"."$metric3" "$metric_share" '
"FROM ("
"SELECT "
'"timestamp" "$timestamp",'
'"metric" "$metric0" '
'FROM "test0" '
'GROUP BY "$timestamp"'
') "sq0",'
'(SELECT "metric" "$metric2" FROM "test2") "sq2",'
'(SELECT "metric" "$metric3" FROM "test3") "sq3" '
"LEFT JOIN ("
"SELECT "
'"timestamp" "$timestamp",'
'"metric" "$metric1" '
'FROM "test1" '
'GROUP BY "$timestamp"'
') "sq1" ON "sq0"."$timestamp"="sq1"."$timestamp" '
'ORDER BY "$timestamp" '
'LIMIT 200000'
),
str(query),
)
def test_dataset_blender_fourway_flattens_on_join_criteria_to_build_on_primary_dataset(
self,
):
self._do_test(
self.blend_ds.extra_fields(
Field(
"metric_share",
label="Metric Share",
definition=self.primary_ds.fields.metric0
/ self.secondary_ds.fields.metric1
/ self.tertiary_ds.fields.metric2
/ self.quaternary_ds.fields.metric3,
data_type=DataType.number,
)
)
)
def test_dataset_using_fields_referring_to_top_blender_maps_to_correct_field(self):
self._do_test(
self.blend_ds.extra_fields(
Field(
"metric_share",
label="Metric Share",
definition=self.blend_ds.fields.metric0
/ self.blend_ds.fields.metric1
/ self.blend_ds.fields.metric2
/ self.blend_ds.fields.metric3,
data_type=DataType.number,
)
)
)
| 32.615836
| 119
| 0.431541
| 4,343
| 55,610
| 5.366567
| 0.041446
| 0.039473
| 0.068649
| 0.056635
| 0.882053
| 0.866392
| 0.838804
| 0.830094
| 0.814605
| 0.797486
| 0
| 0.030006
| 0.448642
| 55,610
| 1,704
| 120
| 32.634977
| 0.730146
| 0
| 0
| 0.843924
| 0
| 0.004318
| 0.202104
| 0.034149
| 0
| 0
| 0
| 0
| 0.018507
| 1
| 0.016656
| false
| 0
| 0.003085
| 0
| 0.022209
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| bca757b31e5d0897e2521406b4effb6ff1863151
| 854
| py
| Python
| tests/converters/test_dromedary_case.py
| gieseladev/lettercase
| 2b4b97d5b96fcb5cd12f2eec93e0c64c78b84f6f
| ["MIT"] | null | null | null
| tests/converters/test_dromedary_case.py
| gieseladev/lettercase
| 2b4b97d5b96fcb5cd12f2eec93e0c64c78b84f6f
| ["MIT"] | null | null | null
| tests/converters/test_dromedary_case.py
| gieseladev/lettercase
| 2b4b97d5b96fcb5cd12f2eec93e0c64c78b84f6f
| ["MIT"] | null | null | null
from lettercase import pascal_to_dromedary_case, snake_to_dromedary_case, to_dromedary_case
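# Dromedary case is lowerCamelCase: the first word stays lowercase and each following word is capitalized.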
def test_snake_to_dromedary_case():
assert snake_to_dromedary_case("hello_world") == "helloWorld"
assert snake_to_dromedary_case("_hello_world") == "_helloWorld"
assert snake_to_dromedary_case("test") == "test"
def test_pascal_to_dromedary_case():
assert pascal_to_dromedary_case("HelloWorld") == "helloWorld"
assert pascal_to_dromedary_case("_HelloWorld") == "_helloWorld"
assert pascal_to_dromedary_case("Test") == "test"
def test_to_dromedary_case():
assert to_dromedary_case("hey_world") == "heyWorld"
assert to_dromedary_case("HEY_WORLD") == "heyWorld"
assert to_dromedary_case("Hey_World") == "heyWorld"
assert to_dromedary_case("heyWorld") == "heyWorld"
assert to_dromedary_case("HeyWorld") == "heyWorld"
| 38.818182
| 91
| 0.759953
| 108
| 854
| 5.490741
| 0.148148
| 0.315346
| 0.430017
| 0.177066
| 0.765599
| 0.765599
| 0.765599
| 0.625632
| 0.625632
| 0.625632
| 0
| 0
| 0.124122
| 854
| 21
| 92
| 40.666667
| 0.792781
| 0
| 0
| 0
| 0
| 0
| 0.216628
| 0
| 0
| 0
| 0
| 0
| 0.733333
| 1
| 0.2
| true
| 0
| 0.066667
| 0
| 0.266667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| bce75490fd72d412ede0585ef73d13e2a3122981
| 10,827
| py
| Python
| heat/spatial/tests/test_distances.py
| mtar/heat
| 35aac8c0aaafa2dcb350ad86514e61da9ee05a50
| ["MIT"] | null | null | null
| heat/spatial/tests/test_distances.py
| mtar/heat
| 35aac8c0aaafa2dcb350ad86514e61da9ee05a50
| ["MIT"] | 1
| 2020-07-29T08:01:09.000Z
| 2020-07-29T08:10:41.000Z
| heat/spatial/tests/test_distances.py
| mtar/heat
| 35aac8c0aaafa2dcb350ad86514e61da9ee05a50
| ["MIT"] | null | null | null
import unittest
import os
import torch
import heat as ht
import numpy as np
import math
from heat.core.tests.test_suites.basic_test import TestCase
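# In heat, split marks the axis along which a DNDarray is distributed across MPI processes; split=None means replicated.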
class TestDistances(TestCase):
def test_cdist(self):
n = ht.communication.MPI_WORLD.size
X = ht.ones((n * 2, 4), dtype=ht.float32, split=None)
Y = ht.zeros((n * 2, 4), dtype=ht.float32, split=None)
res_XX_cdist = ht.zeros((n * 2, n * 2), dtype=ht.float32, split=None)
res_XX_rbf = ht.ones((n * 2, n * 2), dtype=ht.float32, split=None)
res_XX_manhattan = ht.zeros((n * 2, n * 2), dtype=ht.float32, split=None)
res_XY_cdist = ht.ones((n * 2, n * 2), dtype=ht.float32, split=None) * 2
res_XY_rbf = ht.ones((n * 2, n * 2), dtype=ht.float32, split=None) * math.exp(-1.0)
res_XY_manhattan = ht.ones((n * 2, n * 2), dtype=ht.float32, split=None) * 4
# Case 1a: X.split == None, Y == None
d = ht.spatial.cdist(X, quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XX_cdist))
self.assertEqual(d.split, None)
d = ht.spatial.cdist(X, quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XX_cdist))
self.assertEqual(d.split, None)
d = ht.spatial.rbf(X, quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XX_rbf))
self.assertEqual(d.split, None)
d = ht.spatial.rbf(X, quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XX_rbf))
self.assertEqual(d.split, None)
d = ht.spatial.manhattan(X, expand=False)
self.assertTrue(ht.equal(d, res_XX_manhattan))
self.assertEqual(d.split, None)
d = ht.spatial.manhattan(X, expand=True)
self.assertTrue(ht.equal(d, res_XX_manhattan))
self.assertEqual(d.split, None)
# Case 1b: X.split == None, Y != None, Y.split == None
d = ht.spatial.cdist(X, Y, quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XY_cdist))
self.assertEqual(d.split, None)
d = ht.spatial.cdist(X, Y, quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XY_cdist))
self.assertEqual(d.split, None)
d = ht.spatial.rbf(X, Y, sigma=math.sqrt(2.0), quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XY_rbf))
self.assertEqual(d.split, None)
d = ht.spatial.rbf(X, Y, sigma=math.sqrt(2.0), quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XY_rbf))
self.assertEqual(d.split, None)
d = ht.spatial.manhattan(X, Y, expand=False)
self.assertTrue(ht.equal(d, res_XY_manhattan))
self.assertEqual(d.split, None)
d = ht.spatial.manhattan(X, Y, expand=True)
self.assertTrue(ht.equal(d, res_XY_manhattan))
self.assertEqual(d.split, None)
# Case 1c: X.split == None, Y != None, Y.split == 0
Y = ht.zeros((n * 2, 4), dtype=ht.float32, split=0)
res_XX_cdist = ht.zeros((n * 2, n * 2), dtype=ht.float32, split=1)
res_XX_rbf = ht.ones((n * 2, n * 2), dtype=ht.float32, split=1)
res_XY_cdist = ht.ones((n * 2, n * 2), dtype=ht.float32, split=1) * 2
res_XY_rbf = ht.ones((n * 2, n * 2), dtype=ht.float32, split=1) * math.exp(-1.0)
d = ht.spatial.cdist(X, Y, quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XY_cdist))
self.assertEqual(d.split, 1)
d = ht.spatial.cdist(X, Y, quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XY_cdist))
self.assertEqual(d.split, 1)
d = ht.spatial.rbf(X, Y, sigma=math.sqrt(2.0), quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XY_rbf))
self.assertEqual(d.split, 1)
d = ht.spatial.rbf(X, Y, sigma=math.sqrt(2.0), quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XY_rbf))
self.assertEqual(d.split, 1)
d = ht.spatial.manhattan(X, Y, expand=False)
self.assertTrue(ht.equal(d, res_XY_manhattan))
self.assertEqual(d.split, 1)
d = ht.spatial.manhattan(X, Y, expand=True)
self.assertTrue(ht.equal(d, res_XY_manhattan))
self.assertEqual(d.split, 1)
# Case 2a: X.split == 0, Y == None
X = ht.ones((n * 2, 4), dtype=ht.float32, split=0)
Y = ht.zeros((n * 2, 4), dtype=ht.float32, split=None)
res_XX_cdist = ht.zeros((n * 2, n * 2), dtype=ht.float32, split=0)
res_XX_rbf = ht.ones((n * 2, n * 2), dtype=ht.float32, split=0)
res_XY_cdist = ht.ones((n * 2, n * 2), dtype=ht.float32, split=0) * 2
res_XY_rbf = ht.ones((n * 2, n * 2), dtype=ht.float32, split=0) * math.exp(-1.0)
d = ht.spatial.cdist(X, quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XX_cdist))
self.assertEqual(d.split, 0)
d = ht.spatial.cdist(X, quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XX_cdist))
self.assertEqual(d.split, 0)
d = ht.spatial.rbf(X, quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XX_rbf))
self.assertEqual(d.split, 0)
d = ht.spatial.rbf(X, quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XX_rbf))
self.assertEqual(d.split, 0)
d = ht.spatial.manhattan(X, expand=False)
self.assertTrue(ht.equal(d, res_XX_manhattan))
self.assertEqual(d.split, 0)
d = ht.spatial.manhattan(X, expand=True)
self.assertTrue(ht.equal(d, res_XX_manhattan))
self.assertEqual(d.split, 0)
# Case 2b: X.split == 0, Y != None, Y.split == None
d = ht.spatial.cdist(X, Y, quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XY_cdist))
self.assertEqual(d.split, 0)
d = ht.spatial.cdist(X, Y, quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XY_cdist))
self.assertEqual(d.split, 0)
d = ht.spatial.rbf(X, Y, sigma=math.sqrt(2.0), quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XY_rbf))
self.assertEqual(d.split, 0)
d = ht.spatial.rbf(X, Y, sigma=math.sqrt(2.0), quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XY_rbf))
self.assertEqual(d.split, 0)
d = ht.spatial.manhattan(X, Y, expand=False)
self.assertTrue(ht.equal(d, res_XY_manhattan))
self.assertEqual(d.split, 0)
d = ht.spatial.manhattan(X, Y, expand=True)
self.assertTrue(ht.equal(d, res_XY_manhattan))
self.assertEqual(d.split, 0)
# Case 2c: X.split == 0, Y != None, Y.split == 0
Y = ht.zeros((n * 2, 4), dtype=ht.float32, split=0)
d = ht.spatial.cdist(X, Y, quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XY_cdist))
self.assertEqual(d.split, 0)
d = ht.spatial.cdist(X, Y, quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XY_cdist))
self.assertEqual(d.split, 0)
d = ht.spatial.rbf(X, Y, sigma=math.sqrt(2.0), quadratic_expansion=False)
self.assertTrue(ht.equal(d, res_XY_rbf))
self.assertEqual(d.split, 0)
d = ht.spatial.rbf(X, Y, sigma=math.sqrt(2.0), quadratic_expansion=True)
self.assertTrue(ht.equal(d, res_XY_rbf))
self.assertEqual(d.split, 0)
d = ht.spatial.manhattan(X, Y, expand=False)
self.assertTrue(ht.equal(d, res_XY_manhattan))
self.assertEqual(d.split, 0)
d = ht.spatial.manhattan(X, Y, expand=True)
self.assertTrue(ht.equal(d, res_XY_manhattan))
self.assertEqual(d.split, 0)
# Case 3 X.split == 1
X = ht.ones((n * 2, 4), dtype=ht.float32, split=1)
with self.assertRaises(NotImplementedError):
ht.spatial.cdist(X)
with self.assertRaises(NotImplementedError):
ht.spatial.cdist(X, Y, quadratic_expansion=False)
X = ht.ones((n * 2, 4), dtype=ht.float32, split=None)
Y = ht.zeros((n * 2, 4), dtype=ht.float32, split=1)
with self.assertRaises(NotImplementedError):
ht.spatial.cdist(X, Y, quadratic_expansion=False)
Z = ht.ones((n * 2, 6, 3), dtype=ht.float32, split=None)
with self.assertRaises(NotImplementedError):
ht.spatial.cdist(Z, quadratic_expansion=False)
with self.assertRaises(NotImplementedError):
ht.spatial.cdist(X, Z, quadratic_expansion=False)
n = ht.communication.MPI_WORLD.size
A = ht.ones((n * 2, 6), dtype=ht.float32, split=None)
for i in range(n):
A[2 * i, :] = A[2 * i, :] * (2 * i)
A[2 * i + 1, :] = A[2 * i + 1, :] * (2 * i + 1)
res = torch.cdist(A._DNDarray__array, A._DNDarray__array)
A = ht.ones((n * 2, 6), dtype=ht.float32, split=0)
for i in range(n):
A[2 * i, :] = A[2 * i, :] * (2 * i)
A[2 * i + 1, :] = A[2 * i + 1, :] * (2 * i + 1)
B = A.astype(ht.int32)
d = ht.spatial.cdist(A, B, quadratic_expansion=False)
result = ht.array(res, dtype=ht.float32, split=0)
self.assertTrue(ht.allclose(d, result, atol=1e-5))
n = ht.communication.MPI_WORLD.size
A = ht.ones((n * 2, 6), dtype=ht.float32, split=None)
for i in range(n):
A[2 * i, :] = A[2 * i, :] * (2 * i)
A[2 * i + 1, :] = A[2 * i + 1, :] * (2 * i + 1)
res = torch.cdist(A._DNDarray__array, A._DNDarray__array)
A = ht.ones((n * 2, 6), dtype=ht.float32, split=0)
for i in range(n):
A[2 * i, :] = A[2 * i, :] * (2 * i)
A[2 * i + 1, :] = A[2 * i + 1, :] * (2 * i + 1)
B = A.astype(ht.int32)
d = ht.spatial.cdist(A, B, quadratic_expansion=False)
result = ht.array(res, dtype=ht.float32, split=0)
self.assertTrue(ht.allclose(d, result, atol=1e-8))
B = A.astype(ht.float64)
d = ht.spatial.cdist(A, B, quadratic_expansion=False)
result = ht.array(res, dtype=ht.float64, split=0)
self.assertTrue(ht.allclose(d, result, atol=1e-8))
B = A.astype(ht.int16)
d = ht.spatial.cdist(A, B, quadratic_expansion=False)
result = ht.array(res, dtype=ht.float32, split=0)
self.assertTrue(ht.allclose(d, result, atol=1e-8))
d = ht.spatial.cdist(B, quadratic_expansion=False)
result = ht.array(res, dtype=ht.float32, split=0)
self.assertTrue(ht.allclose(d, result, atol=1e-8))
B = A.astype(ht.int32)
d = ht.spatial.cdist(B, quadratic_expansion=False)
result = ht.array(res, dtype=ht.float32, split=0)
self.assertTrue(ht.allclose(d, result, atol=1e-8))
B = A.astype(ht.float64)
d = ht.spatial.cdist(B, quadratic_expansion=False)
result = ht.array(res, dtype=ht.float64, split=0)
self.assertTrue(ht.allclose(d, result, atol=1e-8))
| 41.011364
| 91
| 0.599797
| 1,670
| 10,827
| 3.797006
| 0.052695
| 0.068128
| 0.067813
| 0.119224
| 0.955685
| 0.947012
| 0.942596
| 0.926983
| 0.917836
| 0.914367
| 0
| 0.033554
| 0.245774
| 10,827
| 263
| 92
| 41.1673
| 0.742959
| 0.0266
| 0
| 0.839196
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.422111
| 1
| 0.005025
| false
| 0
| 0.035176
| 0
| 0.045226
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| bce825355621ff505a47b0bc3460fd9b6584cb49
| 184
| py
| Python
| src/airfly/_vendor/airflow/contrib/operators/awsbatch_operator.py
| ryanchao2012/airfly
| 230ddd88885defc67485fa0c51f66c4a67ae98a9
| ["MIT"] | 7
| 2021-09-27T11:38:48.000Z
| 2022-02-01T06:06:24.000Z
| src/airfly/_vendor/airflow/contrib/operators/awsbatch_operator.py
| ryanchao2012/airfly
| 230ddd88885defc67485fa0c51f66c4a67ae98a9
| ["MIT"] | null | null | null
| src/airfly/_vendor/airflow/contrib/operators/awsbatch_operator.py
| ryanchao2012/airfly
| 230ddd88885defc67485fa0c51f66c4a67ae98a9
| ["MIT"] | null | null | null
# Auto generated by 'inv collect-airflow'
from airfly._vendor.airflow.providers.amazon.aws.operators.batch import AwsBatchOperator
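# The legacy contrib operator name is kept as a thin backwards-compatible alias of the provider implementation.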
class AWSBatchOperator(AwsBatchOperator):
pass
| 26.285714
| 88
| 0.820652
| 21
| 184
| 7.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103261
| 184
| 6
| 89
| 30.666667
| 0.909091
| 0.211957
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 7
| 4c3297a7d3683c597d02ef1f00c0ed301ab32d41
| 241,766
| py
| Python
| untitled.py
| czyczyyzc/MyForElise
| dcbf5924d3d63f441d3247741828804f74a29345
| ["MIT"] | null | null | null
| untitled.py
| czyczyyzc/MyForElise
| dcbf5924d3d63f441d3247741828804f74a29345
| ["MIT"] | null | null | null
| untitled.py
| czyczyyzc/MyForElise
| dcbf5924d3d63f441d3247741828804f74a29345
| ["MIT"] | null | null | null
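# Per-image segmentation metrics: _fast_hist accumulates a cls_num x cls_num confusion matrix with np.bincount.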
def _fast_hist(self, label_pred, label_true):
mask = (label_true >= 0) & (label_true < self.cls_num)
hist = np.bincount(self.cls_num*label_true[mask].astype(int)+label_pred[mask], minlength=self.cls_num**2).reshape(self.cls_num, self.cls_num)
return hist
def accs_seg_img_py(self, label_pred, label_true):
hist = self._fast_hist(label_pred.flatten(), label_true.flatten())
acc = np.diag(hist).sum() / hist.sum()
acc_cls = np.diag(hist) / hist.sum(axis=1)
acc_cls = np.nanmean(acc_cls)
iu = np.diag(hist) / (hist.sum(axis=1) + hist.sum(axis=0) - np.diag(hist))
mean_iu = np.nanmean(iu)
freq = hist.sum(axis=1) / hist.sum()
fwavacc = (freq[freq > 0] * iu[freq > 0]).sum()
accs = np.stack([mean_iu, acc, acc_cls, fwavacc], axis=0)
accs = accs.astype(dtype=np.float32, copy=False)
return accs
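# tf.py_func wraps the NumPy metric computation so it can run inside the TensorFlow graph.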
def accs_seg_img(self, label_pred, label_true):
accs = tf.py_func(self.accs_seg_img_py, [label_pred, label_true], tf.float32)
return accs
msks_pst_ = tf.nn.softmax(msks_pst, axis=-1) #(N, H, W, C)
msks_pre = gmks
msks = msks_pst_
if self.mod_tra:
los_dat = self.loss_seg(msks_pst, msks_pre)
los_reg = tf.add_n(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))
los = los_dat + los_reg
loss = tf.stack([los, los_dat, los_reg], axis=0)
msks_pst0 = tf.argmax(msks_pst_, axis=-1, output_type=tf.int32) #(N, H, W) class
msks_pst1 = tf.reduce_max(msks_pst_, axis=-1) #(N, H, W) probs
msks_pst1 = msks_pst1 >= self.msk_min #(N, H, W)
msks_pst1 = tf.cast(msks_pst1, dtype=tf.int32) #(N, H, W)
msks_pst = msks_pst0 * msks_pst1 #(N, H, W)
accs = self.accs_seg(msks_pst, msks_pre)
return loss, accs, msks
else:
return msks
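# fold1 is a fixed 2x2 space-to-depth reshape: each 2x2 spatial block is folded into 4 channels.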
def fold1(tensor_in=None, layer=0, params=None, mtrain=None):
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, C]
with tf.variable_scope('fold1_'+str(layer)) as scope:
tensor_in = tf.reshape(tensor_in, [x_shape[0], x_shape[1]//2, 2, x_shape[2]//2, 2, x_shape[3]])
tensor_in = tf.transpose(tensor_in, [0, 1, 3, 2, 4, 5])
tensor_out = tf.reshape(tensor_in, [x_shape[0], x_shape[1]//2, x_shape[2]//2, x_shape[3]*4])
print_activations(tensor_out)
return tensor_out
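# unfold1 inverts fold1: a depth-to-space reshape expanding 4 channels back into a 2x2 spatial block.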
def unfold1(tensor_in=None, layer=0, params=None, mtrain=None):
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, C]
with tf.variable_scope('unfold1_'+str(layer)) as scope:
tensor_in = tf.reshape(tensor_in, [x_shape[0], x_shape[1], x_shape[2], 2, 2, x_shape[3]//4])
tensor_in = tf.transpose(tensor_in, [0, 1, 3, 2, 4, 5])
tensor_out = tf.reshape(tensor_in, [x_shape[0], x_shape[1]*2, x_shape[2]*2, x_shape[3]//4])
print_activations(tensor_out)
return tensor_out
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
coord.request_stop()
coord.join(threads)
imgs_lst = []
lbls_lst = []
if self.fil_num >= self.gpu_num:
fil_pat = os.path.join(self.dat_dir, 'imagenet', '*.tfrecord')
dataset = tf.data.Dataset.list_files(file_pattern=fil_pat, shuffle=True, seed=None)
#dataset = dataset.interleave(lambda x: tf.data.TFRecordDataset(x, compression_type='ZLIB'), \
# cycle_length=self.num_readers, block_length=1, num_parallel_calls=1)
dataset = dataset.apply(tf.data.experimental.\
parallel_interleave(lambda x: tf.data.TFRecordDataset(x, compression_type='ZLIB'), \
cycle_length=self.num_readers, block_length=1, sloppy=True))
else:
fil_nam = glob.glob(os.path.join(self.dat_dir, 'imagenet', '*.tfrecord'))
dataset = tf.data.TFRecordDataset(fil_nam, compression_type='ZLIB', num_parallel_reads=self.num_readers)
for i in range(self.gpu_num):
dat_sha = dataset.shard(num_shards=self.gpu_num, index=i)
if self.fil_num >= self.gpu_num:
#dat_sha= dat_sha.shuffle(buffer_size=self.num_readers, seed=None, reshuffle_each_iteration=True)
dat_sha = dat_sha.prefetch(buffer_size=self.bat_siz)
dat_sha = dat_sha.map(parse_function, num_parallel_calls=self.num_threads)
dat_sha = dat_sha.apply(tf.data.experimental.\
shuffle_and_repeat(buffer_size=self.capacity, count=self.epc_num, seed=None))
dat_sha = dat_sha.batch(batch_size=self.bat_siz, drop_remainder=True)
#dat_sha = dat_sha.apply(tf.data.experimental.\
# map_and_batch(parse_function, batch_size=self.bat_siz, num_parallel_batches=None, \
# drop_remainder=True, num_parallel_calls=self.num_threads))
#dat_sha = dat_sha.cache(filename=os.path.join(self.dat_dir, 'cache'))
dat_sha = dat_sha.prefetch(buffer_size=1)
#dat_sha = dat_sha.apply(tf.data.experimental.prefetch_to_device(self.mdl_dev%i, buffer_size=1))
iterator = dat_sha.make_one_shot_iterator()
example = iterator.get_next()
imgs_lst.append(example['image/image'])
lbls_lst.append(example['label/label'])
return imgs_lst, lbls_lst
dataset = dataset.apply(tf.data.experimental.\
parallel_interleave(lambda x: tf.data.TFRecordDataset(x, compression_type='ZLIB'), \
cycle_length=self.num_readers, block_length=1, sloppy=True, \
buffer_output_elements=self.bat_siz_all//self.num_readers, \
prefetch_input_elements=None))
#random_uniform
'''
def get_input(self):
# Create the file list, then build the input file queue from it.
# Before the input pipeline runs, all raw data must be converted to a uniform format and stored in TFRecord files.
# The file list should contain every TFRecord file that provides training data.
filename = os.path.join(self.dat_dir, 'cifar', '*.tfrecord')
files = tf.train.match_filenames_once(filename)
filename_queue = tf.train.string_input_producer(files, shuffle=True, capacity=1000)
#解析TFRecord文件里的数据
options = tf.python_io.TFRecordOptions(TFRecordCompressionType.ZLIB)
reader = tf.TFRecordReader(options=options)
_, serialized_example = reader.read(filename_queue)
parsed_example = tf.parse_single_example(
serialized_example,
features = {
'image/image': tf.FixedLenFeature(shape=[], dtype=tf.string, default_value=None),
'image/height': tf.FixedLenFeature(shape=[], dtype=tf.int64, default_value=None),
'image/width': tf.FixedLenFeature(shape=[], dtype=tf.int64, default_value=None),
'label/label': tf.FixedLenFeature(shape=[], dtype=tf.int64, default_value=None),
#'matrix': tf.VarLenFeature(dtype=dtype('float32')),
#'matrix_shape':tf.FixedLenFeature(shape=(2,), dtype=tf.int64),
}
)
img_hgt = tf.cast(parsed_example['image/height'], tf.int32)
img_wdh = tf.cast(parsed_example['image/width'], tf.int32)
lbl = tf.cast(parsed_example['label/label'], tf.int32)
#img = tf.decode_raw(parsed_example['image/image'], tf.uint8)
img = tf.decode_raw(parsed_example['image/image'], tf.float32)
img = tf.reshape(img, [img_hgt, img_wdh, 3])
img = self.preprocessing(img)
img = tf.reshape(img, [self.img_siz_max, self.img_siz_max, 3])
capacity = self.min_after_dequeue + 3 * self.bat_siz
#tf.train.shuffle_batch_join
imgs, lbls = tf.train.shuffle_batch(
tensors=[img, lbl], batch_size=self.bat_siz, \
num_threads=self.num_threads, capacity=capacity, min_after_dequeue=self.min_after_dequeue)
return imgs, lbls
'''
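# Active input pipeline: parses COCO-style TFRecords with boxes and masks (the quoted block above is an older image/label variant).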
def get_input(self):
# Create the file list, then build the input file queue from it.
# Before the input pipeline runs, all raw data must be converted to a uniform format and stored in TFRecord files.
# The file list should contain every TFRecord file that provides training data.
filename = os.path.join(self.dat_dir, "*.tfrecord")
files = tf.train.match_filenames_once(filename)
filename_queue = tf.train.string_input_producer(files, shuffle=True, capacity=1000)
#解析TFRecord文件里的数据
options = tf.python_io.TFRecordOptions(TFRecordCompressionType.ZLIB)
reader = tf.TFRecordReader(options=options)
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
features = {
'image/img_id': tf.FixedLenFeature([], tf.int64 ),
'image/image': tf.FixedLenFeature([], tf.string),
'image/height': tf.FixedLenFeature([], tf.int64 ),
'image/width': tf.FixedLenFeature([], tf.int64 ),
'label/num_instances': tf.FixedLenFeature([], tf.int64 ),
'label/gt_masks': tf.FixedLenFeature([], tf.string),
'label/gt_boxes': tf.FixedLenFeature([], tf.string),
}
)
img_idx = tf.cast(features['image/img_id'], tf.int32)
img_hgt = tf.cast(features['image/height'], tf.int32)
img_wdh = tf.cast(features['image/width'], tf.int32)
gbx_num = tf.cast(features['label/num_instances'], tf.int32)
img = tf.decode_raw(features['image/image'], tf.uint8 )
gbxs = tf.decode_raw(features['label/gt_boxes'], tf.float32)
gmks = tf.decode_raw(features['label/gt_masks'], tf.uint8 )
img = tf.reshape(img, [img_hgt, img_wdh, 3])
gbxs = tf.reshape(gbxs, [gbx_num, 5])
gmks = tf.reshape(gmks, [gbx_num, img_hgt, img_wdh])
img, gbxs, gmks, img_wdw, img_hgt_, img_wdh_ = self.preprocessing(img, gbxs, gmks)
gbx_num = tf.shape(gbxs)[0]
paddings = [[0, self.max_num-gbx_num], [0, 0]]
gbxs = tf.pad(gbxs, paddings, "CONSTANT")
paddings = [[0, self.max_num-gbx_num], [0, 0], [0, 0]]
gmks = tf.pad(gmks, paddings, "CONSTANT")
img = tf.reshape(img, [self.img_siz_max, self.img_siz_max, 3])
gbxs = tf.reshape(gbxs, [self.max_num, 5])
gmks = tf.reshape(gmks, [self.max_num]+self.box_msk_siz)
capacity = self.min_after_dequeue + 3 * self.bat_siz
#tf.train.shuffle_batch_join
imgs, gbxs, gmks, gbx_nums, img_wdws, img_hgts_, img_wdhs_ = tf.train.shuffle_batch(
tensors=[img, gbxs, gmks, gbx_num, img_wdw, img_hgt_, img_wdh_], batch_size=self.bat_siz, \
num_threads=self.num_threads, capacity=capacity, min_after_dequeue=self.min_after_dequeue)
return imgs, gbxs, gmks, gbx_nums, img_wdws, img_hgts_, img_wdhs_
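# Generalized fold1: applies a list of strides, with use_crs optionally reordering folded positions in a cross pattern.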
def fold1(tensor_in=None, layer=0, params=None, mtrain=None):
stride = params['fold']['stride'] #[[2, 2], [2, 2]]
use_crs = params['fold']['use_crs']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
if isinstance(stride[0], int):
stride = [stride]
#stride = stride[::-1]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in)
with tf.variable_scope('fold1_'+str(layer)) as scope:
num_srds = len(stride)
hgt_srds = [srd[0] for srd in stride]
wdh_srds = [srd[1] for srd in stride]
hws_srds = reduce(lambda x,y: x+y, stride )
hgt_srd_all = reduce(lambda x,y: x*y, hgt_srds)
wdh_srd_all = reduce(lambda x,y: x*y, wdh_srds)
hws_srd_all = hgt_srd_all * wdh_srd_all
hgt_dims = [ 2 + i for i in range(num_srds)]
wdh_dims = [num_srds + 3 + i for i in range(num_srds)]
hws_dims = [[hgt_dims[i], wdh_dims[i]] for i in range(num_srds)]
hws_dims = reduce(lambda x,y: x+y, hws_dims)
new_num = x_shape[3] * hws_srd_all
new_hgt = x_shape[1] // hgt_srd_all
new_wdh = x_shape[2] // wdh_srd_all
old_hgt = new_hgt * hgt_srd_all
old_wdh = new_wdh * wdh_srd_all
if old_hgt != x_shape[1] or old_wdh != x_shape[2]:
tensor_in = tensor_in[:, :old_hgt, :old_wdh, :]
#x_shape = get_shape(tensor_in)
tensor_in = tf.reshape(tensor_in, [x_shape[0], new_hgt] + hgt_srds + [new_wdh] + wdh_srds + [x_shape[3]])
tensor_in = tf.transpose(tensor_in, [0, 1, 2+num_srds] + hws_dims + [3+2*num_srds])
if use_crs:
for srd in stride:
assert srd[0] == srd[1] == 2, 'Invalid stride for cross position!'
indices = np.arange(hws_srd_all)
indices = np.reshape(indices, [4 for _ in range(len(stride))])
for i in range(len(stride)):
indices = np.take(indices, [0,3,1,2], axis=i)
indices = np.reshape(indices, [-1])
tensor_in = tf.reshape(tensor_in, [x_shape[0], new_hgt, new_wdh, hws_srd_all, x_shape[3]])
tensor_in = tf.gather(tensor_in, indices, axis=3)
tensor_out = tf.reshape(tensor_in, [x_shape[0], new_hgt, new_wdh, new_num])
print_activations(tensor_out)
return tensor_out
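# Generalized unfold1: the inverse transform, undoing the cross reordering before expanding channels back to space.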
def unfold1(tensor_in=None, layer=0, params=None, mtrain=None):
stride = params['unfold']['stride']
use_crs = params['unfold']['use_crs']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
if isinstance(stride[0], int):
stride = [stride]
#stride = stride[::-1]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in)
with tf.variable_scope('unfold1_'+str(layer)) as scope:
num_srds = len(stride)
hgt_srds = [srd[0] for srd in stride]
wdh_srds = [srd[1] for srd in stride]
hws_srds = reduce(lambda x,y: x+y, stride )
hgt_srd_all = reduce(lambda x,y: x*y, hgt_srds)
wdh_srd_all = reduce(lambda x,y: x*y, wdh_srds)
hws_srd_all = hgt_srd_all * wdh_srd_all
hgt_dims = [3 + 2 * i for i in range(num_srds)]
wdh_dims = [4 + 2 * i for i in range(num_srds)]
new_num = x_shape[3] // hws_srd_all
new_hgt = x_shape[1] * hgt_srd_all
new_wdh = x_shape[2] * wdh_srd_all
old_num = new_num * hws_srd_all
if old_num != x_shape[3]:
tensor_in = tensor_in[:, :, :, :old_num]
#x_shape = get_shape(tensor_in)
if use_crs:
for srd in stride:
assert srd[0] == srd[1] == 2, 'Invalid stride for cross position!'
indices = np.arange(hws_srd_all)
indices = np.reshape(indices, [4 for _ in range(len(stride))])
for i in range(len(stride)):
indices = np.take(indices, [0,2,3,1], axis=i)
indices = np.reshape(indices, [-1])
tensor_in = tf.reshape(tensor_in, x_shape[0:3] + [hws_srd_all] + [new_num])
tensor_in = tf.gather(tensor_in, indices, axis=3)
tensor_in = tf.reshape(tensor_in, x_shape[0:3] + hws_srds + [new_num])
tensor_in = tf.transpose(tensor_in, [0,1] + hgt_dims + [2] + wdh_dims + [3+2*num_srds])
tensor_out = tf.reshape(tensor_in, [x_shape[0], new_hgt, new_wdh, new_num])
print_activations(tensor_out)
return tensor_out
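# attn1: patch-wise query/key attention over sliding windows, iterating output positions with a TensorArray inside tf.while_loop.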
def attn1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
Reserved for vector neurons; the input shape is [N, H, W, M, C].
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['attn']['number']
shape = params['attn']['shape']
rate = params['attn']['rate']
stride = params['attn']['stride']
padding = params['attn']['padding']
use_bias = params['attn']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
m_shape = [shape[i]+(shape[i]-1)*(rate[i]-1) for i in range(2)] #[h, w]
shape = shape + x_shape[3:] + number #[h, w, M, C, M', C']
shape_q = [shape[0]*shape[1]*shape[2]*shape[3], shape[4]*shape[5]] #[h*w*M*C, M'*C']
shape_k = shape[0:4] + [shape[5]] #[h, w, M, C, C']
with tf.variable_scope('attn1_'+str(layer)) as scope:
weights = tf.get_variable(name='weights', shape=shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable) #(h, w, M, C, M', C')
weight_q = tf.reshape(weights, shape_q) #(h*w*M*C, M'*C')
weight_k = tf.reduce_sum(weights, axis=4) #(h, w, M, C, C')
if use_bias:
biases = tf.get_variable(name='biases', shape=number, dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable) #(M', C')
if padding == 'SAME':
new_hgt = int(np.ceil(x_shape[1] / stride[0]))
new_wdh = int(np.ceil(x_shape[2] / stride[1]))
pad_hgt_all = (new_hgt - 1) * stride[0] + m_shape[0] - x_shape[1]
pad_wdh_all = (new_wdh - 1) * stride[1] + m_shape[1] - x_shape[2]
pad_top = pad_hgt_all // 2
pad_btm = pad_hgt_all - pad_top
pad_lft = pad_wdh_all // 2
pad_rgt = pad_wdh_all - pad_lft
paddings = [[0, 0], [pad_top, pad_btm], [pad_lft, pad_rgt], [0, 0], [0, 0]]
tensor_in = tf.pad(tensor_in, paddings, mode='CONSTANT', constant_values=0)
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
elif padding == 'VALID':
new_hgt = int(np.ceil((x_shape[1] - m_shape[0] + 1) / stride[0]))
new_wdh = int(np.ceil((x_shape[2] - m_shape[1] + 1) / stride[1]))
else:
raise ValueError('Invalid padding method!')
y_shape = [x_shape[0], new_hgt, new_wdh] + number
tensor_out = tf.TensorArray(dtype=tf.float32, size=y_shape[1]*y_shape[2], dynamic_size=False, clear_after_read=True, \
tensor_array_name=None, handle=None, flow=None, infer_shape=True, \
element_shape=[y_shape[0]]+number, colocate_with_first_write_call=True) #(H*W, N, M', C')
def cond(i, tensor_out):
c = tf.less(i, y_shape[1]*y_shape[2])
return c
def body(i, tensor_out):
ymn = i // y_shape[2] * stride[0]
xmn = i % y_shape[2] * stride[1]
ymx = ymn + m_shape[0]
xmx = xmn + m_shape[1]
fetx = tensor_in[:, ymn:ymx:rate[0], xmn:xmx:rate[1], :, :] #(N, h, w, M, C)
fett = tf.reshape(fetx, [y_shape[0], -1]) #(N, h*w*M*C)
fetq = tf.matmul(fett, weight_q) #(N, M'*C') (N, h*w*M*C) (h*w*M*C, M'*C')
fetq = tf.reshape(fetq, [y_shape[0]]+number) #(N, M', C')
fett = tf.transpose(fetx, [1, 2, 3, 0, 4]) #(h, w, M, N, C)
fetk = tf.matmul(fett, weight_k) #(h, w, M, N, C') (h, w, M, N, C) (h, w, M, C, C')
fetk = tf.transpose(fetk, [3, 0, 1, 2, 4]) #(N, h, w, M, C')
fetk = tf.reshape(fetk, [y_shape[0], -1, number[1]]) #(N, h*w*M, C')
atts = tf.matmul(fetq, fetk, transpose_b=True) #(N, M', h*w*M)
atts = atts / np.sqrt(number[1]) #(N, M', h*w*M)
atts = tf.nn.softmax(atts, axis=-1) #(N, M', h*w*M)
fetk = tf.matmul(atts, fetk) #(N, M', C') (N, M', h*w*M) (N, h*w*M, C')
fetq = fetq + fetk #(N, M', C')
fetq = fetq + biases if use_bias else fetq #(N, M', C')
tensor_out = tensor_out.write(i, fetq) #(H'*W', N, M', C')
return [i+1, tensor_out]
i = tf.constant(0)
[i, tensor_out] = tf.while_loop(cond, body, loop_vars=[i, tensor_out], shape_invariants=None, \
parallel_iterations=y_shape[1]*y_shape[2], back_prop=True, swap_memory=False)
tensor_out = tensor_out.stack() #(H'*W', N, M', C')
tensor_out = tf.transpose(tensor_out, [1, 0, 2, 3]) #(N, H'*W', M', C')
tensor_out = tf.reshape(tensor_out, y_shape) #(N, H', W', M', C')
print_activations(tensor_out)
return tensor_out
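#A minimal numpy sketch of the per-window attention math in body() above
#(illustrative toy sizes, not the model's real dimensions): one query is a
#linear map of the flattened window, the keys are per-position maps, and the
#attended keys are added back onto the query.
def _attn_window_sketch():
    import numpy as np
    h, w, M, C, Mp, Cp = 3, 3, 2, 4, 2, 4
    fetx = np.random.randn(h, w, M, C)                        #one input window
    weight_q = np.random.randn(h*w*M*C, Mp*Cp)
    weight_k = np.random.randn(h, w, M, C, Cp)
    fetq = (fetx.reshape(1, -1) @ weight_q).reshape(Mp, Cp)   #(M', C')
    fetk = np.einsum('hwmc,hwmcd->hwmd', fetx, weight_k)      #(h, w, M, C')
    fetk = fetk.reshape(-1, Cp)                               #(h*w*M, C')
    atts = fetq @ fetk.T / np.sqrt(Cp)                        #(M', h*w*M)
    atts = np.exp(atts) / np.exp(atts).sum(axis=-1, keepdims=True) #softmax
    out = fetq + atts @ fetk                                  #(M', C')
    assert out.shape == (Mp, Cp)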
def proj1(tensor_in=None, layer=0, params=None, mtrain=None):
number = params['proj']['number'] #[b, r, c']
shape = params['proj']['shape']
rate = params['proj']['rate']
stride = params['proj']['stride']
padding = params['proj']['padding']
use_bias = params['proj']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('proj1_'+str(layer)) as scope:
x_shape = get_shape(tensor_in) #[N, H, W, C]
tensor_in = tf.reshape(tensor_in, x_shape[:3]+number[0:2]+\
[x_shape[3]//number[0]//number[1]]) #(N, H, W, b, r, c)
tensor_in = tf.transpose(tensor_in, [0, 3, 4, 1, 2, 5]) #(N, b, r, H, W, c)
x_shape = get_shape(tensor_in) #[N, b, r, H, W, c]
y_shape = x_shape[:5] + [number[2]] #[N, b, r, H, W, c']
tensor_in = tf.reshape(tensor_in, [x_shape[0]*x_shape[1]*x_shape[2]]+x_shape[3:6]) #(N*b*r, H, W, c)
params['conv'] = {'number':number[2], 'shape':shape, 'rate':rate, \
'stride':stride, 'padding':padding, 'use_bias':use_bias}
tensor_out = conv1(tensor_in, 0, params, mtrain) #(N*b*r, H, W, c')
tensor_out = tf.reshape(tensor_out, y_shape) #[N, b, r, H, W, c']
tensor_out = tf.transpose(tensor_out, [0, 3, 4, 1, 2, 5]) #(N, H, W, b, r, c')
y_shape = get_shape(tensor_out) #[N, H, W, b, r, c']
tensor_out = tf.reshape(tensor_out, y_shape[0:3]+[y_shape[3]*y_shape[4]*y_shape[5]]) #(N, H, W, b*r*c')
#tf.summary.histogram('proj', tensor_out)
print_activations(tensor_out)
return tensor_out
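#A minimal numpy shape walk-through of proj1 above (illustrative toy sizes,
#with a plain 1x1 linear map standing in for conv1): the channel axis is split
#into b*r groups, the groups are folded into the batch axis so one shared
#filter bank acts on every group, then the groups are moved back into channels.
def _proj_group_sketch():
    import numpy as np
    N, H, W, b, r, c, cp = 2, 4, 4, 2, 3, 5, 7
    x = np.random.randn(N, H, W, b*r*c)
    x = x.reshape(N, H, W, b, r, c).transpose(0, 3, 4, 1, 2, 5) #(N, b, r, H, W, c)
    x = x.reshape(N*b*r, H, W, c)                               #groups into batch
    kernel = np.random.randn(c, cp)                             #stand-in 1x1 conv
    y = x @ kernel                                              #(N*b*r, H, W, c')
    y = y.reshape(N, b, r, H, W, cp).transpose(0, 3, 4, 1, 2, 5) #(N, H, W, b, r, c')
    y = y.reshape(N, H, W, b*r*cp)                              #(N, H, W, b*r*c')
    assert y.shape == (N, H, W, b*r*cp)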
def proj_bn1(tensor_in=None, layer=0, params=None, mtrain=None):
params['proj']['use_bias'] = False
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('proj_bn1_'+str(layer)) as scope:
proj = proj1(tensor_in, 0, params, mtrain)
tensor_out = batchnorm1(proj, 0, params, mtrain)
return tensor_out
def proj_relu1(tensor_in=None, layer=0, params=None, mtrain=None):
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('proj_relu1_'+str(layer)) as scope:
proj = proj1(tensor_in, 0, params, mtrain)
tensor_out = relu1(proj, 0, params, mtrain)
return tensor_out
def proj_bn_relu1(tensor_in=None, layer=0, params=None, mtrain=None):
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('proj_bn_relu1_'+str(layer)) as scope:
bn = proj_bn1(tensor_in, 0, params, mtrain)
tensor_out = relu1(bn, 0, params, mtrain)
return tensor_out
def group_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
use_fold = params['group_unit']['use_fold']
number = params['group_unit']['number'] #[[b, r, c], [b, r, c], [b, r, c]]
shape = params['group_unit']['shape']
rate = params['group_unit']['rate']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('group_unit1_'+str(layer)) as scope:
if use_fold:
params['fold'] = {'stride':[[2,2]], 'use_crs':False}
tensor_in = fold1(tensor_in, 0, params, mtrain)
params['proj'] = {'number':number[0], 'shape':[1,1], 'rate':[1,1], 'stride':[1,1], \
'padding':'VALID', 'use_bias':False}
residual = proj_bn_relu1(tensor_in, 0, params, mtrain)
params['proj'] = {'number':number[1], 'shape':shape, 'rate':rate, 'stride':[1,1], \
'padding':'SAME', 'use_bias':False}
residual = proj_bn_relu1(residual, 1, params, mtrain)
params['proj'] = {'number':number[2], 'shape':[1,1], 'rate':[1,1], 'stride':[1,1], \
'padding':'VALID', 'use_bias':False}
residual = proj_bn1(residual, 0, params, mtrain)
tensor_out = tensor_in + residual
tensor_out = relu1(tensor_out, 0, params, mtrain)
return tensor_out
def group_block1(tensor_in=None, layer=0, params=None, mtrain=None):
block_setting = params['group_block']['block_setting']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
tensor_out = tensor_in
out_list = []
for i, block in enumerate(block_setting):
number, shape, rate, unit_number, unit_trainable = block
params['com']['trainable'] = unit_trainable
with tf.variable_scope('group_block1_'+str(layer)+'_'+str(i)) as scope:
for j in range(unit_number):
if j == 0: #the first unit in the block
params['group_unit'] = {'use_fold':True ,'number':number, 'shape':shape, 'rate':rate}
else: #identity mapping
params['group_unit'] = {'use_fold':False,'number':number, 'shape':shape, 'rate':rate}
tensor_out = group_unit1(tensor_out, j, params, mtrain)
out_list.append(tensor_out)
return out_list
#the group block setting
#depth_bottle, depth_output, shape, rate, unit_number, unit_trainable
#no matter how weak the input features are, we hold that they should still produce a complete description of the output
#although low-level features describe low-level attributes more strongly, they should also be capable of fully describing higher-level attributes
#moreover, as the number of layers grows, the low-level attributes of the features are weakened while the high-level attributes keep being strengthened
#256 #1 * 256 --> 1 * 64 | #64
#1024 #4 * 256 --> 4 * 64 | #256
#4096 #16 * 256 --> 16 * 64 | #1024 #1 * 1024 --> 1 * 256 | #256
#16384 #64 * 256 --> 64 * 64 | #4096 #4 * 1024 --> 4 * 256 | #1024
#65536 #256 * 256 --> 256 * 64 | #16384 #16 * 1024 --> 16 * 256 | #4096 #1 * 4096 --> 1 * 1024 | #1024
#262144 #1024 * 256 --> 1024 * 64 | #65536 #64 * 1024 --> 64 * 256 | #16384 #4 * 4096 --> 4 * 1024 | #4096
self.grp_set = [([[[1, 64]], [[1, 256]], ], [2,2], [1,1], 3, True ),
([[[4, 64]], [[4, 256]], ], [2,2], [1,1], 4, True ),
([[[16,64],[1,256]], [[1,1024],[16,256]]], [2,2], [1,1], 6, True ),
([[[64,64],[4,256]], [[4,1024],[64,256]]], [2,2], [1,1], 3, True )]
def proj1(tensor_in=None, layer=0, params=None, mtrain=None):
number = params['proj']['number'] #[b, c']
shape = params['proj']['shape']
rate = params['proj']['rate']
stride = params['proj']['stride']
padding = params['proj']['padding']
use_bias = params['proj']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('proj1_'+str(layer)) as scope:
x_shape = get_shape(tensor_in) #[N, H, W, C]
tensor_in = tf.reshape(tensor_in, x_shape[:3]+[number[0],x_shape[3]//number[0]]) #(N, H, W, b, c)
tensor_in = tf.transpose(tensor_in, [0, 3, 1, 2, 4]) #(N, b, H, W, c)
x_shape = get_shape(tensor_in) #[N, b, H, W, c]
y_shape = x_shape[:4] + [number[1]] #[N, b, H, W, c']
tensor_in = tf.reshape(tensor_in, [x_shape[0]*x_shape[1]]+x_shape[2:4]+\
[x_shape[4]]) #(N*b, H, W, c)
params['conv'] = {'number':number[1], 'shape':shape, 'rate':rate, \
'stride':stride, 'padding':padding, 'use_bias':use_bias}
tensor_out = conv1(tensor_in, 0, params, mtrain) #(N*b, H, W, c')
tensor_out = tf.reshape(tensor_out, y_shape) #[N, b, H, W, c']
tensor_out = tf.transpose(tensor_out, [0, 2, 3, 1, 4]) #(N, H, W, b, c')
y_shape = get_shape(tensor_out) #[N, H, W, b, c']
tensor_out = tf.reshape(tensor_out, y_shape[0:3]+[y_shape[3]*y_shape[4]]) #(N, H, W, b*c')
#tf.summary.histogram('proj', tensor_out)
print_activations(tensor_out)
return tensor_out
def group_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
use_fold = params['group_unit']['use_fold']
number = params['group_unit']['number'] #[[[b, c'], [b, c'], [b, c']], [[b, c'], [b, c'], [b, c']], c"]
shape = params['group_unit']['shape']
rate = params['group_unit']['rate']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('group_unit1_'+str(layer)) as scope:
if use_fold:
params['fold'] = {'stride':[[2,2]], 'use_crs':False}
tensor_in = fold1(tensor_in, 0, params, mtrain)
x_shape = get_shape(tensor_in) #[N, H, W, C]
shrink = [tensor_in]
        #dimension shrinking
        #no matter how weak the input features are, we hold that they should still produce a complete description of the output
        #although low-level features describe low-level attributes more strongly, they should also be capable of fully describing higher-level attributes
        #moreover, as the number of layers grows, the low-level attributes of the features are weakened while the high-level attributes keep being strengthened
residual = tensor_in
for i, depth in enumerate(number[0]): #[b, c']
params['proj'] = {'number':depth, 'shape':[1,1], 'rate':[1,1], 'stride':[1,1], \
'padding':'VALID', 'use_bias':False}
residual = proj_bn_relu1(residual, 0+i, params, mtrain)
shrink.append(residual)
shrink = shrink[:-1]
shrink = shrink[::-1]
        #global correlation
r_shape = get_shape(residual)
params['conv'] = {'number':r_shape[3], 'shape':shape, 'rate':rate, 'stride':[1,1], 'padding':'SAME'}
residual = conv_bn_relu1(residual, 0, params, mtrain)
        #dimension expansion
for i, depth in enumerate(number[1]): #[b, c']
params['proj'] = {'number':depth, 'shape':[1,1], 'rate':[1,1], 'stride':[1,1], \
'padding':'VALID', 'use_bias':False}
residual = proj_bn1(residual, 0+i, params, mtrain)
residual = residual + shrink[i]
residual = relu1(residual, 0+i, params, mtrain)
tensor_out = residual
return tensor_out
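#A minimal plain-python sketch of the shrink/expand bookkeeping above
#(illustrative labels): the shrink list records the input of every shrinking
#step; dropping the last entry and reversing pairs each expansion step with
#the matching shrink input for the residual additions.
def _shrink_expand_pairing():
    shrink = ['x_in', 's0_out', 's1_out'] #input, then the output of each shrink step
    shrink = shrink[:-1]                  #the deepest output feeds the global stage
    shrink = shrink[::-1]                 #expand in the reverse order of shrinking
    expand_steps = ['e0', 'e1']
    assert list(zip(expand_steps, shrink)) == [('e0', 's0_out'), ('e1', 'x_in')]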
#64 | #256 #1 * 256 --> 1 * 64 | #64
#128 | #1024 #4 * 256 --> 4 * 64 | #256
#256 | #4096 #16 * 256 --> 16 * 64 | #1024 #2 * 512 --> 2 * 128 | #256
#512 | #16384 #64 * 256 --> 64 * 64 | #4096 #8 * 512 --> 8 * 128 | #1024
#1024 | #65536 #256 * 256 --> 256 * 64 | #16384 #32 * 512 --> 32 * 128 | #4096 #4 * 1024 --> 4 * 256 | #1024
#2048 | #262144 #1024 * 256 --> 1024 * 64 | #65536 #128 * 512 --> 128 * 128 | #16384 #16 * 1024 --> 16 * 256 | #4096
self.grp_set = [([[[1, 64]], [[1,256]], 64], [3,3], [1,1], 3, True ),
([[[4, 64]], [[4,256]], 128], [3,3], [1,1], 4, True ),
([[[16,64],[2,128]], [[2,512],[16,256]], 256], [3,3], [1,1], 6, True ),
([[[64,64],[8,128]], [[8,512],[64,256]], 512], [3,3], [1,1], 3, True )]
def attn1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['attn']['number']
shape = params['attn']['shape']
rate = params['attn']['rate']
stride = params['attn']['stride']
padding = params['attn']['padding']
use_bias = params['attn']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
m_shape = [shape[i]+(shape[i]-1)*(rate[i]-1) for i in range(2)]
q_shape = shape + x_shape[3:] + number #[h, w, M, C, M', C']
    k_shape = shape + x_shape[3:] + [number[1]] #[h, w, M, C, C']
q_shape = [reduce(lambda x,y: x*y, q_shape[0:4]), q_shape[4]*q_shape[5]] #[h*w*M*C, M'*C']
with tf.variable_scope('attn1_'+str(layer)) as scope:
weight_q = tf.get_variable(name='weight_q', shape=q_shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable) #(h*w*M*C, M'*C')
weight_k = tf.get_variable(name='weight_k', shape=k_shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable) #(h, w, M, C, C')
if use_bias:
biase_q = tf.get_variable(name='biase_q', shape=q_shape[-1], dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
biase_k = tf.get_variable(name='biase_k', shape=k_shape[-1], dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
def attn_img(tensor_in):
        x_shape = get_shape(tensor_in) #[H, W, M, C]
        if padding == 'SAME':
            new_hgt = int(np.ceil(x_shape[0] / stride[0]))
            new_wdh = int(np.ceil(x_shape[1] / stride[1]))
            pad_hgt_all = (new_hgt - 1) * stride[0] + m_shape[0] - x_shape[0]
            pad_wdh_all = (new_wdh - 1) * stride[1] + m_shape[1] - x_shape[1]
            pad_top = pad_hgt_all // 2
            pad_btm = pad_hgt_all - pad_top
            pad_lft = pad_wdh_all // 2
            pad_rgt = pad_wdh_all - pad_lft
            paddings = [[pad_top, pad_btm], [pad_lft, pad_rgt], [0, 0], [0, 0]]
            tensor_in = tf.pad(tensor_in, paddings, mode='CONSTANT', constant_values=0)
            x_shape = get_shape(tensor_in) #[H, W, M, C]
        elif padding == 'VALID':
            new_hgt = int(np.ceil((x_shape[0] - m_shape[0] + 1) / stride[0]))
            new_wdh = int(np.ceil((x_shape[1] - m_shape[1] + 1) / stride[1]))
        else:
            raise ValueError('Invalid padding method!')
        y_shape = [new_hgt, new_wdh] + number
        tensor_out = tf.TensorArray(dtype=tf.float32, size=y_shape[0]*y_shape[1], dynamic_size=False, clear_after_read=True, \
                                    tensor_array_name=None, handle=None, flow=None, infer_shape=True, \
                                    element_shape=number, colocate_with_first_write_call=True) #(H*W, M', C')
        def cond(i, tensor_out):
            c = tf.less(i, y_shape[0]*y_shape[1])
            return c
        def body(i, tensor_out):
            ymn = i // y_shape[1] * stride[0]
            xmn = i % y_shape[1] * stride[1]
            ymx = ymn + m_shape[0]
            xmx = xmn + m_shape[1]
            fetx = tensor_in[ymn:ymx:rate[0], xmn:xmx:rate[1], :, :] #(h, w, M, C)
            fett = tf.reshape(fetx, [1, -1]) #(1, h*w*M*C)
            fetq = tf.matmul(fett, weight_q) #(1, M'*C') (1, h*w*M*C) (h*w*M*C, M'*C')
            fetq = fetq + biase_q if use_bias else fetq #(1, M'*C')
            fetq = tf.reshape(fetq, number) #(M', C')
            fett = tf.expand_dims(fetx, axis=3) #(h, w, M, 1, C)
            fetk = tf.matmul(fett, weight_k) #(h, w, M, 1, C') (h, w, M, 1, C) (h, w, M, C, C')
            fetk = fetk + biase_k if use_bias else fetk #(h, w, M, 1, C')
            fetk = tf.reshape(fetk, [-1, number[1]]) #(h*w*M, C')
            atts = tf.matmul(fetq, fetk, transpose_b=True) #(M', h*w*M)
            atts = atts / np.sqrt(number[1]) #(M', h*w*M)
            atts = tf.nn.softmax(atts, axis=-1) #(M', h*w*M)
            fetk = tf.matmul(atts, fetk) #(M', C') (M', h*w*M) (h*w*M, C')
            fetq = fetq + fetk #(M', C')
            tensor_out = tensor_out.write(i, fetq) #(H'*W', M', C')
            return [i+1, tensor_out]
        #pra_itrs = max(y_shape[0] * y_shape[1] // 64, 16)
        i = tf.constant(0)
        [i, tensor_out] = tf.while_loop(cond, body, loop_vars=[i, tensor_out], shape_invariants=None, \
                                        parallel_iterations=10, back_prop=True, swap_memory=False)
        tensor_out = tensor_out.stack() #(H'*W', M', C')
        tensor_out = tf.reshape(tensor_out, y_shape) #(H', W', M', C')
        return tensor_out
tensor_out = tf.map_fn(attn_img, tensor_in, dtype=tf.float32, parallel_iterations=x_shape[0], \
back_prop=True, swap_memory=False, infer_shape=True) #(N, H', W', M', C')
print_activations(tensor_out)
return tensor_out
def proj1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M*C].
'''
number = params['proj']['number']
shape = params['proj']['shape']
rate = params['proj']['rate']
stride = params['proj']['stride']
padding = params['proj']['padding']
use_bias = params['proj']['use_bias']
use_attn = params['proj']['use_attn']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('proj1_'+str(layer)) as scope:
x_shape = get_shape(tensor_in) #[N, H, W, M*C]
tensor_in = tf.reshape(tensor_in, x_shape[:3]+number[0]) #(N, H, W, b, r, m, c)
tensor_in = tf.transpose(tensor_in, [0, 4, 1, 2, 3, 5, 6]) #(N, r, H, W, b, m, c)
x_shape = get_shape(tensor_in) #[N, r, H, W, b, m, c]
y_shape = x_shape[:4] + number[1] #[N, r, H, W, b', m', c']
if use_attn:
tensor_in = tf.reshape(tensor_in, [x_shape[0]*x_shape[1]]+x_shape[2:4]+\
[x_shape[4]*x_shape[5], x_shape[6]]) #(N*r, H, W, b*m, c)
params['attn'] = {'number':[number[1][0]*number[1][1], number[1][2]], 'shape':shape, 'rate':rate, \
'stride':stride, 'padding':padding, 'use_bias':use_bias}
tensor_out = attn1(tensor_in, 0, params, mtrain) #(N*r, H, W, b'*m', c')
else:
tensor_in = tf.reshape(tensor_in, [x_shape[0]*x_shape[1]]+x_shape[2:4]+\
[x_shape[4]*x_shape[5]*x_shape[6]]) #(N*r, H, W, b'*m'*c')
params['conv'] = {'number':number[1][0]*number[1][1]*number[1][2], 'shape':shape, 'rate':rate, \
'stride':stride, 'padding':padding, 'use_bias':use_bias}
tensor_out = conv1(tensor_in, 0, params, mtrain) #(N*r, H, W, b'*m'*c')
tensor_out = tf.reshape(tensor_out, y_shape) #(N, r, H, W, b', m', c')
tensor_out = tf.transpose(tensor_out, [0, 2, 3, 4, 1, 5, 6]) #(N, H, W, b', r, m', c')
y_shape = get_shape(tensor_out) #[N, H, W, b', r, m', c']
tensor_out = tf.reshape(tensor_out, y_shape[0:3] + \
[y_shape[3]*y_shape[4]*y_shape[5]*y_shape[6]]) #(N, H, W, b'*r*m'*c')
#tf.summary.histogram('proj', tensor_out)
print_activations(tensor_out)
return tensor_out
#the group block setting
self.grp_set = [([[[ 4,1,1,64],[1,1,64]], [[1,1,1,64],[1,1,64]], [[1,1,1,64],[ 4,1,64]]], 3, True ), # 4
([[[ 8,2,1,64],[2,1,64]], [[1,2,2,64],[1,2,64]], [[2,2,1,64],[ 8,1,64]]], 4, True ), # 16
([[[16,4,1,64],[4,1,64]], [[1,4,4,64],[1,4,64]], [[4,4,1,64],[16,1,64]]], 6, True ), # 64
([[[32,8,1,64],[8,1,64]], [[1,8,8,64],[1,8,64]], [[8,8,1,64],[32,1,64]]], 3, True )] # 256
def attn1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['attn']['number']
shape = params['attn']['shape']
rate = params['attn']['rate']
stride = params['attn']['stride']
padding = params['attn']['padding']
use_bias = params['attn']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
m_shape = [shape[i]+(shape[i]-1)*(rate[i]-1) for i in range(2)]
shape = shape + x_shape[3:] + number #[h, w, M, C, M', C']
shape_q = [reduce(lambda x,y: x*y, shape[0:4]), shape[4]*shape[5]] #[h*w*M*C, M'*C']
shape_k = shape[0:4] + [shape[5]] #[h, w, M, C, C']
with tf.variable_scope('attn1_'+str(layer)) as scope:
weights = tf.get_variable(name='weights', shape=shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable) #(h, w, M, C, M', C')
weight_q = tf.reshape(weights, shape_q) #(h*w*M*C, M'*C')
weight_k = tf.reduce_sum(weights, axis=4) #(h, w, M, C, C')
if use_bias:
biases = tf.get_variable(name='biases', shape=number, dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable) #(M', C')
if padding == 'SAME':
new_hgt = int(np.ceil(x_shape[1] / stride[0]))
new_wdh = int(np.ceil(x_shape[2] / stride[1]))
pad_hgt_all = (new_hgt - 1) * stride[0] + m_shape[0] - x_shape[1]
pad_wdh_all = (new_wdh - 1) * stride[1] + m_shape[1] - x_shape[2]
pad_top = pad_hgt_all // 2
pad_btm = pad_hgt_all - pad_top
pad_lft = pad_wdh_all // 2
pad_rgt = pad_wdh_all - pad_lft
paddings = [[0, 0], [pad_top, pad_btm], [pad_lft, pad_rgt], [0, 0], [0, 0]]
tensor_in = tf.pad(tensor_in, paddings, mode='CONSTANT', constant_values=0)
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
elif padding == 'VALID':
new_hgt = int(np.ceil((x_shape[1] - m_shape[0] + 1) / stride[0]))
new_wdh = int(np.ceil((x_shape[2] - m_shape[1] + 1) / stride[1]))
else:
raise ValueError('Invalid padding method!')
y_shape = [x_shape[0], new_hgt, new_wdh] + number
tensor_out = tf.TensorArray(dtype=tf.float32, size=y_shape[1]*y_shape[2], dynamic_size=False, clear_after_read=True, \
tensor_array_name=None, handle=None, flow=None, infer_shape=True, \
element_shape=[y_shape[0]]+number, colocate_with_first_write_call=True) #(H*W, N, M', C')
def cond(i, tensor_out):
c = tf.less(i, y_shape[1]*y_shape[2])
return c
def body(i, tensor_out):
ymn = i // y_shape[2] * stride[0]
xmn = i % y_shape[2] * stride[1]
ymx = ymn + m_shape[0]
xmx = xmn + m_shape[1]
fetx = tensor_in[:, ymn:ymx:rate[0], xmn:xmx:rate[1], :, :] #(N, h, w, M, C)
fett = tf.reshape(fetx, [y_shape[0], -1]) #(N, h*w*M*C)
fetq = tf.matmul(fett, weight_q) #(N, M'*C') (N, h*w*M*C) (h*w*M*C, M'*C')
fetq = tf.reshape(fetq, [y_shape[0]]+number) #(N, M', C')
fett = tf.transpose(fetx, [1, 2, 3, 0, 4]) #(h, w, M, N, C)
fetk = tf.matmul(fett, weight_k) #(h, w, M, N, C') (h, w, M, N, C) (h, w, M, C, C')
fetk = tf.transpose(fetk, [3, 0, 1, 2, 4]) #(N, h, w, M, C')
fetk = tf.reshape(fetk, [y_shape[0], -1, number[1]]) #(N, h*w*M, C')
atts = tf.matmul(fetq, fetk, transpose_b=True) #(N, M', h*w*M)
atts = atts / np.sqrt(number[1]) #(N, M', h*w*M)
atts = tf.nn.softmax(atts, axis=-1) #(N, M', h*w*M)
fetk = tf.matmul(atts, fetk) #(N, M', C') (N, M', h*w*M) (N, h*w*M, C')
fetq = fetq + fetk #(N, M', C')
fetq = fetq + biases if use_bias else fetq #(N, M', C')
tensor_out = tensor_out.write(i, fetq) #(H'*W', N, M', C')
return [i+1, tensor_out]
i = tf.constant(0)
[i, tensor_out] = tf.while_loop(cond, body, loop_vars=[i, tensor_out], shape_invariants=None, \
parallel_iterations=y_shape[1]*y_shape[2], back_prop=True, swap_memory=False)
tensor_out = tensor_out.stack() #(H'*W', N, M', C')
tensor_out = tf.transpose(tensor_out, [1, 0, 2, 3]) #(N, H'*W', M', C')
tensor_out = tf.reshape(tensor_out, y_shape) #(N, H', W', M', C')
print_activations(tensor_out)
return tensor_out
def group_block1(tensor_in=None, layer=0, params=None, mtrain=None):
block_setting = params['group_block']['block_setting']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
tensor_out = tensor_in
out_list = []
for i, block in enumerate(block_setting):
num_output, num_bottle, rate, stride, use_attn, use_drop, kep_prob, unit_number, unit_trainable = block
params['com']['trainable'] = unit_trainable
with tf.variable_scope('group_block1_'+str(layer)+'_'+str(i)) as scope:
for j in range(unit_number):
if j == 0: #the first unit in the block
params['group_unit'] = {'num_output':num_output, 'num_bottle':num_bottle, 'rate':rate, 'stride':stride, \
'use_attn':use_attn, 'use_drop':use_drop, 'kep_prob':kep_prob}
else: #identity mapping
params['group_unit'] = {'num_output':num_output, 'num_bottle':num_bottle, 'rate':rate, 'stride':[1, 1], \
'use_attn':use_attn, 'use_drop':use_drop, 'kep_prob':kep_prob}
tensor_out = group_unit1(tensor_out, j, params, mtrain)
out_list.append(tensor_out)
return out_list
def group_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M*C].
    1.Features are dropped or merged by compressing spatially adjacent features within a channel, rather than keeping channels while discarding spatial features as an ordinary CNN does.
    2.A CNN drops or merges features without selection and without regard for their spatial distribution; it also does so at the wrong time: because a CNN is fully connected across channels, it must apply pooling or strided convolution early to enlarge the spatial correlation range, thereby discarding spatial features whose correlations have not yet matured.
    3.The length of a vector neuron should be determined by how much expressive power that neuron is expected to have; the number of features to drop or merge should be determined by the available parameter budget.
    4.Compression and expansion act on redundant features, while connection and combination act on distinctive features, which is exactly the point of a CNN. The redundancy here, however, is only with respect to shape: an object's different positions and shape details are stored precisely in these redundant features, so redundant features must not be dropped, only compressed and expanded.
'''
    num_output = params['group_unit']['num_output'] #[b, m, c], the keys set by group_block1 above
    num_bottle = params['group_unit']['num_bottle'] #[b, m, c]
rate = params['group_unit']['rate']
stride = params['group_unit']['stride']
use_attn = params['group_unit']['use_attn']
use_drop = params['group_unit']['use_drop']
kep_prob = params['group_unit']['kep_prob']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0] #(None, 256, 256, 1, 64)
with tf.variable_scope('group_unit1_'+str(layer)) as scope:
if np.any(np.asarray(stride) > 1):
params['fold'] = {'stride':stride, 'use_crs':True}
tensor_in = fold1(tensor_in, 0, params, mtrain)
x_shape = get_shape(tensor_in)
if x_shape[3]*x_shape[4] != num_output[0]*num_output[1]*num_output[2]:
params['proj'] = {'number':num_output, 'shape':[1,1], 'rate':rate[0]+[1,1], 'stride':[1,1,1,1], \
'padding':'VALID', 'use_bias':False, 'use_attn':False}
shortcut = proj_bn1(tensor_in, 0, params, mtrain)
elif x_shape[3] != num_output[0]*num_output[1] or x_shape[4] != num_output[2]:
shortcut = tf.reshape(tensor_in, x_shape[0:3]+[num_output[0]*num_output[1], num_output[2]])
else:
shortcut = tensor_in
params['proj'] = {'number':num_bottle, 'shape':[1,1], 'rate':rate[0]+[1,1], 'stride':[1,1,1,1], \
'padding':'VALID', 'use_bias':False, 'use_attn':False}
residual = proj_bn_relu1(tensor_in, 0, params, mtrain)
params['proj'] = {'number':num_bottle, 'shape':[3,3], 'rate':rate[1]+[1,1], 'stride':[1,1,1,1], \
'padding':'SAME', 'use_bias':False, 'use_attn':use_attn}
residual = proj_bn_relu1(residual, 1, params, mtrain)
params['proj'] = {'number':num_output, 'shape':[1,1], 'rate':rate[0]+[1,1], 'stride':[1,1,1,1], \
'padding':'VALID', 'use_bias':False, 'use_attn':False}
residual = proj_bn1(residual, 1, params, mtrain)
tensor_out = residual + shortcut
tensor_out = relu1(tensor_out, 0, params, mtrain)
if use_drop:
y_shape = get_shape(tensor_out) #[N, H, W, M, C]
params['dropout'] = {'keep_p':kep_prob, 'shape':y_shape[0:4] + [1]}
tensor_out = dropout1(tensor_out, 0, params, mtrain)
return tensor_out
def fold1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
stride = params['fold']['stride'] #[[2, 2], [2, 2]]
use_crs = params['fold']['use_crs']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
if isinstance(stride[0], int):
stride = [stride]
#stride = stride[::-1]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in)
with tf.variable_scope('fold1_'+str(layer)) as scope:
num_srds = len(stride)
hgt_srds = [srd[0] for srd in stride]
wdh_srds = [srd[1] for srd in stride]
hws_srds = reduce(lambda x,y: x+y, stride )
hgt_srd_all = reduce(lambda x,y: x*y, hgt_srds)
wdh_srd_all = reduce(lambda x,y: x*y, wdh_srds)
hws_srd_all = hgt_srd_all * wdh_srd_all
hgt_dims = [ 2 + i for i in range(num_srds)]
wdh_dims = [num_srds + 3 + i for i in range(num_srds)]
hws_dims = [[hgt_dims[i], wdh_dims[i]] for i in range(num_srds)]
hws_dims = reduce(lambda x,y: x+y, hws_dims)
new_num = x_shape[3] * hws_srd_all
new_hgt = x_shape[1] // hgt_srd_all
new_wdh = x_shape[2] // wdh_srd_all
old_hgt = new_hgt * hgt_srd_all
old_wdh = new_wdh * wdh_srd_all
if old_hgt != x_shape[1] or old_wdh != x_shape[2]:
tensor_in = tensor_in[:, :old_hgt, :old_wdh, :, :]
#x_shape = get_shape(tensor_in)
tensor_in = tf.reshape(tensor_in, [x_shape[0], new_hgt] + hgt_srds + [new_wdh] + wdh_srds + x_shape[3:])
tensor_in = tf.transpose(tensor_in, [0, 1, 2+num_srds] + hws_dims + [3+2*num_srds, 4+2*num_srds])
if use_crs:
for srd in stride:
assert srd[0] == srd[1] == 2, 'Invalid stride for cross position!'
indices = np.arange(hws_srd_all)
indices = np.reshape(indices, [4 for _ in range(len(stride))])
for i in range(len(stride)):
indices = np.take(indices, [0,3,1,2], axis=i)
indices = np.reshape(indices, [-1])
tensor_in = tf.reshape(tensor_in, [x_shape[0], new_hgt, new_wdh, hws_srd_all] + x_shape[3:])
tensor_in = tf.gather(tensor_in, indices, axis=3)
tensor_out = tf.reshape(tensor_in, [x_shape[0], new_hgt, new_wdh, new_num, x_shape[4]])
print_activations(tensor_out)
return tensor_out
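#A minimal numpy sketch of fold1 above for a single [2,2] stride with
#use_crs=False (illustrative toy sizes): each 2x2 spatial neighbourhood is
#folded into the vector-neuron axis M, so H and W halve, M grows 4x and C is
#untouched.
def _fold_sketch():
    import numpy as np
    N, H, W, M, C = 2, 4, 6, 3, 5
    x = np.random.randn(N, H, W, M, C)
    y = x.reshape(N, H//2, 2, W//2, 2, M, C)
    y = y.transpose(0, 1, 3, 2, 4, 5, 6)   #(N, H/2, W/2, 2, 2, M, C)
    y = y.reshape(N, H//2, W//2, 4*M, C)   #neighbourhood folded into M
    #the first M folded neurons are the top-left pixel's original neurons
    assert np.allclose(y[0, 0, 0, :M, :], x[0, 0, 0, :, :])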
def unfold1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
stride = params['unfold']['stride']
use_crs = params['unfold']['use_crs']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
if isinstance(stride[0], int):
stride = [stride]
#stride = stride[::-1]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in)
with tf.variable_scope('unfold1_'+str(layer)) as scope:
num_srds = len(stride)
hgt_srds = [srd[0] for srd in stride]
wdh_srds = [srd[1] for srd in stride]
hws_srds = reduce(lambda x,y: x+y, stride )
hgt_srd_all = reduce(lambda x,y: x*y, hgt_srds)
wdh_srd_all = reduce(lambda x,y: x*y, wdh_srds)
hws_srd_all = hgt_srd_all * wdh_srd_all
hgt_dims = [3 + 2 * i for i in range(num_srds)]
wdh_dims = [4 + 2 * i for i in range(num_srds)]
new_num = x_shape[3] // hws_srd_all
new_hgt = x_shape[1] * hgt_srd_all
new_wdh = x_shape[2] * wdh_srd_all
old_num = new_num * hws_srd_all
if old_num != x_shape[3]:
tensor_in = tensor_in[:, :, :, :old_num, :]
#x_shape = get_shape(tensor_in)
if use_crs:
for srd in stride:
assert srd[0] == srd[1] == 2, 'Invalid stride for cross position!'
indices = np.arange(hws_srd_all)
indices = np.reshape(indices, [4 for _ in range(len(stride))])
for i in range(len(stride)):
indices = np.take(indices, [0,2,3,1], axis=i)
indices = np.reshape(indices, [-1])
tensor_in = tf.reshape(tensor_in, x_shape[0:3] + [hws_srd_all] + [new_num, x_shape[4]])
tensor_in = tf.gather(tensor_in, indices, axis=3)
tensor_in = tf.reshape(tensor_in, x_shape[0:3] + hws_srds + [new_num, x_shape[4]])
tensor_in = tf.transpose(tensor_in, [0,1] + hgt_dims + [2] + wdh_dims + [3+2*num_srds, 4+2*num_srds])
tensor_out = tf.reshape(tensor_in, [x_shape[0], new_hgt, new_wdh, new_num, x_shape[4]])
print_activations(tensor_out)
return tensor_out
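#A minimal numpy round-trip sketch (illustrative toy sizes): unfolding a
#folded tensor with the same [2,2] stride and use_crs=False restores the
#original input, mirroring fold1 followed by unfold1.
def _fold_unfold_roundtrip():
    import numpy as np
    N, H, W, M, C = 1, 4, 4, 2, 3
    x = np.random.randn(N, H, W, M, C)
    f = x.reshape(N, H//2, 2, W//2, 2, M, C).transpose(0, 1, 3, 2, 4, 5, 6)
    f = f.reshape(N, H//2, W//2, 4*M, C)                        #fold
    u = f.reshape(N, H//2, W//2, 2, 2, M, C).transpose(0, 1, 3, 2, 4, 5, 6)
    u = u.reshape(N, H, W, M, C)                                #unfold
    assert np.allclose(u, x)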
def proj_relu_dropout1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
params['dropout']['shape'] = x_shape[0:4] + [1] #[N, H, W, M, 1]
with tf.variable_scope('proj_relu_dropout1_'+str(layer)) as scope:
relu = proj_relu1(tensor_in, 0, params, mtrain)
tensor_out = dropout1(relu, 0, params, mtrain)
return tensor_out
def proj_bn_relu_dropout1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
params['dropout']['shape'] = x_shape[0:4] + [1] #[N, H, W, M, 1]
    with tf.variable_scope('proj_bn_relu_dropout1_'+str(layer)) as scope:
relu = proj_bn_relu1(tensor_in, 0, params, mtrain)
tensor_out = dropout1(relu, 0, params, mtrain)
return tensor_out
def conv6(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['conv']['number'] #[4, 64]
shape = params['conv']['shape'] #[3, 3]
rate = params['conv']['rate'] #[1, 1]
stride = params['conv']['stride'] #[1, 1]
padding = params['conv']['padding']
use_bias = params['conv']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape = tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
shape = shape + [x_shape[3]*x_shape[4], number[0]*number[1]]
stride = [1, stride[0], stride[1], 1]
rate = [1, rate[0], rate[1], 1]
with tf.variable_scope('conv6_'+str(layer), reuse=reuse) as scope:
kernel = tf.get_variable(name='weights', shape=shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32),
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable)
if use_bias:
biases = tf.get_variable(name='biases', shape=[number[0]*number[1]], dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
tensor_in = tf.reshape(tensor_in, x_shape[0:3]+[x_shape[3]*x_shape[4]])
conv = tf.nn.conv2d(tensor_in, kernel, stride, padding=padding, dilations=rate)
if use_bias:
tensor_out = tf.nn.bias_add(conv, biases)
else:
tensor_out = conv
y_shape = get_shape(tensor_out) #[N, H', W', M'*C']
tensor_out = tf.reshape(tensor_out, y_shape[0:3]+number)
#tf.summary.histogram('conv', tensor_out)
print_activations(tensor_out)
return tensor_out
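#A minimal numpy shape sketch of conv6 above (illustrative toy sizes, a 1x1
#linear map standing in for tf.nn.conv2d): the vector-neuron axes (M, C) are
#merged into one channel axis, an ordinary convolution is applied, and the
#output channels are split back into (M', C').
def _conv6_shape_sketch():
    import numpy as np
    N, H, W, M, C, Mp, Cp = 2, 5, 5, 2, 4, 4, 8
    x = np.random.randn(N, H, W, M, C).reshape(N, H, W, M*C)
    kernel = np.random.randn(M*C, Mp*Cp)        #stand-in for a 1x1 conv kernel
    y = (x @ kernel).reshape(N, H, W, Mp, Cp)   #(N, H, W, M', C')
    assert y.shape == (N, H, W, Mp, Cp)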
def conv_bn6(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
params['conv']['use_bias'] = False
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('conv_bn6_'+str(layer)) as scope:
conv = conv6(tensor_in, 0, params, mtrain)
y_shape = get_shape(conv)
conv = tf.reshape(conv, y_shape[0:3]+[y_shape[3]*y_shape[4]])
bn = batchnorm1(conv, 0, params, mtrain)
tensor_out = tf.reshape(bn, y_shape)
print_activations(tensor_out)
return tensor_out
def conv_relu6(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('conv_relu6_'+str(layer)) as scope:
conv = conv6(tensor_in, 0, params, mtrain)
tensor_out = relu1(conv, 0, params, mtrain)
return tensor_out
def conv_bn_relu6(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('conv_bn_relu6_'+str(layer)) as scope:
bn = conv_bn6(tensor_in, 0, params, mtrain)
tensor_out = relu1(bn, 0, params, mtrain)
return tensor_out
def group_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
    1.Features are reduced or merged by compressing spatially adjacent features within a channel, rather than keeping channels while discarding spatial features as an ordinary CNN does.
    2.A CNN drops or merges features without selection and without regard for their spatial distribution; it also does so at the wrong time: because a CNN is fully connected across channels, it must apply pooling or strided convolution early to enlarge the spatial correlation range, thereby discarding spatial features whose correlations have not yet matured.
    3.The length of a vector neuron should be determined by how much expressive power that neuron is expected to have.
'''
num_output = params['group_unit']['num_output'] #[4, 64]
num_bottle = params['group_unit']['num_bottle'] #[4, 16]
rate = params['group_unit']['rate']
stride = params['group_unit']['stride']
use_attn = params['group_unit']['use_attn']
use_drop = params['group_unit']['use_drop']
kep_prob = params['group_unit']['kep_prob']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0] #(None, 256, 256, 1, 64)
with tf.variable_scope('group_unit1_'+str(layer)) as scope:
if np.any(np.asarray(stride) > 1):
params['fold'] = {'stride':stride, 'use_crs':True}
tensor_in = fold1(tensor_in, 0, params, mtrain)
x_shape = get_shape(tensor_in)
if x_shape[3] * x_shape[4] != num_output[0] * num_output[1]:
params['proj'] = {'number':num_output, 'shape':[1,1], 'rate':rate[0]+[1,1], 'stride':[1,1,1,1], \
'padding':'VALID', 'use_bias':False, 'use_attn':False}
shortcut = proj_bn1(tensor_in, 0, params, mtrain)
elif x_shape[3] != num_output[0] or x_shape[4] != num_output[1]:
shortcut = tf.reshape(tensor_in, x_shape[0:3]+num_output)
else:
shortcut = tensor_in
params['proj'] = {'number':num_bottle, 'shape':[1,1], 'rate':rate[0]+[1,1], 'stride':[1,1,1,1], \
'padding':'VALID', 'use_bias':False, 'use_attn':False}
residual = proj_bn_relu1(tensor_in, 0, params, mtrain)
params['proj'] = {'number':num_bottle, 'shape':[3,3], 'rate':rate[1]+[1,1], 'stride':[1,1,1,1], \
'padding':'SAME', 'use_bias':False, 'use_attn':use_attn}
residual = proj_bn_relu1(residual, 1, params, mtrain)
params['proj'] = {'number':num_output, 'shape':[1,1], 'rate':rate[0]+[1,1], 'stride':[1,1,1,1], \
'padding':'VALID', 'use_bias':False, 'use_attn':False}
residual = proj_bn1(residual, 1, params, mtrain)
tensor_out = residual + shortcut
tensor_out = relu1(tensor_out, 0, params, mtrain)
if use_drop:
y_shape = get_shape(tensor_out) #[N, H, W, M, C]
params['dropout'] = {'keep_p':kep_prob, 'shape':y_shape[0:4] + [1]}
tensor_out = dropout1(tensor_out, 0, params, mtrain)
return tensor_out
def attn1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['attn']['number']
shape = params['attn']['shape']
rate = params['attn']['rate']
stride = params['attn']['stride']
padding = params['attn']['padding']
use_bias = params['attn']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
m_shape = [shape[i]+(shape[i]-1)*(rate[i]-1) for i in range(2)]
q_shape = shape + x_shape[3:] + number #[h, w, M, C, M', C']
k_shape = shape + x_shape[3:] + [number[1]] #[h, w, M, C, C']
q_shape = [reduce(lambda x,y: x*y, q_shape[0:4]), q_shape[4]*q_shape[5]] #[h*w*M*C, M'*C']
with tf.variable_scope('attn1_'+str(layer)) as scope:
weight_q = tf.get_variable(name='weight_q', shape=q_shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable) #(h*w*M*C, M'*C')
weight_k = tf.get_variable(name='weight_k', shape=k_shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable) #(h, w, M, C, C')
if use_bias:
biase_q = tf.get_variable(name='biase_q', shape=q_shape[-1], dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable) #(M'*C')
biase_k = tf.get_variable(name='biase_k', shape=k_shape[:3]+[1,k_shape[-1]], dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable) #(h, w, M, 1, C')
if padding == 'SAME':
new_hgt = int(np.ceil(x_shape[1] / stride[0]))
new_wdh = int(np.ceil(x_shape[2] / stride[1]))
pad_hgt_all = (new_hgt - 1) * stride[0] + m_shape[0] - x_shape[1]
pad_wdh_all = (new_wdh - 1) * stride[1] + m_shape[1] - x_shape[2]
pad_top = pad_hgt_all // 2
pad_btm = pad_hgt_all - pad_top
pad_lft = pad_wdh_all // 2
pad_rgt = pad_wdh_all - pad_lft
paddings = [[0, 0], [pad_top, pad_btm], [pad_lft, pad_rgt], [0, 0], [0, 0]]
tensor_in = tf.pad(tensor_in, paddings, mode='CONSTANT', constant_values=0)
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
elif padding == 'VALID':
new_hgt = int(np.ceil((x_shape[1] - m_shape[0] + 1) / stride[0]))
new_wdh = int(np.ceil((x_shape[2] - m_shape[1] + 1) / stride[1]))
else:
raise ValueError('Invalid padding method!')
y_shape = [x_shape[0], new_hgt, new_wdh] + number
tensor_out = tf.TensorArray(dtype=tf.float32, size=y_shape[1]*y_shape[2], dynamic_size=False, clear_after_read=True, \
tensor_array_name=None, handle=None, flow=None, infer_shape=True, \
element_shape=[y_shape[0]]+number, colocate_with_first_write_call=True) #(H*W, N, M', C')
def cond(i, tensor_out):
c = tf.less(i, y_shape[1]*y_shape[2])
return c
def body(i, tensor_out):
ymn = i // y_shape[2] * stride[0]
xmn = i % y_shape[2] * stride[1]
ymx = ymn + m_shape[0]
xmx = xmn + m_shape[1]
fetx = tensor_in[:, ymn:ymx:rate[0], xmn:xmx:rate[1], :, :] #(N, h, w, M, C)
fett = tf.reshape(fetx, [y_shape[0], -1]) #(N, h*w*M*C)
fetq = tf.matmul(fett, weight_q) #(N, M'*C') (N, h*w*M*C) (h*w*M*C, M'*C')
fetq = fetq + biase_q if use_bias else fetq #(N, M'*C')
fetq = tf.reshape(fetq, [y_shape[0]]+number) #(N, M', C')
fett = tf.transpose(fetx, [1, 2, 3, 0, 4]) #(h, w, M, N, C)
fetk = tf.matmul(fett, weight_k) #(h, w, M, N, C') (h, w, M, N, C) (h, w, M, C, C')
fetk = fetk + biase_k if use_bias else fetk #(h, w, M, N, C')
fetk = tf.transpose(fetk, [3, 0, 1, 2, 4]) #(N, h, w, M, C')
fetk = tf.reshape(fetk, [y_shape[0], -1, number[1]]) #(N, h*w*M, C')
atts = tf.matmul(fetq, fetk, transpose_b=True) #(N, M', h*w*M)
atts = atts / np.sqrt(number[1]) #(N, M', h*w*M)
atts = tf.nn.softmax(atts, axis=-1) #(N, M', h*w*M)
fetk = tf.matmul(atts, fetk) #(N, M', C') (N, M', h*w*M) (N, h*w*M, C')
fetq = fetq + fetk #(N, M', C')
tensor_out = tensor_out.write(i, fetq) #(H'*W', N, M', C')
return [i+1, tensor_out]
i = tf.constant(0)
[i, tensor_out] = tf.while_loop(cond, body, loop_vars=[i, tensor_out], shape_invariants=None, \
parallel_iterations=y_shape[1]*y_shape[2], back_prop=True, swap_memory=False)
tensor_out = tensor_out.stack() #(H'*W', N, M', C')
tensor_out = tf.transpose(tensor_out, [1, 0, 2, 3]) #(N, H'*W', M', C')
tensor_out = tf.reshape(tensor_out, y_shape) #(N, H', W', M', C')
print_activations(tensor_out)
return tensor_out
def attn1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['attn']['number']
shape = params['attn']['shape']
rate = params['attn']['rate']
stride = params['attn']['stride']
padding = params['attn']['padding']
use_bias = params['attn']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
m_shape = [shape[i]+(shape[i]-1)*(rate[i]-1) for i in range(2)]
q_shape = shape + x_shape[3:] + number #[h, w, M, C, M', C']
k_shape = shape + x_shape[3:] + [number[1]] #[h, w, M, C, C']
q_shape = [reduce(lambda x,y: x*y, q_shape[0:4]), q_shape[4]*q_shape[5]] #[h*w*M*C, M'*C']
with tf.variable_scope('attn1_'+str(layer)) as scope:
weight_q = tf.get_variable(name='weight_q', shape=q_shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable) #(h*w*M*C, M'*C')
weight_k = tf.get_variable(name='weight_k', shape=k_shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable) #(h, w, M, C, C')
if use_bias:
biase_q = tf.get_variable(name='biase_q', shape=q_shape[-1], dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
biase_k = tf.get_variable(name='biase_k', shape=k_shape[-1], dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
def attn_img(tensor_in):
x_shape = get_shape(tensor_in) #[H, W, M, C]
if padding == 'SAME':
new_hgt = int(np.ceil(x_shape[0] / stride[0]))
new_wdh = int(np.ceil(x_shape[1] / stride[1]))
pad_hgt_all = (new_hgt - 1) * stride[0] + m_shape[0] - x_shape[0]
pad_wdh_all = (new_wdh - 1) * stride[1] + m_shape[1] - x_shape[1]
pad_top = pad_hgt_all // 2
pad_btm = pad_hgt_all - pad_top
pad_lft = pad_wdh_all // 2
pad_rgt = pad_wdh_all - pad_lft
paddings = [[pad_top, pad_btm], [pad_lft, pad_rgt], [0, 0], [0, 0]]
tensor_in = tf.pad(tensor_in, paddings, mode='CONSTANT', constant_values=0)
x_shape = get_shape(tensor_in) #[H, W, M, C]
elif padding == 'VALID':
new_hgt = int(np.ceil((x_shape[0] - m_shape[0] + 1) / stride[0]))
new_wdh = int(np.ceil((x_shape[1] - m_shape[1] + 1) / stride[1]))
else:
raise ValueError('Invalid padding method!')
y_shape = [new_hgt, new_wdh] + number
tensor_out = tf.TensorArray(dtype=tf.float32, size=y_shape[0]*y_shape[1], dynamic_size=False, clear_after_read=True, \
tensor_array_name=None, handle=None, flow=None, infer_shape=True, \
element_shape=number, colocate_with_first_write_call=True) #(H*W, M', C')
def cond(i, tensor_out):
c = tf.less(i, y_shape[0]*y_shape[1])
return c
def body(i, tensor_out):
ymn = i // y_shape[1] * stride[0]
xmn = i % y_shape[1] * stride[1]
ymx = ymn + m_shape[0]
xmx = xmn + m_shape[1]
fetx = tensor_in[ymn:ymx:rate[0], xmn:xmx:rate[1], :, :] #(h, w, M, C)
fett = tf.reshape(fetx, [1, -1]) #(1, h*w*M*C)
fetq = tf.matmul(fett, weight_q) #(1, M'*C') (1, h*w*M*C) (h*w*M*C, M'*C')
fetq = fetq + biase_q if use_bias else fetq #(1, M'*C')
fetq = tf.reshape(fetq, number) #(M', C')
fett = tf.expand_dims(fetx, axis=3) #(h, w, M, 1, C)
fetk = tf.matmul(fett, weight_k) #(h, w, M, 1, C') (h, w, M, 1, C) (h, w, M, C, C')
fetk = fetk + biase_k if use_bias else fetk #(h, w, M, 1, C')
fetk = tf.reshape(fetk, [-1, number[1]]) #(h*w*M, C')
atts = tf.matmul(fetq, fetk, transpose_b=True) #(M', h*w*M)
atts = atts / np.sqrt(number[1]) #(M', h*w*M)
atts = tf.nn.softmax(atts, axis=-1) #(M', h*w*M)
fetk = tf.matmul(atts, fetk) #(M', C') (M', h*w*M) (h*w*M, C')
fetq = fetq + fetk #(M', C')
tensor_out = tensor_out.write(i, fetq) #(H'*W', M', C')
return [i+1, tensor_out]
#pra_itrs = max(y_shape[0] * y_shape[1] // 64, 16)
i = tf.constant(0)
[i, tensor_out] = tf.while_loop(cond, body, loop_vars=[i, tensor_out], shape_invariants=None, \
parallel_iterations=10, back_prop=True, swap_memory=False)
tensor_out = tensor_out.stack() #(H'*W', M', C')
tensor_out = tf.reshape(tensor_out, y_shape) #(H', W', M', C')
return tensor_out
tensor_out = tf.map_fn(attn_img, tensor_in, dtype=tf.float32, parallel_iterations=10, \
back_prop=True, swap_memory=False, infer_shape=True) #(N, H', W', M', C')
print_activations(tensor_out)
return tensor_out
def proj1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['proj']['number'] #[4, 64]
shape = params['proj']['shape'] #[3, 3]
rate = params['proj']['rate'] #[b, 2, 1, 1]
stride = params['proj']['stride'] #[2, 2, 1, 1]
padding = params['proj']['padding']
use_bias = params['proj']['use_bias']
use_attn = params['proj']['use_attn']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('proj1_'+str(layer), reuse=reuse) as scope:
if np.any(np.asarray(stride[0:2]) > 1):
params['fold'] = {'stride':stride[0:2], 'use_crs':True}
tensor_in = fold1(tensor_in, 0, params, mtrain)
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
        #connections are tight within a block and loose across blocks
tensor_in = tf.reshape(tensor_in, x_shape[:3]+[rate[0],x_shape[3]//rate[0],x_shape[4]]) #(N, H, W, b, M', C)
tensor_in = tf.transpose(tensor_in, [3, 0, 1, 2, 4, 5]) #(b, N, H, W, M', C)
x_shape = get_shape(tensor_in) #[b, N, H, W, M', C]
        #partition the vector neurons again according to the dilation rate along the channel axis
tensor_in = tf.reshape(tensor_in, x_shape[:4]+[x_shape[4]//rate[1],rate[1],x_shape[5]]) #(b, N, H, W, M", r, C)
tensor_in = tf.transpose(tensor_in, [0, 5, 1, 2, 3, 4, 6]) #(b, r, N, H, W, M", C)
x_shape = get_shape(tensor_in) #[b, r, N, H, W, M", C]
y_shape = x_shape[0:5] + number #[b, r, N, H, W, M_, C_]
        tensor_in = tf.reshape(tensor_in, [x_shape[0]*x_shape[1]]+x_shape[2:]) #(b*r, N, H, W, M", C)
if use_attn:
params['attn'] = {'number':number, 'shape':shape, 'rate':rate[2:], 'stride':stride[2:], \
'padding':padding, 'use_bias':use_bias}
tensor_out = tf.map_fn(lambda x: attn1(x, 0, params, None), tensor_in, dtype=tf.float32, \
parallel_iterations=10, \
back_prop=True, swap_memory=False, infer_shape=True) #(b*r, N, H, W, M_, C_)
else:
params['conv'] = {'number':number, 'shape':shape, 'rate':rate[2:], 'stride':stride[2:], \
'padding':padding, 'use_bias':use_bias}
tensor_out = tf.map_fn(lambda x: conv6(x, 0, params, None), tensor_in, dtype=tf.float32, \
parallel_iterations=10, \
back_prop=True, swap_memory=False, infer_shape=True) #(b*r, N, H, W, M_, C_)
tensor_out = tf.reshape(tensor_out, y_shape) #(b, r, N, H, W, M_, C_)
tensor_out = tf.transpose(tensor_out, [2, 3, 4, 0, 5, 1, 6]) #(N, H, W, b, M_, r, C_)
y_shape = get_shape(tensor_out) #[N, H, W, b, M_, r, C_]
tensor_out = tf.reshape(tensor_out, y_shape[0:3] + \
[y_shape[3]*y_shape[4]*y_shape[5], y_shape[6]]) #(N, H, W, M*, C_)
#tf.summary.histogram('proj', tensor_out)
print_activations(tensor_out)
return tensor_out
def attn1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['attn']['number']
shape = params['attn']['shape']
rate = params['attn']['rate']
stride = params['attn']['stride']
padding = params['attn']['padding']
use_bias = params['attn']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
m_shape = [shape[i]+(shape[i]-1)*(rate[i]-1) for i in range(2)]
shape = shape + x_shape[3:] + number #[h, w, M, C, M', C']
with tf.variable_scope('attn1_'+str(layer)) as scope:
weights = tf.get_variable(name='weights', shape=shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable)
if use_bias:
biases = tf.get_variable(name='biases', shape=number, dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
def attn_img(tensor_in):
x_shape = get_shape(tensor_in) #[H, W, M, C]
if padding == 'SAME':
new_hgt = int(np.ceil(x_shape[0] / stride[0]))
new_wdh = int(np.ceil(x_shape[1] / stride[1]))
pad_hgt_all = (new_hgt - 1) * stride[0] + m_shape[0] - x_shape[0]
pad_wdh_all = (new_wdh - 1) * stride[1] + m_shape[1] - x_shape[1]
pad_top = pad_hgt_all // 2
pad_btm = pad_hgt_all - pad_top
pad_lft = pad_wdh_all // 2
pad_rgt = pad_wdh_all - pad_lft
paddings = [[pad_top, pad_btm], [pad_lft, pad_rgt], [0, 0], [0, 0]]
tensor_in = tf.pad(tensor_in, paddings, mode='CONSTANT', constant_values=0)
x_shape = get_shape(tensor_in) #[H, W, M, C]
elif padding == 'VALID':
new_hgt = int(np.ceil((x_shape[0] - m_shape[0] + 1) / stride[0]))
new_wdh = int(np.ceil((x_shape[1] - m_shape[1] + 1) / stride[1]))
else:
raise ValueError('Invalid padding method!')
y_shape = [new_hgt, new_wdh] + number
tensor_out = tf.TensorArray(dtype=tf.float32, size=y_shape[0]*y_shape[1], dynamic_size=False, clear_after_read=True, \
tensor_array_name=None, handle=None, flow=None, infer_shape=True, \
element_shape=number, colocate_with_first_write_call=True) #(H*W, M', C')
def cond(i, tensor_out):
c = tf.less(i, y_shape[0]*y_shape[1])
return c
def body(i, tensor_out):
ymn = i // y_shape[1] * stride[0]
xmn = i % y_shape[1] * stride[1]
ymx = ymn + m_shape[0]
xmx = xmn + m_shape[1]
fetx = tensor_in[ymn:ymx:rate[0], xmn:xmx:rate[1], :, :] #(h, w, M, C) (h, w, M, C, M', C')
fety = tf.einsum('ijkl,ijklmn->mn', fetx, weights) #(M', C')
fetz = tf.einsum('ijkl,ijklmn->ijkn', fetx, weights) #(h, w, M, C')
'''
fetx = tf.reshape(fetx, [1, -1]) #(1, h*w*M*C)
wgts = tf.reshape(weights, [-1, number[0]*number[1]]) #(h*w*M*C, M'*C')
fety = tf.matmul(fetx, wgts) #(1, M'*C')
fety = tf.reshape(fety, number) #(M', C')
'''
            #each vector neuron C predicts M' vector neurons C'; the mean of these M' predictions C' is taken as the prediction for neuron C
            #that prediction C' is biased toward a reasonable output more than the input C is, so measuring its similarity against the outputs predicted from all inputs gives a more definite similarity value
            atts = tf.einsum('ijkn,mn->ijkm', fetz, fety) #(h, w, M, M')
            #softmax
            wgts = weights * atts[:, :, :, tf.newaxis, :, tf.newaxis] #(h, w, M, C, M', C')
            fety = tf.einsum('ijkl,ijklmn->mn', fetx, wgts) + fety #(M', C')
if use_bias:
fety = fety + biases
tensor_out = tensor_out.write(i, fety) #(H'*W', M', C')
return [i+1, tensor_out]
#pra_itrs = max(y_shape[0] * y_shape[1] // 64, 16)
i = tf.constant(0)
[i, tensor_out] = tf.while_loop(cond, body, loop_vars=[i, tensor_out], shape_invariants=None, \
parallel_iterations=10, back_prop=True, swap_memory=True)
tensor_out = tensor_out.stack() #(H'*W', M', C')
tensor_out = tf.reshape(tensor_out, y_shape) #(H', W', M', C')
return tensor_out
tensor_out = tf.map_fn(attn_img, tensor_in, dtype=tf.float32, parallel_iterations=10, \
back_prop=True, swap_memory=True, infer_shape=True) #(N, H', W', M', C')
print_activations(tensor_out)
return tensor_out
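#A minimal numpy check (illustrative toy sizes): the contraction
#einsum('ijkl,ijklmn->mn') used in body() above is identical to flattening the
#window and the weight tensor and taking one matmul, as the quoted block notes.
def _einsum_matmul_check():
    import numpy as np
    h, w, M, C, Mp, Cp = 2, 2, 3, 4, 2, 5
    fetx = np.random.randn(h, w, M, C)
    weights = np.random.randn(h, w, M, C, Mp, Cp)
    fety = np.einsum('ijkl,ijklmn->mn', fetx, weights)
    ref = (fetx.reshape(1, -1) @ weights.reshape(-1, Mp*Cp)).reshape(Mp, Cp)
    assert np.allclose(fety, ref)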
def attn1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C].
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['attn']['number']
shape = params['attn']['shape']
rate = params['attn']['rate']
stride = params['attn']['stride']
padding = params['attn']['padding']
use_bias = params['attn']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, M, C]
m_shape = [shape[i]+(shape[i]-1)*(rate[i]-1) for i in range(2)]
shape = shape + x_shape[3:] + number #[h, w, M, C, M', C']
with tf.variable_scope('attn1_'+str(layer)) as scope:
weights = tf.get_variable(name='weights', shape=shape, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable)
if use_bias:
biases = tf.get_variable(name='biases', shape=number, dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
    def attn_img(tensor_in):
        x_shape = get_shape(tensor_in) #[H, W, M, C]
        if padding == 'VALID':
            y_shape = [int(np.ceil((x_shape[i]-m_shape[i]+1)/stride[i])) for i in range(2)] + number
            crd_sta = [m_shape[i]//2 for i in range(2)]
            crd_end = [crd_sta[i]+(y_shape[i]-1)*stride[i] for i in range(2)]
        elif padding == 'SAME':
            #note: 'SAME' should first pad tensor_in with tf.pad so the sampling grid stays in
            #bounds; this draft leaves that padding step unimplemented
            y_shape = [int(np.ceil(x_shape[i]/stride[i])) for i in range(2)] + number
            crd_sta = [0, 0]
            crd_end = [crd_sta[i]+(y_shape[i]-1)*stride[i] for i in range(2)]
        else:
            raise ValueError('Invalid padding method!')
tensor_out = tf.TensorArray(dtype=tf.float32, size=y_shape[0]*y_shape[1], dynamic_size=False, clear_after_read=True, \
tensor_array_name=None, handle=None, flow=None, infer_shape=True, \
element_shape=number, colocate_with_first_write_call=True) #(H*W, M', C')
def cond(i, tensor_out):
c = tf.less(i, y_shape[0]*y_shape[1])
return c
def body(i, tensor_out):
ycd = i // y_shape[1] * stride[0] + crd_sta[0]
xcd = i % y_shape[1] * stride[1] + crd_sta[1]
crd = tf.stack([ycd, xcd], axis=0) #(2)
ymn = ycd - ((shape[0] - 1) // 2) * rate[0]
xmn = xcd - ((shape[1] - 1) // 2) * rate[1]
'''
ycds = tf.concat([[ymn], tf.tile([rate[0]], [shape[0]-1])], axis=0)
xcds = tf.concat([[xmn], tf.tile([rate[1]], [shape[1]-1])], axis=0)
ycds = tf.cumsum(ycds, axis=0, exclusive=False, reverse=False)
xcds = tf.cumsum(xcds, axis=0, exclusive=False, reverse=False)
yixs = tf.where(tf.logical_and(ycds>=0, ycds<x_shape[0]))[:, 0]
ycds = tf.gather(ycds, yixs)
xixs = tf.where(tf.logical_and(xcds>=0, xcds<x_shape[1]))[:, 0]
xcds = tf.gather(xcds, xixs)
ycds = tf.tile(ycds[:, tf.newaxis], [1, tf.shape(xcds)[0]]) #(h, w)
xcds = tf.tile(xcds[tf.newaxis, :], [tf.shape(ycds)[0], 1]) #(h, w)
crds = tf.stack([ycds, xcds], axis=-1) #(h, w, 2)
yixs = tf.tile(yixs[:, tf.newaxis], [1, tf.shape(xixs)[0]]) #(h, w)
xixs = tf.tile(xixs[tf.newaxis, :], [tf.shape(yixs)[0], 1]) #(h, w)
idxs = tf.stack([yixs, xixs], axis=-1) #(h, w, 2)
wgts = tf.gather_nd(weights, idxs) #(h, w, M, C, M', C')
fetx = tf.gather_nd(tensor_in, crds) #(h, w, M, C)
fety = tf.einsum('ijkl,ijklmn->mn', fetx, wgts) #(M', C')
            #each vector neuron C predicts M' vector neurons C'; the mean of these M' predictions C' is taken as the prediction for neuron C
            #that prediction C' is biased toward a reasonable output more than the input C is, so measuring its similarity against the outputs predicted from all inputs gives a more definite similarity value
fetz = tf.einsum('ijkl,ijklmn->ijkn', fetx, wgts) #(h, w, M, C')
atts = tf.einsum('ijkn,mn->ijkm', fetz, fety) #(h, w, M, M')
wgts = wgts * atts[:, :, :, tf.newaxis, :, tf.newaxis] #(h, w, M, C, M', C')
fety = tf.einsum('ijkl,ijklmn->mn', fetx, wgts) + fety #(M', C')
fety = fety + biases if use_bias else fety
tensor_out = tensor_out.write(i, fety) #(H'*W', M', C')
'''
tensor_out = tensor_out.write(i, tensor_in[crd[0], crd[1]])
return [i+1, tensor_out]
#pra_itrs = max(y_shape[0] * y_shape[1] // 64, 16)
i = tf.constant(0)
[i, tensor_out] = tf.while_loop(cond, body, loop_vars=[i, tensor_out], shape_invariants=None, \
parallel_iterations=1, back_prop=True, swap_memory=True)
tensor_out = tensor_out.stack() #(H'*W', M', C')
tensor_out = tf.reshape(tensor_out, y_shape) #(H', W', M', C')
return tensor_out
tensor_out = tf.map_fn(attn_img, tensor_in, dtype=tf.float32, parallel_iterations=1, \
back_prop=True, swap_memory=True, infer_shape=True) #(N, H', W', M', C')
print_activations(tensor_out)
return tensor_out
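#A minimal NumPy sketch (illustrative only, not part of the graph) of the windowed
#vector-neuron attention disabled above: every input neuron (M, C) predicts M'
#output neurons of length C' through the window weights, the mean prediction scores
#each per-input prediction, and the attention-reweighted result refines the output.
def _demo_vector_attention(h=3, w=3, M=4, C=8, Mp=4, Cp=8):
    fetx = np.random.randn(h, w, M, C)                     #window features
    wgts = np.random.randn(h, w, M, C, Mp, Cp)             #window weights
    fety = np.einsum('ijkl,ijklmn->mn', fetx, wgts)        #mean prediction (M', C')
    fetz = np.einsum('ijkl,ijklmn->ijkn', fetx, wgts)      #per-input predictions (h, w, M, C')
    atts = np.einsum('ijkn,mn->ijkm', fetz, fety)          #similarity scores (h, w, M, M')
    wgtd = wgts * atts[:, :, :, np.newaxis, :, np.newaxis] #(h, w, M, C, M', C')
    return np.einsum('ijkl,ijklmn->mn', fetx, wgtd) + fety #(M', C')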
def fold1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C]
'''
stride = params['fold']['stride'] #[[2, 2], [2, 2]]
use_crs = params['fold']['use_crs']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
if isinstance(stride[0], int):
stride = [stride]
#stride = stride[::-1]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in)
with tf.variable_scope('fold1_'+str(layer)) as scope:
num_srds = len(stride)
hgt_srds = [srd[0] for srd in stride]
wdh_srds = [srd[1] for srd in stride]
hgt_srd_all = reduce(lambda x,y: x*y, hgt_srds)
wdh_srd_all = reduce(lambda x,y: x*y, wdh_srds)
hws_srd_all = hgt_srd_all * wdh_srd_all
hgt_dims = [ 2 + i for i in range(num_srds)]
wdh_dims = [num_srds + 3 + i for i in range(num_srds)]
hws_dims = [[hgt_dims[i], wdh_dims[i]] for i in range(num_srds)]
hws_dims = reduce(lambda x,y: x+y, hws_dims)
new_num = x_shape[3] * hws_srd_all
new_hgt = x_shape[1] // hgt_srd_all
new_wdh = x_shape[2] // wdh_srd_all
old_hgt = new_hgt * hgt_srd_all
old_wdh = new_wdh * wdh_srd_all
if old_hgt != x_shape[1] or old_wdh != x_shape[2]:
tensor_in = tensor_in[:, :old_hgt, :old_wdh, :, :]
#x_shape = get_shape(tensor_in)
tensor_in = tf.reshape(tensor_in, [x_shape[0], new_hgt] + hgt_srds + [new_wdh] + wdh_srds + x_shape[3:])
tensor_in = tf.transpose(tensor_in, [0, 1, 2+num_srds] + hws_dims + [3+2*num_srds, 4+2*num_srds])
        if use_crs:
            #Cross-order the stacked positions. The multi-level reordering sketched in
            #the original draft was left unfinished; as a minimal assumed completion the
            #last two positions of each block are swapped (0-based [0, 1, 3, 2], the
            #1-based [1, 2, 4, 3] pattern from the draft) so diagonal neighbours stay adjacent.
            idxs = np.arange(hws_srd_all)
            if hws_srd_all >= 4:
                idxs[[-2, -1]] = idxs[[-1, -2]]
            tensor_in = tf.reshape(tensor_in, [x_shape[0], new_hgt, new_wdh, hws_srd_all] + x_shape[3:])
            tensor_in = tf.gather(tensor_in, idxs, axis=3)
tensor_out = tf.reshape(tensor_in, [x_shape[0], new_hgt, new_wdh, new_num, x_shape[4]])
print_activations(tensor_out)
return tensor_out
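#A minimal NumPy sketch (illustrative only) of what fold1 computes for a single
#stride of [2, 2]: a space-to-depth move that stacks each 2x2 spatial block onto
#the neuron axis, so [N, H, W, M, C] becomes [N, H/2, W/2, 4*M, C].
def _demo_fold1(N=1, H=4, W=4, M=2, C=3):
    x = np.arange(N*H*W*M*C).reshape(N, H, W, M, C)
    y = x.reshape(N, H//2, 2, W//2, 2, M, C)
    y = y.transpose(0, 1, 3, 2, 4, 5, 6)           #(N, H/2, W/2, 2, 2, M, C)
    return y.reshape(N, H//2, W//2, 4*M, C)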
def unfold1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C]
'''
stride = params['unfold']['stride']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
if isinstance(stride[0], int):
stride = [stride]
#stride = stride[::-1]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in)
with tf.variable_scope('unfold1_'+str(layer)) as scope:
num_srds = len(stride)
hgt_srds = [srd[0] for srd in stride]
wdh_srds = [srd[1] for srd in stride]
hws_srds = reduce(lambda x,y: x+y, stride )
hgt_srd_all = reduce(lambda x,y: x*y, hgt_srds)
wdh_srd_all = reduce(lambda x,y: x*y, wdh_srds)
hws_srd_all = hgt_srd_all * wdh_srd_all
hgt_dims = [3 + 2 * i for i in range(num_srds)]
wdh_dims = [4 + 2 * i for i in range(num_srds)]
new_num = x_shape[3] // hws_srd_all
new_hgt = x_shape[1] * hgt_srd_all
new_wdh = x_shape[2] * wdh_srd_all
old_num = new_num * hws_srd_all
if old_num != x_shape[3]:
tensor_in = tensor_in[:, :, :, :old_num, :]
#x_shape = get_shape(tensor_in)
tensor_in = tf.reshape(tensor_in, x_shape[0:3] + hws_srds + [new_num, x_shape[4]])
tensor_in = tf.transpose(tensor_in, [0, 1] + hgt_dims + [2] + wdh_dims + [3+2*num_srds, 4+2*num_srds])
tensor_out = tf.reshape(tensor_in, [x_shape[0], new_hgt, new_wdh, new_num, x_shape[4]])
print_activations(tensor_out)
return tensor_out
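#A minimal NumPy check (illustrative only) that unfold1 inverts fold1: unstacking
#the 2x2 groups from the neuron axis restores the original spatial layout.
def _demo_unfold1_inverts_fold1(N=1, H=4, W=4, M=2, C=3):
    x = np.arange(N*H*W*M*C).reshape(N, H, W, M, C)
    y = _demo_fold1(N, H, W, M, C)                 #(N, H/2, W/2, 4*M, C)
    z = y.reshape(N, H//2, W//2, 2, 2, M, C)
    z = z.transpose(0, 1, 3, 2, 4, 5, 6)           #(N, H/2, 2, W/2, 2, M, C)
    return np.array_equal(z.reshape(N, H, W, M, C), x) #True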
def group_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C]
'''
num_output = params['group_unit']['num_output'] #[4, 64]
num_bottle = params['group_unit']['num_bottle'] #[4, 16]
rate = params['group_unit']['rate']
stride = params['group_unit']['stride']
use_attn = params['group_unit']['use_attn']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0] #(None, 256, 256, 16, 64)
if isinstance(rate[0], int):
rate = [rate]
with tf.variable_scope('group_unit1_'+str(layer)) as scope:
if np.any(np.asarray(stride) > 1):
params['fold'] = {'stride': stride}
tensor_in = fold1(tensor_in, 0, params, mtrain)
params['proj'] = {'number':num_bottle, 'shape':[1,1], 'rate':rate[ 0]+[1,1], 'stride':[1,1,1,1], \
'padding':'VALID', 'use_bias':False, 'use_attn':False}
residual = proj_bn_relu1(tensor_in, 0, params, mtrain)
res_lst = []
for i in range(len(rate)-2):
params['proj'] = {'number':num_bottle, 'shape':[3,3], 'rate':rate[1+i]+[1,1], 'stride':[1,1,1,1], \
'padding':'SAME', 'use_bias':False, 'use_attn':use_attn}
residual = proj_bn_relu1(residual, 1+i, params, mtrain)
params['proj'] = {'number':num_output, 'shape':[1,1], 'rate':rate[ -1]+[1,1], 'stride':[1,1,1,1], \
'padding':'VALID', 'use_bias':False, 'use_attn':False}
residual = proj_bn1(residual, 0, params, mtrain)
tensor_out = tensor_in + residual
tensor_out = relu1(tensor_out, 0, params, mtrain)
return tensor_out
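#Hedged usage sketch: the params layout read by this vector-neuron group_unit1.
#Note that group_unit1 is redefined later in this file, so a live call resolves to
#that later version; the values here are assumed examples only.
def _demo_group_unit1_params():
    return {'group_unit': {'num_output': [4, 64], 'num_bottle': [4, 16],
                           'rate': [[1, 1], [1, 1], [1, 1]],
                           'stride': [2, 2], 'use_attn': False}}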
def proj1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    For vector neurons only; the input shape is [N, H, W, M, C]
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['proj']['number'] #[4, 64]
shape = params['proj']['shape'] #[3, 3]
rate = params['proj']['rate'] #[b, 2, 1, 1]
stride = params['proj']['stride'] #[2, 2, 1, 1]
padding = params['proj']['padding']
use_bias = params['proj']['use_bias']
use_attn = params['proj']['use_attn']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
if use_attn:
use_bias = True
with tf.variable_scope('proj1_'+str(layer), reuse=reuse) as scope:
if np.any(np.asarray(stride[0:2]) > 1):
params['fold']['stride'] = stride[0:2]
tensor_in = fold1(tensor_in, 0, params, mtrain)
x_shape = get_shape(tensor_in) #[None, 256, 256, 16, 64]
        #Connections are dense inside a block and sparse across blocks
tensor_in = tf.reshape(tensor_in, x_shape[:3]+[rate[0],x_shape[3]//rate[0],x_shape[4]]) #(None, 128, 128, 2, 8, 64)
tensor_in = tf.transpose(tensor_in, [3, 0, 1, 2, 4, 5]) #(2, None, 128, 128, 8, 64)
x_shape = get_shape(tensor_in) #[2, None, 128, 128, 8, 64]
        #Partition the vector neurons again according to the dilation rate along the channel direction
tensor_in = tf.reshape(tensor_in, x_shape[:4]+[x_shape[4]//rate[1],rate[1],x_shape[5]]) #(2, None, 128, 128, 4, 2, 64)
tensor_in = tf.transpose(tensor_in, [0, 5, 1, 2, 3, 4, 6]) #(2, 2, None, 128, 128, 4, 64)
x_shape = get_shape(tensor_in) #[2, 2, None, 128, 128, 4, 64]
y_shape = x_shape[0:5] + number #[2, 2, None, 128, 128, 4, 64]
        #Reshape so that the convolution can be applied
tsr_int = tf.reshape(tensor_in, [x_shape[0]*x_shape[1]] + x_shape[2:5] + \
[x_shape[5]*x_shape[6]]) #(2*2, None, 128, 128, 4*64)
tsr_shp = get_shape(tsr_int) #[2*2, None, 128, 128, 4*64]
wgt_srd = [1, stride[2], stride[3], 1] #[1, 1, 1, 1]
wgt_rat = [1, rate[2], rate[3], 1] #[1, 1, 1, 1]
wgt_shp = [tsr_shp[0]] + shape + [tsr_shp[4], y_shape[5]*y_shape[6]] #[2*2, 3, 3, 4*64, 4*64]
weights = tf.get_variable(name='weights', shape=wgt_shp, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable)
if use_bias:
bia_shp = [tsr_shp[0], 1, 1, 1, y_shape[5]*y_shape[6]] #[2*2, 1, 1, 1, 4*64]
biases = tf.get_variable(name='biases', shape=bia_shp, dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
elems = [tsr_int, weights]
tsr_cov = tf.map_fn(lambda x: tf.nn.conv2d(x[0], x[1], wgt_srd, padding=padding, dilations=wgt_rat), \
elems, dtype=tf.float32, parallel_iterations=128, \
back_prop=True, swap_memory=True, infer_shape=True) #(2*2, None, 128, 128, 4*64)
if use_bias:
tsr_cov = tsr_cov + biases #(2*2, None, 128, 128, 4*64)
if use_attn:
elems = [tsr_int, tsr_cov, weights]
tsr_att = tf.map_fn(atten, elems, dtype=tf.float32, parallel_iterations=128, \
back_prop=True, swap_memory=True, infer_shape=True) #(2*2, None, 128, 128, 4*64)
tsr_att = tsr_att + biases
tsr_out = tsr_cov + tsr_att #(2*2, None, 128, 128, 4*64)
else:
tsr_out = tsr_cov #(2*2, None, 128, 128, 4*64)
tensor_out = tf.reshape(tsr_out, y_shape) #(2, 2, None, 128, 128, 4, 64)
tensor_out = tf.transpose(tensor_out, [2, 3, 4, 0, 5, 1, 6]) #(None, 128, 128, 2, 4, 2, 64)
y_shape = get_shape(tensor_out) #[None, 128, 128, 2, 4, 2, 64]
tensor_out = tf.reshape(tensor_out, y_shape[0:3] + \
[y_shape[3]*y_shape[4]*y_shape[5], y_shape[6]]) #(None, 128, 128, 16, 64)
#tf.summary.histogram('proj', tensor_out)
print_activations(tensor_out)
return tensor_out
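#A minimal NumPy sketch (illustrative only) of the grouping proj1 performs before
#its map_fn convolution: rate[0] splits the M neurons into blocks and rate[1]
#interleaves the channels inside each block, giving rate[0]*rate[1] independent
#groups that are each convolved with their own filter slice.
def _demo_proj1_grouping(N=1, H=4, W=4, M=16, C=8, rate0=2, rate1=2):
    x = np.random.randn(N, H, W, M, C)
    x = x.reshape(N, H, W, rate0, M//rate0, C).transpose(3, 0, 1, 2, 4, 5)
    x = x.reshape(rate0, N, H, W, (M//rate0)//rate1, rate1, C)
    x = x.transpose(0, 5, 1, 2, 3, 4, 6)           #(rate0, rate1, N, H, W, M/(r0*r1), C)
    return x.reshape(rate0*rate1, N, H, W, -1)     #one flattened group per conv2d call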
#############4 neurons#############(128, 128, 4, 64)
#(128, 128, 4, 64)--[4, 64, 4, 16]--(128, 128, 4, 16) #input [4, 16] [1, 1, 1, 1] [ 1, 1, 1, 1]
'''
#(128, 128, 4, 16)--[4, 16, 4, 16]--(128, 128, 4, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 1, 1, 1]
'''
#(128, 128, 4, 16)--[4, 16, 4, 64]--(128, 128, 4, 64) #output [4, 64] [1, 1, 1, 1] [ 1, 1, 1, 1]
#############16 neurons############( 64, 64, 16, 64)
#( 64, 64, 16, 64)--[4, 64, 4, 16]--( 64, 64, 16, 16) #input [4, 16] [1, 1, 1, 1] [ 1, 4, 1, 1]
'''
#( 64, 64, 16, 16)--[4, 16, 4, 16]--( 64, 64, 16, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 4, 1, 1]
#( 64, 64, 16, 16)--[4, 16, 4, 16]--( 64, 64, 16, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 1, 1, 1]
'''
#( 64, 64, 16, 16)--[4, 16, 4, 64]--( 64, 64, 16, 64) #output [4, 64] [1, 1, 1, 1] [ 4, 1, 1, 1]
#############64 neurons############( 32, 32, 64, 64)
#( 32, 32, 64, 64)--[4, 64, 4, 16]--( 32, 32, 64, 16) #input [4, 16] [1, 1, 1, 1] [ 1, 16, 1, 1]
'''
#( 32, 32, 64, 16)--[4, 16, 4, 16]--( 32, 32, 64, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 16, 1, 1]
#( 32, 32, 64, 16)--[4, 16, 4, 16]--( 32, 32, 64, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 4, 1, 1]
#( 32, 32, 64, 16)--[4, 16, 4, 16]--( 32, 32, 64, 16) #bottle [4, 16] [1, 1, 1, 1] [ 16, 1, 1, 1]
'''
#( 32, 32, 64, 16)--[4, 16, 4, 64]--( 32, 32, 64, 64) #output [4, 64] [1, 1, 1, 1] [ 16, 1, 1, 1]
#############256 neurons###########( 16, 16, 256, 64)
#( 16, 16, 256, 64)--[2, 64, 2, 16]--( 16, 16, 256, 16) #input [4, 16] [1, 1, 1, 1] [ 1, 64, 1, 1]
'''
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 64, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 16, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 16, 4, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 64, 1, 1, 1]
'''
#( 16, 16, 256, 16)--[4, 16, 4, 64]--( 16, 16, 256, 64) #output [4, 64] [1, 1, 1, 1] [ 64, 1, 1, 1]
#############1024 neurons##########( 8, 8, 1024, 64)
#( 8, 8, 1024, 64)--[4, 64, 4, 16]--( 8, 8, 1024, 16) #input [4, 16] [1, 1, 1, 1] [ 1, 256, 1, 1]
'''
#( 8, 8, 1024, 64)--[4, 16, 4, 16]--( 8, 8, 1024, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 256, 1, 1]
#( 8, 8, 1024, 64)--[4, 16, 4, 16]--( 8, 8, 1024, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 64, 1, 1]
#( 8, 8, 1024, 64)--[4, 16, 4, 16]--( 8, 8, 1024, 16) #bottle [4, 16] [1, 1, 1, 1] [ 16, 16, 1, 1]
#( 8, 8, 1024, 64)--[4, 16, 4, 16]--( 8, 8, 1024, 16) #bottle [4, 16] [1, 1, 1, 1] [ 64, 4, 1, 1]
#( 8, 8, 1024, 64)--[4, 16, 4, 16]--( 8, 8, 1024, 16) #bottle [4, 16] [1, 1, 1, 1] [256, 1, 1, 1]
'''
#( 8, 8, 1024, 16)--[4, 16, 4, 64]--( 8, 8, 1024, 64) #output [4, 64] [1, 1, 1, 1] [256, 1, 1, 1]
#############4 neurons#############(128, 128, 4, 64)
#(256, 256, 1, 64)--[1, 64, 1, 64]--(128, 128, 4, 64) #branch [1, 64] [2, 2, 1, 1] [ 4, 1, 1, 1]
#(256, 256, 1, 64)--[1, 64, 1, 16]--(128, 128, 4, 16) #input [1, 16] [2, 2, 1, 1] [ 4, 1, 1, 1]
#(128, 128, 4, 16)--[4, 16, 4, 16]--(128, 128, 4, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 1, 1, 1]
#(128, 128, 4, 16)--[4, 16, 4, 64]--(128, 128, 4, 64) #output [4, 64] [1, 1, 1, 1] [ 1, 1, 1, 1]
#(128, 128, 4, 64)--[4, 64, 4, 16]--(128, 128, 4, 16) #input [4, 16] [1, 1, 1, 1] [ 1, 1, 1, 1]
#(128, 128, 4, 16)--[4, 16, 4, 16]--(128, 128, 4, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 1, 1, 1]
#(128, 128, 4, 16)--[4, 16, 4, 64]--(128, 128, 4, 64) #output [4, 64] [1, 1, 1, 1] [ 1, 1, 1, 1]
#############16 neurons############( 64, 64, 16, 64)
#(128, 128, 4, 64)--[2, 64, 2, 64]--( 64, 64, 16, 64) #branch [2, 64] [2, 2, 1, 1] [ 8, 1, 1, 1]
#(128, 128, 4, 64)--[2, 64, 2, 16]--( 64, 64, 16, 16) #input [2, 16] [2, 2, 1, 1] [ 8, 1, 1, 1]
'''
#( 64, 64, 16, 16)--[4, 16, 4, 16]--( 64, 64, 16, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 4, 1, 1]
#( 64, 64, 16, 16)--[4, 16, 4, 16]--( 64, 64, 16, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 1, 1, 1]
'''
#( 64, 64, 16, 16)--[4, 16, 4, 64]--( 64, 64, 16, 64) #output [4, 64] [1, 1, 1, 1] [ 4, 1, 1, 1]
#( 64, 64, 16, 64)--[4, 64, 4, 16]--( 64, 64, 16, 16) #input [4, 16] [1, 1, 1, 1] [ 4, 1, 1, 1]
'''
#( 64, 64, 16, 16)--[4, 16, 4, 16]--( 64, 64, 16, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 4, 1, 1]
#( 64, 64, 16, 16)--[4, 16, 4, 16]--( 64, 64, 16, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 1, 1, 1]
'''
#( 64, 64, 16, 16)--[4, 16, 4, 64]--( 64, 64, 16, 64) #output [4, 64] [1, 1, 1, 1] [ 4, 1, 1, 1]
#############64 neurons############( 32, 32, 64, 64)
#( 64, 64, 16, 64)--[2, 64, 2, 64]--( 32, 32, 64, 64) #branch [2, 64] [2, 2, 1, 1] [ 32, 1, 1, 1]
#( 64, 64, 16, 64)--[2, 64, 2, 16]--( 32, 32, 64, 16) #input [2, 16] [2, 2, 1, 1] [ 32, 1, 1, 1]
'''
#( 32, 32, 64, 16)--[4, 16, 4, 16]--( 32, 32, 64, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 16, 1, 1]
#( 32, 32, 64, 16)--[4, 16, 4, 16]--( 32, 32, 64, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 4, 1, 1]
#( 32, 32, 64, 16)--[4, 16, 4, 16]--( 32, 32, 64, 16) #bottle [4, 16] [1, 1, 1, 1] [ 16, 1, 1, 1]
'''
#( 32, 32, 64, 16)--[4, 16, 4, 64]--( 32, 32, 64, 64) #output [4, 64] [1, 1, 1, 1] [ 16, 1, 1, 1]
#( 32, 32, 64, 64)--[4, 64, 4, 16]--( 32, 32, 64, 16) #input [4, 16] [1, 1, 1, 1] [ 16, 1, 1, 1]
'''
#( 32, 32, 64, 16)--[4, 16, 4, 16]--( 32, 32, 64, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 16, 1, 1]
#( 32, 32, 64, 16)--[4, 16, 4, 16]--( 32, 32, 64, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 4, 1, 1]
#( 32, 32, 64, 16)--[4, 16, 4, 16]--( 32, 32, 64, 16) #bottle [4, 16] [1, 1, 1, 1] [ 16, 1, 1, 1]
'''
#( 32, 32, 64, 16)--[4, 16, 4, 64]--( 32, 32, 64, 64) #output [4, 64] [1, 1, 1, 1] [ 16, 1, 1, 1]
#############256 neurons###########( 16, 16, 256, 64)
#( 32, 32, 64, 64)--[2, 64, 2, 64]--( 16, 16, 256, 64) #branch [2, 64] [2, 2, 1, 1] [128, 1, 1, 1]
#( 32, 32, 64, 64)--[2, 64, 2, 16]--( 16, 16, 256, 16) #input [2, 16] [2, 2, 1, 1] [128, 1, 1, 1]
'''
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 64, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 16, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 16, 4, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 64, 1, 1, 1]
'''
#( 16, 16, 256, 16)--[4, 16, 4, 64]--( 16, 16, 256, 64) #output [4, 64] [1, 1, 1, 1] [ 64, 1, 1, 1]
'''
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 64, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 16, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 16, 4, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 64, 1, 1, 1]
'''
#( 16, 16, 256, 16)--[4, 16, 4, 64]--( 16, 16, 256, 64) #output [4, 64] [1, 1, 1, 1] [ 64, 1, 1, 1]
#############256 neurons###########( 16, 16, 256, 64)
#( 32, 32, 64, 64)--[2, 64, 2, 64]--( 16, 16, 256, 64) #branch [2, 64] [2, 2, 1, 1] [128, 1, 1, 1]
#( 32, 32, 64, 64)--[2, 64, 2, 16]--( 16, 16, 256, 16) #input [2, 16] [2, 2, 1, 1] [128, 1, 1, 1]
'''
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 256, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 64, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 16, 16, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 64, 4, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [256, 1, 1, 1]
'''
#( 16, 16, 256, 16)--[4, 16, 4, 64]--( 16, 16, 256, 64) #output [4, 64] [1, 1, 1, 1] [ 64, 1, 1, 1]
'''
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 1, 256, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 4, 64, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 16, 16, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [ 64, 4, 1, 1]
#( 16, 16, 256, 16)--[4, 16, 4, 16]--( 16, 16, 256, 16) #bottle [4, 16] [1, 1, 1, 1] [256, 1, 1, 1]
'''
#( 16, 16, 256, 16)--[4, 16, 4, 64]--( 16, 16, 256, 64) #output [4, 64] [1, 1, 1, 1] [ 64, 1, 1, 1]
#############4 neurons#############(128, 128, 4, 64)
#(256, 256, 1, 64)--[1, 64, 1, 16]--(128, 128, 4, 16) #input [1, 16] [2, 2, 1, 1] [4, 1, 1, 1]
#(128, 128, 4, 16)--[4, 16, 4, 16]--(128, 128, 4, 16) #bottle [4, 16] [1, 1, 1, 1] [1, 1, 1, 1]
#(128, 128, 4, 16)--[4, 16, 4, 64]--(128, 128, 4, 64) #output [1, 64] [1, 1, 1, 1] [1, 1, 1, 1]
#(128, 128, 4, 64)--[4, 64, 4, 16]--(128, 128, 4, 16) #input [1, 16] [1, 1, 1, 1] [1, 1, 1, 1]
#(128, 128, 4, 16)--[4, 16, 4, 16]--(128, 128, 4, 16) #bottle [4, 16] [1, 1, 1, 1] [1, 1, 1, 1]
#(128, 128, 4, 16)--[4, 16, 4, 64]--(128, 128, 4, 64) #output [1, 64] [1, 1, 1, 1] [1, 1, 1, 1]
#the first resnet block setting
#(512, 512, 1, 64)--[2, 2, 1, 1]--(256, 256, 4, 64)
#(256, 256, 4, 64)--[1, 64, 1, 32]--(256, 256, 4, 32) #branch [1, 32] [2, 2, 1, 1] [4, 1, 1, 1]
#(256, 256, 4, 64)--[1, 64, 1, 8]--(256, 256, 4, 8) #input [1, 8] [1, 1, 1, 1] [4, 1, 1, 1]
#(256, 256, 4, 8)--[4, 8, 4, 8]--(256, 256, 4, 8) #bottle [4, 8] [1, 1, 1, 1] [1, 1, 1, 1]
#(256, 256, 4, 8)--[4, 8, 4, 32]--(256, 256, 4, 32) #output [4, 32] [1, 1, 1, 1] [1, 1, 1, 1]
#(256, 256, 4, 32)--[4, 32, 4, 8]--(256, 256, 4, 8) #input [1, 8] [1, 1, 1, 1] [1, 1, 1, 1]
#(256, 256, 4, 8)--[4, 8, 4, 8]--(256, 256, 4, 8) #bottle [4, 8] [1, 1, 1, 1] [1, 1, 1, 1]
#(256, 256, 4, 8)--[4, 8, 4, 32]--(256, 256, 4, 32) #output [4, 32] [1, 1, 1, 1] [1, 1, 1, 1]
#(256, 256, 4, 32)--[2, 2, 1, 1]--(256, 256, 16, 32)
#(256, 256, 16, 32)--[1, 64, 1, 32]--(256, 256, 4, 32) #branch [1, 32] [2, 2, 1, 1] [4, 1, 1, 1]
def group_block1(tensor_in=None, layer=0, params=None, mtrain=None):
block_setting = params['group_block']['block_setting']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
tensor_out = tensor_in
out_list = []
for i, block in enumerate(block_setting):
depth_output, depth_bottle, shape, stride, rate, use_attn, unit_number, unit_trainable = block
params['com']['trainable'] = unit_trainable
with tf.variable_scope('group_block1_'+str(layer)+'_'+str(i)) as scope:
for j in range(unit_number):
if j == 0: #the first unit in the block
params['group_unit'] = {'depth_output':depth_output, 'depth_bottle':depth_bottle, 'use_branch':True, \
'use_attn':use_attn, 'shape':shape, 'stride':stride, 'rate':rate}
else: #identity mapping
params['group_unit'] = {'depth_output':depth_output, 'depth_bottle':depth_bottle, 'use_branch':False, \
'use_attn':use_attn, 'shape':shape, 'stride':[1,1,1,1], 'rate':rate}
tensor_out = group_unit1(tensor_out, j, params, mtrain)
out_list.append(tensor_out)
return out_list
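#Hedged usage sketch for group_block1: each row of block_setting unpacks to the
#eight fields read in its loop, feeding the conv-based group_unit1 that is in
#effect at call time. The concrete values are assumed examples only.
def _demo_group_block1_params():
    #(depth_output, depth_bottle, shape, stride, rate, use_attn, unit_number, unit_trainable)
    return {'group_block': {'block_setting': [
        (256, 64, [1, 1], [2, 2], [1, 1], False, 3, True),
        (512, 128, [1, 1], [2, 2], [1, 1], False, 4, True),
    ]}}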
def group1(tensor_in=None, layer=0, params=None, mtrain=None):
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
    length = params['group']['length'] #length of the vector neurons [output, middle]
    number = params['group']['number'] #number of vector neurons [output, middle]
    shape = params['group']['shape'] #which vector neurons the spatial and channel operations act on [3, 3]
    stride = params['group']['stride'] #how spatial features are stacked into the channel direction [1, 1] / [2, 2]
    rate = params['group']['rate'] #spatial/channel dilation ratios, keeping the 3*3*C full connection from having too many parameters [b, 1, 1, 1]
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[None, 256, 256, 4, 64]
with tf.variable_scope('group1_'+str(layer)) as scope:
        tsr_int = tensor_in
        tsr_shp = get_shape(tsr_int)
        #Assumed completion of the truncated "elif": regroup only when the neuron
        #layout differs from the requested [number, length] layout.
        if number[0] != tsr_shp[3] or length[0] != tsr_shp[4]:
            tsr_out = tf.reshape(tsr_int, tsr_shp[0:3]+[number[0], length[0]]) #(None, 128, 128, 16, 64)
        else:
            tsr_out = tsr_int
def densefc(tsr_int):
with tf.variable_scope('densefc_'+str(layer)) as scope:
def get_residual_img(tsr_int):
tsr_shp = get_shape(tsr_int) #[2*1, 128, 128, 8, 32]
                    #Assumed completion: the weight-shape line broke off after "+ shape +";
                    #a square map over the flattened vector dimension fits the comment.
                    wgt_shp = [tsr_shp[0]] + shape + [tsr_shp[-2]*tsr_shp[-1], tsr_shp[-2]*tsr_shp[-1]] #[16, 1, 1, 64, 64]
                    weights = tf.get_variable(name='weights', shape=wgt_shp, dtype=dtype, #(16, 1, 1, 64, 64)
initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), trainable=trainable)
elems = [tsr_int]
                def get_residual_blk(elems=None):
                    shortcut = project(tensor_in, number[0], length[0], 0) #(None, 128, 128, 16, 32)
                    residual = project(tensor_in, number[1], length[1], 1) #(None, 128, 128, 8, 32)
                    residual = relu1(residual, 0, params, mtrain) #(None, 128, 128, 8, 32)
                    return shortcut, residual
                if x_shape[1]*x_shape[2]*x_shape[5] != number[0]:
                    new_num = number[0] // x_shape[1] // x_shape[2]
                #The draft broke off here ("elif x_shape[6] != ..."); the unfinished branch is omitted.
wgt_shp = x_shape[1:3] + shape[1:3] + [x_shape[5], length[2], number, length[2]] #[4, 1, 3, 3, 4, 32, 4, 32]
weights = tf.get_variable(name='weights', shape=wgt_shp, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',\
uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), trainable=trainable)
        #What followed duplicated the later group1 draft (its fet_shp*/weight* chain)
        #and fragments of group_unit1 verbatim, using names undefined in this scope,
        #so this draft ends by returning the regrouped tensor.
        tensor_out = tsr_out
        print_activations(tensor_out)
        return tensor_out
def proj1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    Raises and lowers the dimensionality of vector neurons; the input shape is [n, h, w, m, c].
    Each vector neuron is most correlated with itself, so the less the input is modified the better, similar in spirit to a depthwise separable convolution.
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['proj']['number'] #[4,64]
shape = params['proj']['shape'] #[3, 3]
rate = params['proj']['rate'] #[b, 2, 1, 1]
stride = params['proj']['stride'] #[2, 2, 1, 1]
padding = params['proj']['padding']
use_bias = params['proj']['use_bias']
use_attn = params['proj']['use_attn']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[None, 256, 256, 16, 64]
'''
if number[0] == -1:
number[0] = x_shape[3]
if length[0] == -1:
length[0] = x_shape[4]
num_kep = x_shape[3] * x_shape[4] // length[0] // number[0]
wgt_shp = [num_kep, shape[0], shape[1], number[0]*length[0], number[1]*length[1]] #[16, 1, 1, 1*64, 1*64]
bia_shp = [num_kep, 1, 1, 1, number[1]*length[1]] #[16, 1, 1, 1, 1*64]
wgt_srd = [1, stride[2], stride[3], 1]
wgt_rat = [1, rate[2], rate[3], 1]
'''
with tf.variable_scope('proj1_'+str(layer), reuse=reuse) as scope:
        if np.any(np.asarray(stride[0:2]) > 1):
tensor_in = tf.reshape(tensor_in, [x_shape[0], x_shape[1]//stride[0], stride[0], \
x_shape[2]//stride[1], stride[1]]+x_shape[3:]) #(None, 128, 2, 128, 2, 4, 64)
tensor_in = tf.transpose(tensor_in, [0, 1, 3, 2, 4, 5, 6]) #(None, 128, 128, 2, 2, 4, 64)
tensor_in = tf.reshape(tensor_in, [x_shape[0], x_shape[1]//stride[0], x_shape[2]//stride[1], \
stride[0]*stride[1]*x_shape[3], x_shape[4]]) #(None, 128, 128, 16, 64)
x_shape = get_shape(tensor_in) #[None, 128, 128, 16, 64]
        #Connections are dense inside a block and sparse across blocks
tensor_in = tf.reshape(tensor_in, x_shape[:3]+[rate[0],x_shape[3]//rate[0],x_shape[4]]) #(None, 128, 128, 2, 8, 64)
tensor_in = tf.transpose(tensor_in, [3, 0, 1, 2, 4, 5]) #(2, None, 128, 128, 8, 64)
x_shape = get_shape(tensor_in) #[2, None, 128, 128, 8, 64]
        #Partition the vector neurons again according to the dilation rate along the channel direction
tensor_in = tf.reshape(tensor_in, x_shape[:4]+[x_shape[4]//rate[1],rate[1],x_shape[5]]) #(2, None, 128, 128, 4, 2, 64)
tensor_in = tf.transpose(tensor_in, [0, 5, 1, 2, 3, 4, 6]) #(2, 2, None, 128, 128, 4, 64)
x_shape = get_shape(tensor_in) #[2, 2, None, 128, 128, 4, 64]
tensor_in = tf.reshape(tensor_in, [rate[0]*rate[1]]+x_shape[2:]) #(2*2, None, 128, 128, 4, 64)
x_shape = get_shape(tensor_in) #[2*2, None, 128, 128, 4, 64]
wgt_shp = [x_shape[0]] + shape + x_shape[4:] + number #[2*2, 3, 3, 4, 64, 4, 64]
        bia_shp = [x_shape[0], 1, 1, 1, number[0]*number[1]] #[2*2, 1, 1, 1, 4*64]; the draft reused the full weight shape, which cannot broadcast over the convolution output
        weights = tf.get_variable(name='weights', shape=wgt_shp, dtype=dtype, #(2*2, 3, 3, 4, 64, 4, 64)
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable)
if use_bias:
biases = tf.get_variable(name='biases', shape=bia_shp, dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
        #The tail below was pasted from proj2 and still used proj2's num_kep/length
        #names; as an assumed completion it is rewired to the local shapes, following
        #the working proj1 above: flatten each group, convolve, then restore (M, C).
        tsr_int = tf.reshape(tensor_in, [x_shape[0]] + x_shape[1:4] + [x_shape[4]*x_shape[5]]) #(2*2, None, 128, 128, 4*64)
        wgts = tf.reshape(weights, [wgt_shp[0]] + shape + [wgt_shp[3]*wgt_shp[4], wgt_shp[5]*wgt_shp[6]]) #(2*2, 3, 3, 4*64, 4*64)
        wgt_srd = [1, stride[2], stride[3], 1] #[1, 1, 1, 1]
        wgt_rat = [1, rate[2], rate[3], 1] #[1, 1, 1, 1]
        elems = [tsr_int, wgts]
        proj = tf.map_fn(lambda x: tf.nn.conv2d(x[0], x[1], wgt_srd, padding=padding, dilations=wgt_rat), \
                         elems, dtype=tf.float32, parallel_iterations=128, \
                         back_prop=True, swap_memory=True, infer_shape=True) #(2*2, None, 128, 128, 4*64)
        if use_bias:
            tensor_out = proj + biases #(2*2, None, 128, 128, 4*64)
        else:
            tensor_out = proj #(2*2, None, 128, 128, 4*64)
        tensor_out = tf.transpose(tensor_out, [1, 2, 3, 0, 4]) #(None, 128, 128, 2*2, 4*64)
        tensor_out = tf.reshape(tensor_out, x_shape[1:4]+[x_shape[0]*x_shape[4], x_shape[5]]) #(None, 128, 128, 16, 64)
#tf.summary.histogram('proj', tensor_out)
print_activations(tensor_out)
return tensor_out
def proj2(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    Raises and lowers the dimensionality of vector neurons; the input shape is [n, h, w, m, c].
    Each vector neuron is most correlated with itself, so the less the input is modified the better, similar in spirit to a depthwise separable convolution.
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
    number = params['proj']['number'] #[input, output]
    length = params['proj']['length'] #[input, output]
shape = params['proj']['shape']
rate = params['proj']['rate']
stride = params['proj']['stride']
padding = params['proj']['padding']
use_bias = params['proj']['use_bias']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[None, 128, 128, 16, 64]
if number[0] == -1:
number[0] = x_shape[3]
if length[0] == -1:
length[0] = x_shape[4]
num_kep = x_shape[3] * x_shape[4] // length[0] // number[0]
wgt_shp = [num_kep, shape[0], shape[1], number[0]*length[0], number[1]*length[1]] #[16, 1, 1, 1*64, 1*64]
bia_shp = [num_kep, 1, 1, 1, number[1]*length[1]] #[16, 1, 1, 1, 1*64]
stride = [1, stride[0], stride[1], 1]
rate = [1, rate[0], rate[1], 1]
with tf.variable_scope('proj2_'+str(layer), reuse=reuse) as scope:
tensor_in = tf.reshape(tensor_in, x_shape[0:3]+[num_kep,number[0]*length[0]]) #(None, 128, 128, 16, 1*64)
tensor_in = tf.transpose(tensor_in, [3, 0, 1, 2, 4]) #(16, None, 128, 128, 1*64)
        kernel = tf.get_variable(name='weights', shape=wgt_shp, dtype=dtype, #(16, 1, 1, 1*64, 1*64)
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable)
if use_bias:
biases = tf.get_variable(name='biases', shape=bia_shp, dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
elems = [tensor_in, kernel]
proj = tf.map_fn(lambda x: tf.nn.conv2d(x[0], x[1], stride, padding=padding, dilations=rate), \
elems, dtype=tf.float32, parallel_iterations=128, \
back_prop=True, swap_memory=True, infer_shape=True) #(16, None, 128, 128, 1*64)
if use_bias:
tensor_out = proj + biases #(16, None, 128, 128, 1*64)
else:
tensor_out = proj #(16, None, 128, 128, 1*64)
tensor_out = tf.transpose(tensor_out, [1, 2, 3, 0, 4]) #(None, 128, 128, 16, 1*64)
tensor_out = tf.reshape(tensor_out, x_shape[0:3]+[num_kep*number[1],length[1]]) #(None, 128, 128, 16*1, 64)
#tf.summary.histogram('proj', tensor_out)
print_activations(tensor_out)
return tensor_out
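#A minimal NumPy sketch (illustrative only) of proj2's depthwise-style grouping:
#num_kep groups of number[0] neurons of length length[0] move to the leading axis,
#so each group gets its own 1x1 convolution, mirroring the reshape/transpose above.
def _demo_proj2_grouping(N=1, H=4, W=4, M=16, C=64, number0=1, length0=64):
    num_kep = M * C // (number0 * length0)         #16 independent groups
    x = np.random.randn(N, H, W, M, C)
    x = x.reshape(N, H, W, num_kep, number0*length0)
    return x.transpose(3, 0, 1, 2, 4)              #(num_kep, N, H, W, number0*length0)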
    #Compute the residual features (orphaned draft; its enclosing flow was removed)
residual = tf.reshape(tensor_in, [x_shape[0]*x_shape[1]]+x_shape[2:5]+[x_shape[5]*x_shape[6]]) #[4*1, None, 128, 128, 4*64]
r_shape = get_shape(residual) #[4*1, None, 128, 128, 4*64]
leh_out = number // x_shape[0] // x_shape[1] * length[1] # 4*64
wgt_shp0 = [r_shape[0], 1, 1, r_shape[-1], leh_out] #[4*1, 1, 1, 4*64, 4*64]
weights0 = tf.get_variable(name='weights0', shape=wgt_shp0, dtype=dtype, \
initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), trainable=trainable)
def get_residual(elems):
residual, weights0 = elems
residual = tf.nn.conv2d(residual, weights0, [1, 1, 1, 1], padding='VALID', dilations=[1, 1, 1, 1])
residual = bn_relu1(residual, 0, params, mtrain)
return residual
elems = [residual, weights0]
residual = tf.map_fn(get_residual, elems, dtype=tf.float32, parallel_iterations=128, \
back_prop=True, swap_memory=True, infer_shape=True) #(4*1, None, 128, 128, 4*64)
r_shape = get_shape(residual) #[4*1, None, 128, 128, 4*64]
residual = tf.reshape(residual, r_shape[:-1]+[r_shape[-1]//length[1], length[1]]) #(4*1, None, 128, 128, 4, 64)
r_shape = get_shape(residual) #[4*1, None, 128, 128, 4, 64]
    #Compute the shortcut (bypass) features
tensor_out = tf.layers.conv2d(inputs=tensor_in, filters=number, kernel_size=shape, strides=stride, \
padding=padding, data_format='channels_last', dilation_rate=rate, \
activation=None, use_bias=use_bias, \
                                  #kernel_initializer=tf.initializers.truncated_normal(stddev=wscale), \
                                  kernel_initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
bias_initializer=tf.zeros_initializer(), \
kernel_regularizer=tf.contrib.layers.l2_regularizer(reg), \
bias_regularizer=None, activity_regularizer=None, \
kernel_constraint=None, bias_constraint=None, \
trainable=trainable, reuse=reuse)
def group1(tensor_in=None, layer=0, params=None, mtrain=None):
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
number = params['group']['number'] #[4, 4, 4]
shape = params['group']['shape'] #[3, 3, 4, 4, 4]
stride = params['group']['stride'] #[1, 1] / [2, 2]
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[None, 256, 256, 64]
    shape = list(shape) #[3, 3, 4, 4, 4]; keep a list so that '+' below concatenates instead of broadcasting
    #wgt_shp = [[np.prod(shape)] for i, n in enumerate(number)] #unfinished draft, superseded by the fet_shp*/wgt_shp* chain below
    #fet_shp = [[]]
fet_shp0 = shape #[3, 3, 4, 4, 4]
wgt_shp0 = fet_shp0[:-1] + [fet_shp0[-1]//4] #[3, 3, 16, 1]
fet_shp1 = fet_shp0[:-1] + [fet_shp0[-1]+wgt_shp0[-1]] #[3, 3, 16, 5]
wgt_shp1 = fet_shp1[:-1] + [fet_shp1[-1], number[-1]] #[3, 3, 16, 5, 4]
fet_shp2 = fet_shp1[:-1] + [wgt_shp1[-1]] #[3, 3, 16, 4]
wgt_shp2 = fet_shp2[:-1] + [number[0]] #[3, 3, 16, 16]
fet_shp3 = [wgt_shp2[-1], fet_shp2[-1]] #(16, 4)
with tf.variable_scope('group1_'+str(layer)) as scope:
weight0 = tf.get_variable(name='weight0', shape=wgt_shp0, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable) #[3, 3, 16, 1]
weight1 = tf.get_variable(name='weight1', shape=wgt_shp1, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable)
weight2 = tf.get_variable(name='weight2', shape=wgt_shp2, dtype=dtype, \
#initializer=tf.initializers.truncated_normal(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32), \
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable)
        #The draft sized this TensorArray with height/width/depth_* names from an
        #attention experiment; the input's spatial extent is the assumed intent.
        tsr_out = tf.TensorArray(dtype=tf.float32, size=x_shape[1]*x_shape[2], dynamic_size=False, clear_after_read=True, \
                                 infer_shape=True, element_shape=None, colocate_with_first_write_call=True)
def group_img(elems=None):
tsr_int = elems #(H, W, C)
def cond(i, tsr_out):
c = tf.less(i, x_shape[1]*x_shape[2])
return c
def body(i, tsr_out):
ycd = i // x_shape[2]
xcd = i % x_shape[2]
ymn = ycd - (shape[0] - 1) // 2
xmn = xcd - (shape[1] - 1) // 2
ycds = tf.concat([[ymn], tf.tile([1], [shape[0]-1])], axis=0)
xcds = tf.concat([[xmn], tf.tile([1], [shape[1]-1])], axis=0)
ycds = tf.cumsum(ycds, axis=0, exclusive=False, reverse=False)
xcds = tf.cumsum(xcds, axis=0, exclusive=False, reverse=False)
yixs = tf.where(tf.logical_and(ycds>=0, ycds<x_shape[1]))[:, 0]
ycds = tf.gather(ycds, yixs)
xixs = tf.where(tf.logical_and(xcds>=0, xcds<x_shape[2]))[:, 0]
xcds = tf.gather(xcds, xixs)
ycds = tf.tile(ycds[:, tf.newaxis], [1, tf.shape(xcds)[0]]) #(3, 3)
xcds = tf.tile(xcds[tf.newaxis, :], [tf.shape(ycds)[0], 1]) #(3, 3)
                crds = tf.stack([ycds, xcds], axis=-1) #(3, 3, 2)
yixs = tf.tile(yixs[:, tf.newaxis], [1, tf.shape(xixs)[0]]) #(3, 3)
xixs = tf.tile(xixs[tf.newaxis, :], [tf.shape(yixs)[0], 1]) #(3, 3)
                idxs = tf.stack([yixs, xixs], axis=-1) #(3, 3, 2)
fet0 = tf.gather_nd(tsr_int, crds) #(3, 3, 64)
fet0 = tf.reshape(fet0, fet_shp0) #(3, 3, 16, 4)
shp0 = get_shape(fet0)[:-1] + fet_shp0[2:] #[3, 3, 16, 4]
fet0 = tf.reshape(fet0, shp0) #(3, 3, 16, 4)
wgt0 = tf.gather_nd(weight0, idxs) #(3, 3, 16, 1)
                fet1 = tf.concat([fet0, wgt0], axis=-1) #(3, 3, 16, 5)
fet1 = tf.expand_dims(fet1, axis=-2) #(3, 3, 16, 1, 5)
wgt1 = tf.gather_nd(weight1, idxs) #(3, 3, 16, 5, 4)
fet2 = tf.matmul(fet1, wgt1) #(3, 3, 16, 1, 4)
fet2 = tf.nn.relu(fet2) #(3, 3, 16, 1, 4)
fet2 = tf.transpose(fet2, perm=[4, 3, 0, 1, 2]) #(4, 1, 3, 3, 16)
shp2 = get_shape(fet2) #[4, 1, 3, 3, 16]
fet2 = tf.reshape(fet2, shp2[0:2]+[-1]) #(4, 1, 3*3*16)
wgt2 = tf.gather_nd(weight2, idxs) #(3, 3, 4, 16, 16)
fet3 = tf.matmul(fet2, wgt2) #(3, 3, 4, 1, 16)
fet3 = tf.nn.relu(fet3) #(3, 3, 4, 1, 16)
                #The lines below came from a positional-attention draft (tensor_value,
                #tensor_key, PE, rate and depth_key are defined in that draft, not here).
                crd0 = tf.stack([ycd, xcd], axis=0) #(2) actual centre
                crds0 = tf.stack([ycds, xcds], axis=-1) #(h, w, 2) actual coordinates
                fets0 = tf.gather_nd(tensor_value, crds0) #(h, w, c) actual features
                fets3 = tf.gather_nd(tensor_key, crds0) #(h, w, c') actual features
                crd1 = (np.asarray(shape) - 1) // 2 #(2) relative centre
                crds1 = (crds0 - crd0) // rate #(h, w, 2) relative coordinates
                crds1 = crds1 + crd1 #(h, w, 2) relative coordinates
                fets1 = tf.gather_nd(PE, crds1) #(h, w, c) relative features
                #fets2= tf.concat([fets0, fets1], axis=-1) #(h, w, c'') fused features
                crd3 = crd0 - crds0[0, 0] #unchanged if the crd/crds subscripts are 1 instead #(2) relative coordinates
                fet3 = tf.gather_nd(fets3, crd3) #(c') relative centre
                #Compute the attention
att3 = tf.einsum('ijk,k->ij', fets3, fet3) #(h, w)
                att3 = tf.exp(att3 / tf.sqrt(tf.cast(depth_key, tf.float32))) #(h, w)
att3 = att3 / tf.reduce_sum(att3) #(h, w)
fet0 = tf.einsum('ij,ijk->k', att3, fets0) #(c)
fet1 = tf.einsum('ij,ijk->k', att3, fets1) #(c')
fet2 = tf.concat([fet0, fet1], axis=-1) #(c'')
#fet2 = tf.einsum('ij,ijk->k', att3, fets2) #(c'')
tsr_out = tsr_out.write(i, fet2) #(h, w, c')
return [i+1, tsr_out]
i = tf.constant(0)
[i, tsr_out] = tf.while_loop(cond, body, loop_vars=[i, tsr_out], shape_invariants=None, \
parallel_iterations=128, back_prop=True, swap_memory=True)
            tsr_out = tsr_out.stack() #(H*W, c'')
            return tf.reshape(tsr_out, x_shape[1:3]+[-1]) #(H, W, c''); assumed completion
        #The residual-unit lines that followed were a verbatim copy of group_unit1
        #below (use_branch/depth_* are undefined here); the draft instead maps
        #group_img over the batch, mirroring the attention drafts above.
        tensor_out = tf.map_fn(group_img, tensor_in, dtype=tf.float32, parallel_iterations=1, \
                               back_prop=True, swap_memory=True, infer_shape=True) #(N, H, W, c'')
        print_activations(tensor_out)
        return tensor_out
def group_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
depth_output = params['group_unit']['depth_output']
depth_bottle = params['group_unit']['depth_bottle']
use_branch = params['group_unit']['use_branch']
    shape = params['group_unit']['shape'] #association shape for the LSTM and attention
    stride = params['group_unit']['stride'] #how spatial features are stacked into the channel direction
    rate = params['group_unit']['rate'] #convolution dilation rate when extracting abstract shape features, keeping the 1*1*C full connection from having too many parameters
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape = tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in)
with tf.variable_scope('group_unit1_'+str(layer)) as scope:
if use_branch:
params['conv'] = {'number':depth_output, 'shape':shape, 'rate':1, 'stride':stride, 'padding':'VALID'}
shortcut = conv_bn1(tensor_in, 0, params, mtrain)
else:
shortcut = tensor_in
params['conv'] = {'number':depth_bottle, 'shape':shape, 'rate':1, 'stride':stride, 'padding':'VALID'}
residual = conv_bn_relu1(tensor_in, 0, params, mtrain)
params['conv'] = {'number':depth_bottle, 'shape':[3, 3], 'rate':rate, 'stride':[1, 1], 'padding':'SAME' }
residual = conv_bn_relu1(residual, 1, params, mtrain)
params['conv'] = {'number':depth_output, 'shape':[1, 1], 'rate':1, 'stride':[1, 1], 'padding':'VALID'}
residual = conv_bn1(residual, 1, params, mtrain)
tensor_out = relu1(shortcut+residual, 0, params, mtrain)
return tensor_out
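#Hedged usage sketch: the params layout read by the bottleneck unit above; the
#values are assumed examples only (a standard stride-2 projection unit).
def _demo_group_unit1_conv_params():
    return {'group_unit': {'depth_output': 256, 'depth_bottle': 64, 'use_branch': True,
                           'shape': [1, 1], 'stride': [2, 2], 'rate': [1, 1]}}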
def group_bn_relu1(tensor_in=None, layer=0, params=None, mtrain=None):
number = params['group']['number']
multiple = params['group']['multiple']
shape = params['group']['shape']
rate = params['group']['rate']
stride = params['group']['stride']
padding = params['group']['padding']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('group_bn_relu1_'+str(layer)) as scope:
params['conv'] = {'number':number, 'shape':[3, 3],'rate':[1, 1],'stride':[1, 1],'padding':'SAME' }
tensor_out = conv_bn_relu1(tensor_in, 0, params, mtrain)
params['conv'] = {'number':multiple,'shape':shape, 'rate':rate, 'stride':stride,'padding':padding}
tensor_out = conv_bn_relu2(tensor_out, 0, params, mtrain)
return tensor_out
def group1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    1. Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, here the feature-map volume H*W*C); this defines the grouping used for the local full connections
    2. The parameter complexity is ∑(i=1..n) (x_i)^2
    3. Batch norm is applied only over the dimensions other than the filter-output dimension, since the filter outputs are the features this layer emphasizes
    4. This function includes an attention mechanism over the backbone that clusters similar features, so that even with a small volume the filters can still map similar features to distinct ones;
       this not only improves the network's abstraction and expressive power, but also further enlarges its receptive field.
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
shape = params['group']['shape'] #[[[7, 4, 1], [7, 4, 1]], [[4, 7, 1], [4, 7, 1]]]
use_attn = params['group']['use_attn']
use_prev = params['group']['use_prev']
use_drop = params['group']['use_drop']
keep_p = params['group']['keep_p']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
tsr_out = tensor_in
shape = np.asarray(shape)
num_int = np.prod(shape[:, 0], axis=1) #[h1*w1*c1, h2*w2*c2]
num_out = np.prod(shape[:, 1], axis=1) #[h'*w'*c', h"*w"*c"]
with tf.variable_scope('group1_'+str(layer), reuse=reuse) as scope:
if use_attn:
gamma = tf.get_variable(name='gamma', shape=[len(shape)], dtype=dtype, \
initializer=tf.constant_initializer(0.0), trainable=trainable)
for l in range(len(shape)):
shp_int = get_shape(tsr_out) #[N, H, W, C]
bat_siz = shp_int[0] # N
shp_int = np.asarray(shp_int[1:]) #[H, W, C]
shp_out = shp_int // shape[l][0] #[H', W', C']
shp_int = np.stack([shp_out, shape[l][0]], axis=-1) #[[H', h], [W', w], [C', c]]
shp_int = np.reshape(shp_int, -1) #[H', h, W', w, C', c]
shp_int = [bat_siz] + list(shp_int) #[N, H', h, W', w, C', c]
shp_idx = np.arange(1, len(shp_int)).reshape([-1, 2]) #[[1, 2], [3, 4], [5, 6]]
shp_idx = np.transpose(shp_idx).reshape(-1) #[1, 3, 5, 2, 4, 6]
shp_idx = [ 0] + list(shp_idx) #[0, 1, 3, 5, 2, 4, 6]
shp_out = [-1] + list(shp_out) #[-1, H', W', C']
tsr_out = tf.reshape(tsr_out, shp_int) #(N, H', h, W', w, C', c)
tsr_out = tf.transpose(tsr_out, shp_idx) #(N, H', W', C', h, w, c)
tsr_out = tf.reshape(tsr_out, [-1, num_int[l]]) #(N*H'*W'*C', h*w*c)
if use_attn:
params['affine'] = {'dim': num_int[l], 'use_bias': True}
tsr_qry = affine1(tsr_out, 3*l+0, params, mtrain) #(N*H'*W'*C', h*w*c)
tsr_key = affine1(tsr_out, 3*l+1, params, mtrain) #(N*H'*W'*C', h*w*c)
tsr_vau = affine1(tsr_out, 3*l+2, params, mtrain) #(N*H'*W'*C', h*w*c)
tsr_qry = tf.reshape(tsr_qry, [bat_siz, -1, num_int[l]]) #(N, H'*W'*C', h*w*c)
tsr_key = tf.reshape(tsr_key, [bat_siz, -1, num_int[l]]) #(N, H'*W'*C', h*w*c)
tsr_vau = tf.reshape(tsr_vau, [bat_siz, -1, num_int[l]]) #(N, H'*W'*C', h*w*c )
qry_key = tf.matmul(tsr_qry, tsr_key, transpose_b=True) #(N, H'*W'*C', H'*W'*C')
qry_key = tf.nn.softmax(qry_key, axis=-1) #(N, H'*W'*C', H'*W'*C')
tsr_att = tf.matmul(qry_key, tsr_vau) #(N, H'*W'*C', h*w*c)
tsr_att = tf.reshape(tsr_att, [-1, num_int[l]]) #(N*H'*W'*C', h*w*c)
tsr_out = tsr_out + tsr_att * gamma[l] #(N*H'*W'*C', h*w*c)
params['affine'] = {'dim': num_out[l], 'use_bias': False}
tsr_out = affine_bn_relu1(tsr_out, l, params, mtrain) #(N*H'*W'*C', h'*w'*c')
if use_drop:
params['dropout'] = {'keep_p': keep_p, 'shape': None}
tsr_out = dropout1(tsr_out, l, params, mtrain) #(N*H'*W'*C', h'*w'*c')
tsr_out = tf.reshape(tsr_out, [bat_siz, -1, num_out[l]]) #(N, H'*W'*C', h'*w'*c')
tsr_out = tf.transpose(tsr_out, [0, 2, 1]) #(N, h'*w'*c', H'*W'*C')
tsr_out = tf.reshape(tsr_out, shp_out) #(N*h'*w'*c', H', W', C')
'''
prt_opa = tf.print([gamma])
with tf.control_dependencies([prt_opa]):
tsr_out = tf.identity(tsr_out)
'''
#tsr_out-->(N*h'*w'*c'*h"*w"*c", H', W', C')
shp_int = get_shape(tensor_in) #[N, H, W, C]
bat_siz = shp_int[0] # N
shp_int = np.asarray(shp_int[1:]) #[H, W, C]
shape = np.concatenate([shape[:, 1], [shp_out[1:]]], axis=0) #[[h', w', c'], [h", w", c"], [H', W', C']]
shp_out0 = [bat_siz] + list(np.reshape(shape, -1)) #[N, h', w', c', h", w", c", H', W', C']
shp_out1 = [bat_siz] + list(np.prod(shape, axis=0)) #[N, h'*h"*H', w'*w"*W', c'*c"*C']
shp_idx = np.arange(1, len(shp_out0)) #[1, 2, 3, 4, 5, 6, 7, 8, 9]
shp_idx = np.reshape(shp_idx, shape.shape) #[[1, 2, 3], [4, 5, 6], [7, 8, 9]]
shp_idx = np.transpose(shp_idx) #[[1, 4, 7], [2, 5, 8], [3, 6, 9]]
shp_idx = shp_idx[:, ::-1].reshape(-1) #[7, 4, 1, 8, 5, 2, 9, 6, 3]
shp_idx = [0] + list(shp_idx) #[0, 7, 4, 1, 8, 5, 2, 9, 6, 3]
tsr_out = tf.reshape (tsr_out, shp_out0) #(N, h', w', c', h", w", c", H', W', C')
tsr_out = tf.transpose(tsr_out, shp_idx ) #(N, H', h", h', W', w", w', C', c", c')
tensor_out = tf.reshape(tsr_out, shp_out1) #(N, H'*h"*h', W'*w"*w', C'*c"*c')
        if use_prev and np.all(np.asarray(get_shape(tensor_out)) == np.asarray(get_shape(tensor_in))):
tensor_out = tensor_out + tensor_in
print_activations(tensor_out)
return tensor_out
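#A minimal NumPy sketch (illustrative only) of one grouping level in group1: split
#[H, W, C] into blocks of shape[l][0], flatten each block for the affine map to
#shape[l][1], and stack the blocks onto the batch axis, exactly as the
#reshape/transpose pair above does.
def _demo_group1_level(N=2, H=8, W=8, C=4, h=4, w=4, c=2):
    x = np.random.randn(N, H, W, C)
    x = x.reshape(N, H//h, h, W//w, w, C//c, c)
    x = x.transpose(0, 1, 3, 5, 2, 4, 6)           #(N, H', W', C', h, w, c)
    return x.reshape(-1, h*w*c)                    #(N*H'*W'*C', h*w*c) -> affine input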
def group1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    1. Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, here the feature-map volume H*W*C); this defines the grouping used for the local full connections
    2. The parameter complexity is ∑(i=1..n) (x_i)^2
    3. Batch norm is applied only over the dimensions other than the filter-output dimension, since the filter outputs are the features this layer emphasizes
    4. This function includes an attention mechanism over the backbone that clusters similar features, so that even with a small volume the filters can still map similar features to distinct ones;
       this not only improves the network's abstraction and expressive power, but also further enlarges its receptive field.
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
shape = params['group']['shape'] #[[[7, 4, 1], [7, 4, 1]], [[4, 7, 1], [4, 7, 1]]]
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
tsr_out = tensor_in
shape = np.asarray(shape)
num_int = np.prod(shape[:, 0], axis=1) #[h1*w1*c1, h2*w2*c2]
num_out = np.prod(shape[:, 1], axis=1) #[h'*w'*c', h"*w"*c"]
with tf.variable_scope('group1_'+str(layer), reuse=reuse) as scope:
for l in range(len(shape)):
shp_int = get_shape(tsr_out) #[N, H, W, C]
bat_siz = shp_int[0] # N
shp_int = np.asarray(shp_int[1:]) #[H, W, C]
shp_out = shp_int // shape[l][0] #[H', W', C']
shp_int = np.stack([shp_out, shape[l][0]], axis=-1) #[[H', h], [W', w], [C', c]]
shp_int = np.reshape(shp_int, -1) #[H', h, W', w, C', c]
shp_int = [bat_siz] + list(shp_int) #[N, H', h, W', w, C', c]
shp_idx = np.arange(1, len(shp_int)).reshape([-1, 2]) #[[1, 2], [3, 4], [5, 6]]
shp_idx = np.transpose(shp_idx).reshape(-1) #[1, 3, 5, 2, 4, 6]
shp_idx = [ 0] + list(shp_idx) #[0, 1, 3, 5, 2, 4, 6]
shp_out = [-1] + list(shp_out) #[-1, H', W', C']
tsr_out = tf.reshape(tsr_out, shp_int) #(N, H', h, W', w, C', c)
tsr_out = tf.transpose(tsr_out, shp_idx) #(N, H', W', C', h, w, c)
tsr_out = tf.reshape(tsr_out, [-1, num_int[l]]) #(N*H'*W'*C', h*w*c)
params['affine'] = {'dim': num_out[l], 'use_bias': False}
tsr_out = affine_bn_relu1(tsr_out, l, params, mtrain) #(N*H'*W'*C', h'*w'*c')
tsr_out = tf.reshape(tsr_out, [bat_siz, -1, num_out[l]]) #(N, H'*W'*C', h'*w'*c')
tsr_out = tf.transpose(tsr_out, [0, 2, 1]) #(N, h'*w'*c', H'*W'*C')
tsr_out = tf.reshape(tsr_out, shp_out) #(N*h'*w'*c', H', W', C')
'''
prt_opa = tf.print([gamma])
with tf.control_dependencies([prt_opa]):
tsr_out = tf.identity(tsr_out)
'''
#tsr_out-->(N*h'*w'*c'*h"*w"*c", H', W', C')
shp_int = get_shape(tensor_in) #[N, H, W, C]
bat_siz = shp_int[0] # N
shp_int = np.asarray(shp_int[1:]) #[H, W, C]
shape = np.concatenate([shape[:, 1], [shp_out[1:]]], axis=0) #[[h', w', c'], [h", w", c"], [H', W', C']]
shp_out0 = [bat_siz] + list(np.reshape(shape, -1)) #[N, h', w', c', h", w", c", H', W', C']
shp_out1 = [bat_siz] + list(np.prod(shape, axis=0)) #[N, h'*h"*H', w'*w"*W', c'*c"*C']
shp_idx = np.arange(1, len(shp_out0)) #[1, 2, 3, 4, 5, 6, 7, 8, 9]
shp_idx = np.reshape(shp_idx, shape.shape) #[[1, 2, 3], [4, 5, 6], [7, 8, 9]]
shp_idx = np.transpose(shp_idx) #[[1, 4, 7], [2, 5, 8], [3, 6, 9]]
shp_idx = shp_idx[:, ::-1].reshape(-1) #[7, 4, 1, 8, 5, 2, 9, 6, 3]
shp_idx = [0] + list(shp_idx) #[0, 7, 4, 1, 8, 5, 2, 9, 6, 3]
tsr_out = tf.reshape (tsr_out, shp_out0) #(N, h', w', c', h", w", c", H', W', C')
tsr_out = tf.transpose(tsr_out, shp_idx ) #(N, H', h", h', W', w", w', C', c", c')
tensor_out = tf.reshape(tsr_out, shp_out1) #(N, H'*h"*h', W'*w"*w', C'*c"*c')
print_activations(tensor_out)
return tensor_out
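#A minimal NumPy sketch (illustrative only) of the final reassembly above: the
#per-level output factors stacked on the batch axis are interleaved back into a
#single feature map of shape (N, H'*h"*h', W'*w"*w', C'*c"*c').
def _demo_group1_reassembly(N=1, Hp=2, Wp=2, Cp=2, h1=2, w1=2, c1=2, h2=2, w2=2, c2=2):
    x = np.random.randn(N, h1, w1, c1, h2, w2, c2, Hp, Wp, Cp)
    x = x.transpose(0, 7, 4, 1, 8, 5, 2, 9, 6, 3)  #(N, H', h", h', W', w", w', C', c", c')
    return x.reshape(N, Hp*h2*h1, Wp*w2*w1, Cp*c2*c1)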
'''
def group_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
output_shape = params['group_unit']['output_shape'] #(H', W', C')
bottle_shape = params['group_unit']['bottle_shape'] #(H", W", C")
filter_shape = params['group_unit']['filter_shape'] #(h, w, c)
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
x_shape = tensor_in.get_shape().as_list() #(N, H, W, C)
x_shape = get_shape(tensor_in) #(N, H, W, C)
with tf.variable_scope('group_unit1_'+str(layer)) as scope:
        if np.any(np.asarray(x_shape[1:]) != np.asarray(output_shape)): #depthwise separable convolution!!!
number = output_shape[-1] // x_shape[-1]
shape = [3, 3]
stride = np.asarray(x_shape[1:3]) // np.asarray(output_shape[0:2])
params['conv'] = {'number': number, 'shape': shape, 'rate': [1, 1], 'stride': stride, 'padding': 'SAME'}
shortcut = conv_bn3(tensor_in, 0, params, mtrain)
else:
shortcut = tensor_in
params['group'] = {'output_shape': bottle_shape, 'filter_shape': filter_shape}
residual = group_bn_relu1(tensor_in, 0, params, mtrain)
params['group'] = {'output_shape': bottle_shape, 'filter_shape': filter_shape}
residual = group_bn_relu1(residual, 1, params, mtrain)
params['group'] = {'output_shape': output_shape, 'filter_shape': filter_shape}
residual = group_bn1(residual, 0, params, mtrain)
tensor_out = relu1(shortcut+residual, 0, params, mtrain)
return tensor_out
'''
def group1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
    1. Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, here the feature-map volume H*W*C); this defines the grouping used for the local full connections
    2. The parameter complexity is ∑(i=1..n) (x_i)^2
    3. Batch norm is applied only over the dimensions other than the filter-output dimension, since the filter outputs are the features this layer emphasizes
    4. This function includes an attention mechanism over the backbone that clusters similar features, so that even with a small volume the filters can still map similar features to distinct ones;
       this not only improves the network's abstraction and expressive power, but also further enlarges its receptive field.
'''
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
shape = params['group']['shape'] #[[[7, 4, 1], [7, 4, 1]], [[4, 7, 1], [4, 7, 1]]]
num_loop = params['group']['num_loop'] #[2, 2]
use_attn = params['group']['use_attn']
use_drop = params['group']['use_drop']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
tsr_out = tensor_in
shape = np.asarray(shape)
num_int = np.prod(shape[:, 0], axis=1) #[h1*w1*c1, h2*w2*c2]
num_out = np.prod(shape[:, 1], axis=1) #[h'*w'*c', h"*w"*c"]
with tf.variable_scope('group1_'+str(layer), reuse=reuse) as scope:
if use_attn:
gamma = tf.get_variable(name='gamma', shape=[len(shape)], dtype=dtype, \
initializer=tf.constant_initializer(0.0), trainable=trainable)
for l in range(len(shape)):
shp_int = get_shape(tsr_out) #[N, H, W, C]
bat_siz = shp_int[0] # N
shp_int = np.asarray(shp_int[1:]) #[H, W, C]
shp_out = shp_int // shape[l][0] #[H', W', C']
shp_int = np.stack([shp_out, shape[l][0]], axis=-1) #[[H', h], [W', w], [C', c]]
shp_int = np.reshape(shp_int, -1) #[H', h, W', w, C', c]
shp_int = [bat_siz] + list(shp_int) #[N, H', h, W', w, C', c]
shp_idx = np.arange(1, len(shp_int)).reshape([-1, 2]) #[[1, 2], [3, 4], [5, 6]]
shp_idx = np.transpose(shp_idx).reshape(-1) #[1, 3, 5, 2, 4, 6]
shp_idx = [ 0] + list(shp_idx) #[0, 1, 3, 5, 2, 4, 6]
shp_out = [-1] + list(shp_out) #[-1, H', W', C']
tsr_out = tf.reshape(tsr_out, shp_int) #(N, H', h, W', w, C', c)
tsr_out = tf.transpose(tsr_out, shp_idx) #(N, H', W', C', h, w, c)
tsr_out = tf.reshape(tsr_out, [-1, num_int[l]]) #(N*H'*W'*C', h*w*c)
if use_attn:
params['affine'] = {'dim': num_int[l], 'use_bias': True}
tsr_qry = affine1(tsr_out, 3*l+0, params, mtrain) #(N*H'*W'*C', h*w*c)
tsr_key = affine1(tsr_out, 3*l+1, params, mtrain) #(N*H'*W'*C', h*w*c)
tsr_vau = affine1(tsr_out, 3*l+2, params, mtrain) #(N*H'*W'*C', h*w*c)
tsr_qry = tf.reshape(tsr_qry, [bat_siz, -1, num_int[l]]) #(N, H'*W'*C', h*w*c)
tsr_key = tf.reshape(tsr_key, [bat_siz, -1, num_int[l]]) #(N, H'*W'*C', h*w*c)
tsr_vau = tf.reshape(tsr_vau, [bat_siz, -1, num_int[l]]) #(N, H'*W'*C', h*w*c )
qry_key = tf.matmul(tsr_qry, tsr_key, transpose_b=True) #(N, H'*W'*C', H'*W'*C')
qry_key = tf.nn.softmax(qry_key, axis=-1) #(N, H'*W'*C', H'*W'*C')
tsr_att = tf.matmul(qry_key, tsr_vau) #(N, H'*W'*C', h*w*c)
tsr_att = tf.reshape(tsr_att, [-1, num_int[l]]) #(N*H'*W'*C', h*w*c)
tsr_out = tsr_out + tsr_att * gamma[l] #(N*H'*W'*C', h*w*c)
params['affine'] = {'dim': num_out[l], 'use_bias': False}
for i in range(num_loop[l]):
tsr_out = affine_bn_relu1(tsr_out, num_loop[l]*l+i, params, mtrain) #(N*H'*W'*C', h'*w'*c')
params['dropout'] = {'keep_p': 0.9, 'shape': None}
if use_drop:
tsr_out = dropout1(tsr_out, l, params, mtrain) #(N*H'*W'*C', h'*w'*c')
tsr_out = tf.reshape(tsr_out, [bat_siz, -1, num_out[l]]) #(N, H'*W'*C', h'*w'*c')
tsr_out = tf.transpose(tsr_out, [0, 2, 1]) #(N, h'*w'*c', H'*W'*C')
tsr_out = tf.reshape(tsr_out, shp_out) #(N*h'*w'*c', H', W', C')
'''
prt_opa = tf.print([gamma])
with tf.control_dependencies([prt_opa]):
tsr_out = tf.identity(tsr_out)
'''
#tsr_out-->(N*h'*w'*c'*h"*w"*c", H', W', C')
shp_int = get_shape(tensor_in) #[N, H, W, C]
bat_siz = shp_int[0] # N
shp_int = np.asarray(shp_int[1:]) #[H, W, C]
shape = np.concatenate([shape[:, 1], [shp_out[1:]]], axis=0) #[[h', w', c'], [h", w", c"], [H', W', C']]
shp_out0 = [bat_siz] + list(np.reshape(shape, -1)) #[N, h', w', c', h", w", c", H', W', C']
shp_out1 = [bat_siz] + list(np.prod(shape, axis=0)) #[N, h'*h"*H', w'*w"*W', c'*c"*C']
shp_idx = np.arange(1, len(shp_out0)) #[1, 2, 3, 4, 5, 6, 7, 8, 9]
shp_idx = np.reshape(shp_idx, shape.shape) #[[1, 2, 3], [4, 5, 6], [7, 8, 9]]
shp_idx = np.transpose(shp_idx) #[[1, 4, 7], [2, 5, 8], [3, 6, 9]]
shp_idx = shp_idx[:, ::-1].reshape(-1) #[7, 4, 1, 8, 5, 2, 9, 6, 3]
shp_idx = [0] + list(shp_idx) #[0, 7, 4, 1, 8, 5, 2, 9, 6, 3]
tsr_out = tf.reshape (tsr_out, shp_out0) #(N, h', w', c', h", w", c", H', W', C')
tsr_out = tf.transpose(tsr_out, shp_idx ) #(N, H', h", h', W', w", w', C', c", c')
tsr_out = tf.reshape (tsr_out, shp_out1) #(N, H'*h"*h', W'*w"*w', C'*c"*c')
#tsr_out = relu1(tsr_out, l, params, mtrain) #(N, H'*h"*h', W'*w"*w', C'*c"*c')
return tsr_out
att_tsr = tsr_out * alpha[l] #(N, H'*W'*C', h'*w'*c')
att_key = tf.einsum('ijk, imk->ijm', att_tsr, att_tsr) #(N, H'*W'*C', H'*W'*C')
att_key = tf.nn.softmax(att_key, axis=-1) #(N, H'*W'*C', H'*W'*C')
#att_num= tf.sqrt(tf.cast(num_out[l], dtype=tf.float32)) # sqrt(h'*w'*c')
#att_key= tf.nn.softmax(att_key/att_num, axis=-1) #(N, H'*W'*C', H'*W'*C')
tsr_att = tf.einsum('ijk, ikm->ijm', att_key, tsr_out) #(N, H'*W'*C', h'*w'*c')
tsr_out = tsr_out + tsr_att * beta[l] #(N, H'*W'*C', h'*w'*c')
if use_attn:
alpha = tf.get_variable(name='alpha', shape=[len(shape)], dtype=dtype, \
initializer=tf.truncated_normal_initializer(stddev=wscale), trainable=trainable)
beta = tf.get_variable(name='beta', shape=[len(shape)], dtype=dtype, \
initializer=tf.truncated_normal_initializer(stddev=wscale), trainable=trainable)
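#The attention used above (tsr_qry/tsr_key/tsr_vau, and the stray alpha/beta variant) is plain dot-product
#self-attention over the grouped rows: out = x + gate * softmax(Q K^T) V. A minimal NumPy sketch of that
#computation, with illustrative names and shapes (these are not the graph functions used above):
def _softmax_attention_sketch(x, wq, wk, wv, gamma):
    """x: (groups, dim); wq/wk/wv: (dim, dim) projections; gamma: scalar residual gate."""
    import numpy as np
    q, k, v = x @ wq, x @ wk, x @ wv                     #project into queries, keys, values
    logits = q @ k.T                                     #(groups, groups) pairwise similarities
    logits = logits - logits.max(axis=-1, keepdims=True) #stabilize the softmax numerically
    attn = np.exp(logits)
    attn = attn / attn.sum(axis=-1, keepdims=True)       #row-wise softmax, as tf.nn.softmax(axis=-1)
    return x + gamma * (attn @ v)                        #gated residual, matching tsr_out + tsr_att * gamma[l]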
def group1(tensor_in=None, layer=0, params=None, mtrain=None):
'''
1. Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, here the feature-map volume H*W*C), so
   the dimension can be split into groups for locally fully-connected layers.
2. The parameter complexity is ∑(i=1..n) (x_i)^2.
3. Batch norm is applied over every dimension except the filter-output dimension, since the filter outputs
   are the features this layer focuses on.
4. This function also adds an attention mechanism to the backbone that gathers similar features, so that even
   small-volume filters can still map similar features onto distinct ones; this raises the network's
   abstraction and expressive power and further enlarges its receptive field.
'''
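#A worked instance of point 1 above (numbers are illustrative, not from a real configuration): with
#d = H*W*C = 8*8*8 = 512 and filter_shape = [8, 8, 8], each axis factors as a single group of size 8,
#so the per-axis locally fully-connected maps cost 8^2 + 8^2 + 8^2 = 192 parameters, versus
#512^2 = 262144 for one dense layer over the flattened volume.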
output_shape = params['group']['output_shape'] #[H', W', C']
filter_shape = params['group']['filter_shape'] #[h, w, c] [8, 8, 8]
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in) #[N, H, W, C]
#When reducing dimensionality, reduce right at the entry to the stacked layers; when increasing, increase only at their final output. This saves memory and keeps the bottleneck connection without losing too much information.
out_in = np.asarray(output_shape) / np.asarray(x_shape[1:])
#Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, e.g. the spatial area or the channel count), so the dimension can be grouped for locally fully-connected layers; parameter complexity is ∑(i=1..n) (x_i)^2.
t_num = []
t_res = []
t_add = []
for i in range(len(filter_shape)):
d_add = False
d_num = np.log(x_shape[1+i]) // np.log(filter_shape[i])
d_num = d_num.astype(dtype=np.int32, copy=False)
d_shp = np.power(filter_shape[i], d_num)
d_shp = d_shp.astype(dtype=np.int32, copy=False)
d_res = x_shape[1+i] // d_shp
d_shp = d_shp * d_res
assert d_shp == x_shape[1+i], 'The filter_shape[%d] and x_shape[%d] do not match!' %(i, i+1)
if d_res != 1 or x_shape[1+i] == 1:
d_num = d_num + 1
d_add = True
t_num.append(d_num)
t_res.append(d_res)
t_add.append(d_add)
#t_num=[2, 1, 1], t_res=[2, 1, 1], t_add=[True, False, True], x_shape=[N, 8, 4, 1], filter_shape=[4, 4, 1]
#[[[4, 4, 1], [4, 4, 1]], [[2, 1, 1], [2, 1, 1]]]
m_num = np.amax(t_num)
#Input/output parameters; keep t_shp[i][0] and t_shp[i][1] as close to each other as possible, which gives a high parameter utilization.
t_shp = [] #[[[h, w, c], [h', w', c']]]*np.amax(t_num)
for i in range(np.amax(t_num)):
d_shp = []
for j in range(len(filter_shape)):
return #NOTE: the construction of t_shp is left unfinished here, so group1 returns None
def group_bn1(tensor_in=None, layer=0, params=None, mtrain=None):
#Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, e.g. the spatial area or the channel count), so the dimension can be grouped for locally fully-connected layers; parameter complexity is ∑(i=1..n) (x_i)^2.
#No rate is needed, because this convolution already relates the entire image.
#No padding is needed, because this convolution only uses the valid pixels of the image.
#The stride is not controlled by the convolution but by avg_pool: avg_pool first, then relu.
#Batch norm is applied over the batch only, not over channels or space, because channels and space are themselves features.
#When the spatial area or the channel count is small, there is no need to split into blocks for the fully-connected layers.
output_shape = params['group']['output_shape'] #[H', W', C']
filter_shape = params['group']['filter_shape'] #[h, w, c] [8, 8, 64]
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list() #(N, H, W, C)
x_shape = get_shape(tensor_in)
#When reducing dimensionality, reduce right at the entry to the stacked layers; when increasing, increase only at their final output. This saves memory and keeps the bottleneck connection without losing too much information.
out_in = np.asarray(output_shape) / np.asarray(x_shape[1:])
assert (out_in[0]<=1 and out_in[1]<=1) or (out_in[0]>1 and out_in[1]>1), 'The space shape of output is wrong!'
#Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, e.g. the spatial area or the channel count), so the dimension can be grouped for locally fully-connected layers; parameter complexity is ∑(i=1..n) (x_i)^2.
#Number of spatial grouping steps p_num
p_add = False
p_num = np.log(np.asarray(x_shape[1:3])) // np.log(np.asarray(filter_shape[0:2]))
p_num = p_num.astype(dtype=np.int32, copy=False)
assert p_num[0] == p_num[1], 'The space shape of filter is wrong!'
p_num = p_num[0]
p_shp = np.power(np.asarray(filter_shape[0:2]), p_num)
p_shp = p_shp.astype(dtype=np.int32, copy=False)
p_res = np.asarray(x_shape[1:3]) // p_shp
p_shp = p_shp * p_res
assert np.all(p_shp == np.asarray(x_shape[1:3])), 'The space shape of filter is wrong!'
if np.any(p_res != np.array([1, 1])) or np.all(np.asarray(x_shape[1:3]) == np.array([1, 1])):
p_num = p_num + 1
p_add = True
#Number of channel grouping steps c_num
c_add = False
c_num = np.log(x_shape[-1]) // np.log(filter_shape[-1])
c_num = c_num.astype(dtype=np.int32, copy=False)
c_shp = np.power(filter_shape[-1], c_num)
c_shp = c_shp.astype(dtype=np.int32, copy=False)
c_res = x_shape[-1] // c_shp
c_shp = c_shp * c_res
assert c_shp == x_shape[-1], 'The channel shape of filter is wrong!'
if c_res != 1 or x_shape[-1] == 1:
c_num = c_num + 1
c_add = True
#Spatial grouping parameters; keep pos_shp0 and pos_shp1 as close to each other as possible for high parameter utilization.
p_shape = [] #[[8, 8, p1, p2]]*p_number; spatial only: an 8*8 × p1*p2 fully-connected map applied p_number times, implemented with conv2d
for i in range(p_num):
pos_shp = []
#If the area grows and a p_res exists, put it in the last layer; if the area shrinks or stays equal and a p_res exists, put it in the first layer.
#Either out_in[0]<=1 or out_in[0]>1 must hold, and both i==0 and i==p_num-1 are always visited, so any p_res is guaranteed to be handled.
pos_shp0 = np.asarray(filter_shape[0:2])
if (i == 0 and out_in[0] <= 1) or (i == p_num - 1 and out_in[0] > 1):
if p_add:
pos_shp0 = p_res
pos_shp1 = pos_shp0 * out_in[0:2]
pos_shp1 = pos_shp1.astype(dtype=np.int32, copy=False)
assert np.all(pos_shp1 / pos_shp0 == out_in[0:2]), 'The space shape of output is wrong!'
else:
pos_shp1 = pos_shp0
pos_shp.extend(list(pos_shp0))
pos_shp.extend(list(pos_shp1))
p_shape.append(pos_shp)
#Channel grouping parameters; keep chn_shp0 and chn_shp1 as close to each other as possible for high parameter utilization.
c_shape = [] #[[16, c]]*c_number; channels only: a 16 × c fully-connected map applied c_number times, implemented with conv1d
for i in range(c_num):
chn_shp = []
#If the channel count grows and a c_res exists, put it in the last layer; if it shrinks or stays equal and a c_res exists, put it in the first layer.
#Either out_in[-1]<=1 or out_in[-1]>1 must hold, and both i==0 and i==c_num-1 are always visited, so any c_res is guaranteed to be handled.
chn_shp0 = filter_shape[-1]
if (i == 0 and out_in[-1] <= 1) or (i == c_num -1 and out_in[-1] > 1):
if c_add:
chn_shp0 = c_res
chn_shp1 = chn_shp0 * out_in[-1]
chn_shp1 = chn_shp1.astype(dtype=np.int32, copy=False)
assert chn_shp1 / chn_shp0 == out_in[-1], 'The channel shape of output is wrong!'
else:
chn_shp1 = chn_shp0
chn_shp.append(chn_shp0)
chn_shp.append(chn_shp1)
c_shape.append(chn_shp)
p_shp = np.prod(p_shape, axis=0)
p_srd = p_shp[0:2] // p_shp[2:]
assert np.all(p_shp[0:2] == np.asarray(x_shape[1:3])), 'The p_shape is wrong!'
c_shp = np.prod(c_shape, axis=0)
c_srd = c_shp[0] // c_shp[1]
assert c_shp[0] == x_shape[-1], 'The c_shape is wrong!'
def pos_group_bn1(tensor_in, layer):
with tf.variable_scope('pos_group_bn1_'+str(layer), reuse=reuse) as scope:
#Extract spatial features
x_shape = get_shape(tensor_in) #(N, H, W, C)
pra_num = x_shape[0] * x_shape[-1] # N*C
fet_pos = tf.transpose(tensor_in, [0, 3, 1, 2]) #(N, C, H, W)
fet_pos = tf.reshape(fet_pos, [-1, x_shape[1], x_shape[2]]) #(N*C, H, W)
def dispatch(fet_pos):
#fet_pos --> (H, W)
fet_shp = np.asarray(x_shape[1:3]) #[H, W]
fet_pos = tf.reshape(fet_pos, [1]+x_shape[1:3]+[1]) #(1, H, W, 1)
for i in range(len(p_shape)):
pos_shp = p_shape[i]
fet_shp = fet_shp // np.asarray(pos_shp[0:2]) #[H', W']
params['conv'] = {'number': pos_shp[2]*pos_shp[3], 'shape': pos_shp[0:2], 'rate': 1, 'stride': pos_shp[0:2], \
'padding': 'VALID', 'use_bias': False}
fet_pos = conv1(fet_pos, i, params, mtrain) #(C"', H', W', C")
fet_pos = tf.transpose(fet_pos, [0, 3, 1, 2]) #(C"', C", H', W') push the already-related features down so the next layer relates the rest
fet_pos = tf.reshape(fet_pos, [-1]+list(fet_shp)+[1]) #(C"', H', W', 1)
return fet_pos
fet_pos = tf.map_fn(dispatch, fet_pos, dtype=tf.float32, parallel_iterations=pra_num, \
back_prop=True, swap_memory=True, infer_shape=True) #(N*C, C"', 1, 1, 1)
fet_pos = tf.reshape(fet_pos, [-1, p_shp[2]*p_shp[3]]) #(N*C, C"') C"' is the spatial feature; BN should be applied over the other dimensions
fet_pos = batchnorm1(fet_pos, 0, params, mtrain) #(N*C, C"')
shape = [-1] + list(np.asarray(p_shape)[:, 2:].reshape(-1)) #restore the spatial dimensions
fet_pos = tf.reshape(fet_pos, shape) #(N*C, H, W)
perm = [0] + [x for x in range(1, 1+2*len(p_shape), 2)][::-1] + [x for x in range(2, 2+2*len(p_shape), 2)][::-1]
fet_pos = tf.transpose(fet_pos, perm) #(N*C, H, W)
fet_pos = tf.reshape(fet_pos, [-1, x_shape[-1], p_shp[2], p_shp[3]]) #(N, C, H, W)
fet_pos = tf.transpose(fet_pos, [0, 2, 3, 1]) #(N, H, W, C)
print_activations(fet_pos)
return fet_pos
def chn_group_bn1(tensor_in, layer):
with tf.variable_scope('chn_group_bn1_'+str(layer), reuse=reuse) as scope:
#Extract channel features
x_shape = get_shape(tensor_in) #(N, H, W, C)
pra_num = x_shape[0] * x_shape[1] * x_shape[2] # N*H*W
fet_chn = tf.reshape(tensor_in, [-1, x_shape[-1]]) #(N*H*W, C)
def dispatch(fet_chn):
#fet_chn --> (C)
fet_shp = x_shape[-1] #[C]
fet_chn = tf.reshape(fet_chn, [1, x_shape[-1], 1]) #(1, C, 1)
for i in range(len(c_shape)):
chn_shp = c_shape[i]
fet_shp = fet_shp // chn_shp[0] #[C']
params['conv'] = {'number': chn_shp[1], 'shape': chn_shp[0], 'rate': 1, 'stride': chn_shp[0], \
'padding': 'VALID', 'use_bias': False}
fet_chn = conv2(fet_chn, i, params, mtrain) #(C"', C', C")
fet_chn = tf.transpose(fet_chn, [0, 2, 1]) #(C"', C", C') push the already-related features down so the next layer relates the rest
fet_chn = tf.reshape(fet_chn, [-1, fet_shp, 1]) #(C"', C', 1)
return fet_chn
fet_chn = tf.map_fn(dispatch, fet_chn, dtype=tf.float32, parallel_iterations=pra_num, \
back_prop=True, swap_memory=True, infer_shape=True) #(N*H*W, C"', 1, 1)
fet_chn = tf.reshape(fet_chn, [-1, c_shp[1]]) #(N*H*W, C"') C"' is the channel feature; BN should be applied over the other dimensions
fet_chn = batchnorm1(fet_chn, 1, params, mtrain) #(N*H*W, C"')
shape = [-1] + list(np.asarray(c_shape)[:, 1:].reshape(-1)) #restore the channel dimensions
fet_chn = tf.reshape(fet_chn, shape) #(N*H*W, C)
perm = [0] + [x for x in range(1, 1+len(c_shape), 1)][::-1]
fet_chn = tf.transpose(fet_chn, perm) #(N*H*W, C)
fet_chn = tf.reshape(fet_chn, [-1, x_shape[1], x_shape[2], c_shp[1]]) #(N, H, W, C)
print_activations(fet_chn)
return fet_chn
with tf.variable_scope('group_bn1_'+str(layer), reuse=reuse) as scope:
fet_pos0 = pos_group_bn1(tensor_in, 0)
fet_chn0 = chn_group_bn1(tensor_in, 0)
fet_pos1 = pos_group_bn1(fet_chn0, 1)
fet_chn1 = chn_group_bn1(fet_pos0, 1)
tensor_out = fet_pos1 + fet_chn1
print_activations(tensor_out)
return tensor_out
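#pos_group_bn1 above implements a locally fully-connected layer as a strided VALID conv: each
#non-overlapping h*w block is mapped by one shared (h*w) x (p1*p2) matrix. A NumPy sketch of the
#equivalent reshape-based view (illustrative only; the real code runs conv1 inside tf.map_fn):
def _blockwise_fc_sketch(img, block, weight):
    """img: (H, W); block: (h, w) with H % h == 0 and W % w == 0; weight: (h*w, p)."""
    import numpy as np
    H, W = img.shape
    h, w = block
    x = img.reshape(H // h, h, W // w, w)              #split each axis into (blocks, within-block)
    x = x.transpose(0, 2, 1, 3).reshape(-1, h * w)     #one row per block: (H/h * W/w, h*w)
    return x @ weight                                  #the shared fully-connected map per block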
def group_bn_relu1(tensor_in=None, layer=0, params=None, mtrain=None):
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('group_bn_relu1_'+str(layer)) as scope:
bn = group_bn1(tensor_in, 0, params, mtrain)
tensor_out = relu1(bn, 0, params, mtrain)
return tensor_out
def group_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
output_shape = params['group_unit']['output_shape'] #(H', W', C')
bottle_shape = params['group_unit']['bottle_shape'] #(H", W", C")
filter_shape = params['group_unit']['filter_shape'] #(h, w, c)
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list() #(N, H, W, C)
x_shape = get_shape(tensor_in) #(N, H, W, C)
with tf.variable_scope('group_unit1_'+str(layer)) as scope:
if np.any(np.asarray(x_shape[1:]) != np.asarray(output_shape)): #depthwise separable convolution!!!
number = output_shape[-1] // x_shape[-1]
shape = [3, 3]
stride = np.asarray(x_shape[1:3]) // np.asarray(output_shape[0:2])
params['conv'] = {'number': number, 'shape': shape, 'rate': [1, 1], 'stride': stride, 'padding': 'SAME'}
shortcut = conv_bn3(tensor_in, 0, params, mtrain)
else:
shortcut = tensor_in
params['group'] = {'output_shape': bottle_shape, 'filter_shape': filter_shape}
residual = group_bn_relu1(tensor_in, 0, params, mtrain)
params['group'] = {'output_shape': bottle_shape, 'filter_shape': filter_shape}
residual = group_bn_relu1(residual, 1, params, mtrain)
params['group'] = {'output_shape': output_shape, 'filter_shape': filter_shape}
residual = group_bn1(residual, 0, params, mtrain)
tensor_out = relu1(shortcut+residual, 0, params, mtrain)
return tensor_out
def group_block1(tensor_in=None, layer=0, params=None, mtrain=None):
block_setting = params['group_block']['block_setting']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
tensor_out = tensor_in
out_list = []
for i, block in enumerate(block_setting):
output_shape, bottle_shape, filter_shape, unit_number, unit_trainable = block
params['com']['trainable'] = unit_trainable
with tf.variable_scope('group_block1_'+str(layer)+'_'+str(i)) as scope:
for j in range(unit_number):
params['group_unit'] = {'output_shape':output_shape, 'bottle_shape':bottle_shape, 'filter_shape':filter_shape}
tensor_out = group_unit1(tensor_out, j, params, mtrain)
out_list.append(tensor_out)
return out_list
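#A hypothetical block_setting for group_block1 (shapes invented for illustration, not the original
#training configuration):
#params['group_block'] = {'block_setting': [
#  #output_shape, bottle_shape, filter_shape, unit_number, unit_trainable
#  ([56, 56, 64], [56, 56, 32], [8, 8, 8], 3, True),
#  ([28, 28, 128], [28, 28, 64], [7, 7, 8], 4, True)]}
#out_list = group_block1(images, 0, params, mtrain)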
def atten1(tensor_in=None, layer=0, params=None, mtrain=None):
#For spatial relations: group_bn relates space within each channel independently, while attention relates space across all channels.
#For channel relations: group_bn relates channels at each spatial position independently, while attention relates channels across all positions.
#Attention is therefore the culmination of group_bn.
#Choose carefully which layers attention acts on: neither the spatial area nor the channel count should be too large or too small.
#
output_shape = params['atten']['output_shape'] #[H', W', C']
attent_shape = params['atten']['attent_shape'] #[h, w, c] [8, 8, 64]
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list() #(N, H, W, C)
x_shape = get_shape(tensor_in)
#When reducing dimensionality, reduce right at the entry to the stacked layers; when increasing, increase only at their final output. This saves memory and keeps the bottleneck connection without losing too much information.
out_in = np.asarray(output_shape) / np.asarray(x_shape[1:])
assert (out_in[0]<=1 and out_in[1]<=1) or (out_in[0]>1 and out_in[1]>1), 'The space shape of output is wrong!'
#Number of spatial grouping steps p_num
p_add = False
p_num = np.log(np.asarray(x_shape[1:3])) // np.log(np.asarray(attent_shape[0:2]))
p_num = p_num.astype(dtype=np.int32, copy=False)
assert p_num[0] == p_num[1], 'The space shape of attent is wrong!'
p_num = p_num[0]
p_shp = np.power(np.asarray(attent_shape[0:2]), p_num)
p_shp = p_shp.astype(dtype=np.int32, copy=False)
p_res = np.asarray(x_shape[1:3]) // p_shp
p_shp = p_shp * p_res
assert np.all(p_shp == np.asarray(x_shape[1:3])), 'The space shape of attent is wrong!'
if np.any(p_res != np.array([1, 1])) or np.all(np.asarray(x_shape[1:3]) == np.array([1, 1])):
p_num = p_num + 1
p_add = True
#Number of channel grouping steps c_num
c_add = False
c_num = np.log(x_shape[-1]) // np.log(attent_shape[-1])
c_num = c_num.astype(dtype=np.int32, copy=False)
c_shp = np.power(attent_shape[-1], c_num)
c_shp = c_shp.astype(dtype=np.int32, copy=False)
c_res = x_shape[-1] // c_shp
c_shp = c_shp * c_res
assert c_shp == x_shape[-1], 'The channel shape of attent is wrong!'
if c_res != 1 or x_shape[-1] == 1:
c_num = c_num + 1
c_add = True
#Spatial grouping parameters; keep pos_shp0 and pos_shp1 as close to each other as possible for high parameter utilization.
p_shape = [] #[[8, 8, p1, p2]]*p_number
for i in range(p_num):
pos_shp = []
#If the area grows and a p_res exists, put it in the last layer; if the area shrinks or stays equal and a p_res exists, put it in the first layer.
#Either out_in[0]<=1 or out_in[0]>1 must hold, and both i==0 and i==p_num-1 are always visited, so any p_res is guaranteed to be handled.
pos_shp0 = np.asarray(attent_shape[0:2])
if (i == 0 and out_in[0] <= 1) or (i == p_num - 1 and out_in[0] > 1):
if p_add:
pos_shp0 = p_res
pos_shp1 = pos_shp0 * out_in[0:2]
pos_shp1 = pos_shp1.astype(dtype=np.int32, copy=False)
assert np.all(pos_shp1 / pos_shp0 == out_in[0:2]), 'The space shape of output is wrong!'
else:
pos_shp1 = pos_shp0
pos_shp.extend(list(pos_shp0))
pos_shp.extend(list(pos_shp1))
p_shape.append(pos_shp)
#Channel grouping parameters; keep chn_shp0 and chn_shp1 as close to each other as possible for high parameter utilization.
c_shape = [] #[[16, c]]*c_number
for i in range(c_num):
chn_shp = []
#If the channel count grows and a c_res exists, put it in the last layer; if it shrinks or stays equal and a c_res exists, put it in the first layer.
#Either out_in[-1]<=1 or out_in[-1]>1 must hold, and both i==0 and i==c_num-1 are always visited, so any c_res is guaranteed to be handled.
chn_shp0 = attent_shape[-1]
if (i == 0 and out_in[-1] <= 1) or (i == c_num -1 and out_in[-1] > 1):
if c_add:
chn_shp0 = c_res
chn_shp1 = chn_shp0 * out_in[-1]
chn_shp1 = chn_shp1.astype(dtype=np.int32, copy=False)
assert chn_shp1 / chn_shp0 == out_in[-1], 'The channel shape of output is wrong!'
else:
chn_shp1 = chn_shp0
chn_shp.append(chn_shp0)
chn_shp.append(chn_shp1)
c_shape.append(chn_shp)
p_shp = np.prod(p_shape, axis=0)
p_srd = p_shp[0:2] // p_shp[2:]
assert np.all(p_shp[0:2] == np.asarray(x_shape[1:3])), 'The p_shape is wrong!'
c_shp = np.prod(c_shape, axis=0)
c_srd = c_shp[0] // c_shp[1]
assert c_shp[0] == x_shape[-1], 'The c_shape is wrong!'
def pos_atten1(tensor_in, layer):
with tf.variable_scope('pos_atten1_'+str(layer), reuse=reuse) as scope:
#Relate spatial features
x_shape = get_shape(tensor_in) #[N, H, W, C]
pra_num = x_shape[0] # N
def dispatch(fet_pos):
#fet_pos --> (H, W, C)
fet_shp = np.asarray(x_shape[1:3]) #[H, W]
for i in range(len(p_shape)):
pos_shp = p_shape[i]
fet_shp = fet_shp // np.asarray(pos_shp[0:2]) #[H', W']
pra_num = fet_shp[0] * fet_shp[1] # H'*W'
def cond(i, fet_out):
c = tf.less(i, fet_shp[0] * fet_shp[1])
return c
def body(i, fet_out):
ycd = i // fet_shp[1]
xcd = i % fet_shp[1]
beg = [ycd*pos_shp[0], xcd*pos_shp[1], 0]
siz = [ pos_shp[0], pos_shp[1], -1]
fet0 = tf.slice(fet_pos, beg, siz) #(h, w, C)
if np.any(np.asarray(pos_shp[0:2]) != np.asarray(pos_shp[2:])):
fet1 = tf.image.resize_images(fet0, pos_shp[2:], method=tf.image.ResizeMethod.BILINEAR, \
align_corners=False, preserve_aspect_ratio=False)
else:
fet1 = fet0
return [i+1, fet_out]
i = tf.constant(0)
[i, fet_out] = tf.while_loop(cond, body, loop_vars=[i, fet_out], shape_invariants=None, \
parallel_iterations=pra_num, back_prop=True, swap_memory=True)
fet_pos = fet_out
return fet_pos
def pos_group_bn1(tensor_in, layer):
with tf.variable_scope('pos_group_bn1_'+str(layer), reuse=reuse) as scope:
#Extract spatial features
x_shape = get_shape(tensor_in) #(N, H, W, C)
pra_num = x_shape[0] * x_shape[-1] # N*C
fet_pos = tf.transpose(tensor_in, [0, 3, 1, 2]) #(N, C, H, W)
fet_pos = tf.reshape(fet_pos, [-1, x_shape[1], x_shape[2]]) #(N*C, H, W)
def dispatch(fet_pos):
fet_shp = np.asarray(x_shape[1:3]) #[H, W]
fet_pos = tf.reshape(fet_pos, [1]+x_shape[1:3]+[1]) #(1, H, W, 1)
for i in range(len(p_shape)):
pos_shp = p_shape[i] #[H', W']
fet_shp = fet_shp // np.asarray(pos_shp[0:2])
params['conv'] = {'number': pos_shp[2]*pos_shp[3], 'shape': pos_shp[0:2], 'rate': 1, 'stride': pos_shp[0:2], \
'padding': 'VALID', 'use_bias': False}
fet_pos = conv1(fet_pos, i, params, mtrain) #(C"', H', W', C")
fet_pos = tf.transpose(fet_pos, [0, 3, 1, 2]) #(C"', C", H', W') push the already-related features down so the next layer relates the rest
fet_pos = tf.reshape(fet_pos, [-1]+list(fet_shp)+[1]) #(C"', H', W', 1)
return fet_pos
fet_pos = tf.map_fn(dispatch, fet_pos, dtype=tf.float32, parallel_iterations=pra_num, \
back_prop=True, swap_memory=True, infer_shape=True) #(N*C, C"', 1, 1, 1)
fet_pos = tf.reshape(fet_pos, [-1, p_shp[2]*p_shp[3]]) #(N*C, C"') C"' is the spatial feature; BN should be applied over the other dimensions
fet_pos = batchnorm1(fet_pos, 0, params, mtrain) #(N*C, C"')
shape = [-1] + list(np.asarray(p_shape)[:, 2:].reshape(-1)) #restore the spatial dimensions
fet_pos = tf.reshape(fet_pos, shape) #(N*C, H, W)
perm = [0] + [x for x in range(1, 1+2*len(p_shape), 2)][::-1] + [x for x in range(2, 2+2*len(p_shape), 2)][::-1]
fet_pos = tf.transpose(fet_pos, perm) #(N*C, H, W)
fet_pos = tf.reshape(fet_pos, [-1, x_shape[-1], p_shp[2], p_shp[3]]) #(N, C, H, W)
fet_pos = tf.transpose(fet_pos, [0, 2, 3, 1]) #(N, H, W, C)
print_activations(fet_pos)
return fet_pos
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
shape = params['attent_unit']['shape']
rate = params['attent_unit']['rate']
depth_key = params['attent_unit']['depth_key']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
x_shape = tensor_in.get_shape().as_list()
depth_input = x_shape[-1]
height = x_shape[ 1]
width = x_shape[ 2]
with tf.variable_scope('attent_unit1_'+str(layer)) as scope:
#Use 3x3 convs to isolate the features
params['conv'] = {'number':depth_input, 'shape':[3, 3], 'rate':1, 'stride':[1, 1], 'padding':'SAME'}
tensor_in = conv_bn_relu1(tensor_in, 0, params, mtrain)
tensor_in = conv_bn_relu1(tensor_in, 1, params, mtrain)
#Keys should be related before the position vectors are injected; the position vectors mainly serve the center point, describing the positional relation between the center feature vector and the surrounding feature vectors.
params['conv'] = {'number':depth_key, 'shape':[1, 1], 'rate':1, 'stride':[1, 1], 'padding':'VALID', 'use_bias': True}
tensor_key = conv1(tensor_in, 0, params, mtrain)
params['conv'] = {'number':depth_input, 'shape':[1, 1], 'rate':1, 'stride':[1, 1], 'padding':'VALID', 'use_bias': True}
tensor_value = conv1(tensor_in, 0, params, mtrain)
#Get the relative_position_embeddings #(64, 64, 64)
PE = tf.get_variable(name='PE', shape=list(shape)+[depth_key], dtype=dtype, \
#initializer=tf.truncated_normal_initializer(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
regularizer=tf.contrib.layers.l2_regularizer(reg), trainable=trainable)
tensor_out = tf.TensorArray(dtype=tf.float32, size=height*width, dynamic_size=False, clear_after_read=True, \
infer_shape=True, element_shape=[depth_input+depth_key], colocate_with_first_write_call=True)
def cond(i, tensor_out):
c = tf.less(i, height*width)
return c
def body(i, tensor_out):
ycd = i // width
xcd = i % width
ymn = ycd - ((shape[0] - 1) // 2) * rate
xmn = xcd - ((shape[1] - 1) // 2) * rate
ycds = tf.concat([[ymn], tf.tile([rate], [shape[0]-1])], axis=0)
xcds = tf.concat([[xmn], tf.tile([rate], [shape[1]-1])], axis=0)
ycds = tf.cumsum(ycds, axis=0, exclusive=False, reverse=False)
xcds = tf.cumsum(xcds, axis=0, exclusive=False, reverse=False)
idxs = tf.where(tf.logical_and(ycds>=0, ycds<height))
ycds = tf.gather_nd(ycds, idxs)
idxs = tf.where(tf.logical_and(xcds>=0, xcds<width ))
xcds = tf.gather_nd(xcds, idxs)
ycds = tf.tile(ycds[:, tf.newaxis], [1, tf.shape(xcds)[0]])
xcds = tf.tile(xcds[tf.newaxis, :], [tf.shape(ycds)[0], 1])
crd0 = tf.stack([ycd, xcd], axis=0) #(2) actual center
crds0 = tf.stack([ycds, xcds], axis=-1) #(h, w, 2) actual coordinates
fets0 = tf.gather_nd(tensor_value, crds0) #(h, w, c) actual features
fets3 = tf.gather_nd(tensor_key, crds0) #(h, w, c') actual features
crd1 = (np.asarray(shape) - 1) // 2 #(2) relative center
crds1 = (crds0 - crd0) // rate #(h, w, 2) relative coordinates
crds1 = crds1 + crd1 #(h, w, 2) relative coordinates
fets1 = tf.gather_nd(PE, crds1) #(h, w, c) relative features
#fets2= tf.concat([fets0, fets1], axis=-1) #(h, w, c'') fused features
crd3 = crd0 - crds0[0, 0] #(2) relative coordinates (using index 1 for crd/crds gives the same result)
fet3 = tf.gather_nd(fets3, crd3) #(c') relative center
#Compute the attention
att3 = tf.einsum('ijk,k->ij', fets3, fet3) #(h, w)
att3 = tf.exp(att3 / tf.sqrt(tf.cast(depth_key, tf.float32))) #(h, w)
att3 = att3 / tf.reduce_sum(att3) #(h, w)
fet0 = tf.einsum('ij,ijk->k', att3, fets0) #(c)
fet1 = tf.einsum('ij,ijk->k', att3, fets1) #(c')
fet2 = tf.concat([fet0, fet1], axis=-1) #(c'')
#fet2 = tf.einsum('ij,ijk->k', att3, fets2) #(c'')
tensor_out = tensor_out.write(i, fet2) #(c'')
return [i+1, tensor_out]
i = tf.constant(0)
[i, tensor_out] = tf.while_loop(cond, body, loop_vars=[i, tensor_out], shape_invariants=None, \
parallel_iterations=128, back_prop=True, swap_memory=True)
#Use a 1x1 conv to fuse the features with the position vectors
params['conv'] = {'number':depth_input, 'shape':[1, 1], 'rate':1, 'stride':[1, 1], 'padding':'VALID'}
tensor_out = tf.reshape(tensor_out.stack(), [-1, height, width, depth_input+depth_key]) #collect the per-pixel results back into a feature map (note: the loop above does not iterate the batch axis)
tensor_out = conv_bn1(tensor_out, 0, params, mtrain)
tensor_out = relu1(tensor_out + tensor_in, 0, params, mtrain)
return tensor_out
return fet_pos
def chn_atten1(tensor_in, layer):
with tf.variable_scope('chn_atten1_'+str(layer), reuse=reuse) as scope:
print_activations(fet_pos)
return fet_pos
with tf.variable_scope('atten1_'+str(layer), reuse=reuse) as scope:
fet_pos0 = pos_atten1(tensor_in, 0)
fet_chn0 = chn_atten1(tensor_in, 0)
fet_pos1 = pos_atten1(fet_chn0, 1)
fet_chn1 = chn_atten1(fet_pos0, 1)
tensor_out = fet_pos1 + fet_chn1
print_activations(tensor_out)
return tensor_out
def group_bn1(tensor_in=None, layer=0, params=None, mtrain=None):
#Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, e.g. the spatial area or the channel count), so the dimension can be grouped for locally fully-connected layers; parameter complexity is ∑(i=1..n) (x_i)^2.
#No rate is needed, because this convolution already relates the entire image.
#No padding is needed, because this convolution only uses the valid pixels of the image.
#The stride is not controlled by the convolution but by avg_pool: avg_pool first, then relu.
#Batch norm is applied over the batch only, not over channels or space, because channels and space are themselves features.
#When the spatial area or the channel count is small, there is no need to split into blocks for the fully-connected layers.
output_shape = params['group']['output_shape'] #[H', W', C']
filter_shape = params['group']['filter_shape'] #[h, w, c] [8, 8, 64]
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list() #(N, H, W, C)
x_shape = get_shape(tensor_in)
#When reducing dimensionality, reduce right at the entry to the stacked layers; when increasing, increase only at their final output. This saves memory and keeps the bottleneck connection without losing too much information.
out_in = np.asarray(output_shape) / np.asarray(x_shape[1:])
assert (out_in[0]<=1 and out_in[1]<=1) or (out_in[0]>1 and out_in[1]>1), 'The space shape of output is wrong!'
#Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, e.g. the spatial area or the channel count), so the dimension can be grouped for locally fully-connected layers; parameter complexity is ∑(i=1..n) (x_i)^2.
#Number of spatial grouping steps p_num
p_add = False
p_num = np.log(np.asarray(x_shape[1:3])) // np.log(np.asarray(filter_shape[0:2]))
p_num = p_num.astype(dtype=np.int32, copy=False)
assert p_num[0] == p_num[1], 'The space shape of filter is wrong!'
p_num = p_num[0]
p_shp = np.power(np.asarray(filter_shape[0:2]), p_num)
p_shp = p_shp.astype(dtype=np.int32, copy=False)
p_res = np.asarray(x_shape[1:3]) // p_shp
p_shp = p_shp * p_res
assert np.all(p_shp == np.asarray(x_shape[1:3])), 'The space shape of filter is wrong!'
if np.any(p_res != np.array([1, 1])) or np.all(np.asarray(x_shape[1:3]) == np.array([1, 1])):
p_num = p_num + 1
p_add = True
#Number of channel grouping steps c_num
c_add = False
c_num = np.log(x_shape[-1]) // np.log(filter_shape[-1])
c_num = c_num.astype(dtype=np.int32, copy=False)
c_shp = np.power(filter_shape[-1], c_num)
c_shp = c_shp.astype(dtype=np.int32, copy=False)
c_res = x_shape[-1] // c_shp
c_shp = c_shp * c_res
assert c_shp == x_shape[-1], 'The channel shape of filter is wrong!'
if c_res != 1 or x_shape[-1] == 1:
c_num = c_num + 1
c_add = True
#Spatial grouping parameters; keep pos_shp0 and pos_shp1 as close to each other as possible for high parameter utilization.
p_shape = [] #[[8, 8, p1, p2]]*p_number; spatial only: an 8*8 × p1*p2 fully-connected map applied p_number times, implemented with conv2d
for i in range(p_num):
pos_shp = []
#If the area grows and a p_res exists, put it in the last layer; if the area shrinks or stays equal and a p_res exists, put it in the first layer.
#Either out_in[0]<=1 or out_in[0]>1 must hold, and both i==0 and i==p_num-1 are always visited, so any p_res is guaranteed to be handled.
pos_shp0 = np.asarray(filter_shape[0:2])
if (i == 0 and out_in[0] <= 1) or (i == p_num - 1 and out_in[0] > 1):
if p_add:
pos_shp0 = p_res
pos_shp1 = pos_shp0 * out_in[0:2]
pos_shp1 = pos_shp1.astype(dtype=np.int32, copy=False)
assert np.all(pos_shp1 / pos_shp0 == out_in[0:2]), 'The space shape of output is wrong!'
else:
pos_shp1 = pos_shp0
pos_shp.extend(list(pos_shp0))
pos_shp.extend(list(pos_shp1))
p_shape.append(pos_shp)
#Channel grouping parameters; keep chn_shp0 and chn_shp1 as close to each other as possible for high parameter utilization.
c_shape = [] #[[16, c]]*c_number; channels only: a 16 × c fully-connected map applied c_number times, implemented with conv1d
for i in range(c_num):
chn_shp = []
#If the channel count grows and a c_res exists, put it in the last layer; if it shrinks or stays equal and a c_res exists, put it in the first layer.
#Either out_in[-1]<=1 or out_in[-1]>1 must hold, and both i==0 and i==c_num-1 are always visited, so any c_res is guaranteed to be handled.
chn_shp0 = filter_shape[-1]
if (i == 0 and out_in[-1] <= 1) or (i == c_num -1 and out_in[-1] > 1):
if c_add:
chn_shp0 = c_res
chn_shp1 = chn_shp0 * out_in[-1]
chn_shp1 = chn_shp1.astype(dtype=np.int32, copy=False)
assert chn_shp1 / chn_shp0 == out_in[-1], 'The channel shape of output is wrong!'
else:
chn_shp1 = chn_shp0
chn_shp.append(chn_shp0)
chn_shp.append(chn_shp1)
c_shape.append(chn_shp)
p_shp = np.prod(p_shape, axis=0)
p_srd = p_shp[0:2] // p_shp[2:]
assert np.all(p_shp[0:2] == np.asarray(x_shape[1:3])), 'The p_shape is wrong!'
c_shp = np.prod(c_shape, axis=0)
c_srd = c_shp[0] // c_shp[1]
assert c_shp[0] == x_shape[-1], 'The c_shape is wrong!'
def pos_group_bn1(tensor_in, layer):
with tf.variable_scope('pos_group_bn1_'+str(layer), reuse=reuse) as scope:
#Extract spatial features
x_shape = get_shape(tensor_in) #[N, H, W, C]
pra_num = x_shape[0] * x_shape[-1] # N*C
fet_pos = tf.transpose(tensor_in, [0, 3, 1, 2]) #(N, C, H, W)
fet_pos = tf.reshape(fet_pos, [-1, x_shape[1], x_shape[2]]) #(N*C, H, W)
def dispatch(fet_pos):
#fet_pos --> (H, W)
fet_shp = np.asarray(x_shape[1:3]) #[H, W]
fet_pos = tf.reshape(fet_pos, [1]+x_shape[1:3]+[1]) #(1, H, W, 1)
for i in range(len(p_shape)):
pos_shp = p_shape[i]
fet_shp = fet_shp // np.asarray(pos_shp[0:2]) #[H', W']
params['conv'] = {'number': pos_shp[2]*pos_shp[3], 'shape': pos_shp[0:2], 'rate': 1, 'stride': pos_shp[0:2], \
'padding': 'VALID', 'use_bias': False}
fet_pos = conv1(fet_pos, i, params, mtrain) #(C"', H', W', C")
fet_pos = tf.transpose(fet_pos, [0, 3, 1, 2]) #(C"', C", H', W') push the already-related features down so the next layer relates the rest
fet_pos = tf.reshape(fet_pos, [-1]+list(fet_shp)+[1]) #(C"', H', W', 1)
return fet_pos
fet_pos = tf.map_fn(dispatch, fet_pos, dtype=tf.float32, parallel_iterations=pra_num, \
back_prop=True, swap_memory=True, infer_shape=True) #(N*C, C"', 1, 1, 1)
fet_pos = tf.reshape(fet_pos, [-1, p_shp[2]*p_shp[3]]) #(N*C, C"') C"' is the spatial feature; BN should be applied over the other dimensions
fet_pos = batchnorm1(fet_pos, 0, params, mtrain) #(N*C, C"')
shape = [-1] + list(np.asarray(p_shape)[:, 2:].reshape(-1)) #restore the spatial dimensions
fet_pos = tf.reshape(fet_pos, shape) #(N*C, H, W)
perm = [0] + [x for x in range(1, 1+2*len(p_shape), 2)][::-1] + [x for x in range(2, 2+2*len(p_shape), 2)][::-1]
fet_pos = tf.transpose(fet_pos, perm) #(N*C, H, W)
fet_pos = tf.reshape(fet_pos, [-1, x_shape[-1], p_shp[2], p_shp[3]]) #(N, C, H, W)
fet_pos = tf.transpose(fet_pos, [0, 2, 3, 1]) #(N, H, W, C)
print_activations(fet_pos)
return fet_pos
def chn_group_bn1(tensor_in, layer):
with tf.variable_scope('chn_group_bn1_'+str(layer), reuse=reuse) as scope:
#Extract channel features
x_shape = get_shape(tensor_in) #[N, H, W, C]
pra_num = x_shape[0] * x_shape[1] * x_shape[2] # N*H*W
fet_chn = tf.reshape(tensor_in, [-1, x_shape[-1]]) #(N*H*W, C)
def dispatch(fet_chn):
#fet_chn --> (C)
fet_shp = x_shape[-1] #[C]
fet_chn = tf.reshape(fet_chn, [1, x_shape[-1], 1]) #(1, C, 1)
for i in range(len(c_shape)):
chn_shp = c_shape[i]
fet_shp = fet_shp // chn_shp[0] #[C']
params['conv'] = {'number': chn_shp[1], 'shape': chn_shp[0], 'rate': 1, 'stride': chn_shp[0], \
'padding': 'VALID', 'use_bias': False}
fet_chn = conv2(fet_chn, i, params, mtrain) #(C"', C', C")
fet_chn = tf.transpose(fet_chn, [0, 2, 1]) #(C"', C", C') push the already-related features down so the next layer relates the rest
fet_chn = tf.reshape(fet_chn, [-1, fet_shp, 1]) #(C"', C', 1)
return fet_chn
fet_chn = tf.map_fn(dispatch, fet_chn, dtype=tf.float32, parallel_iterations=pra_num, \
back_prop=True, swap_memory=True, infer_shape=True) #(N*H*W, C"', 1, 1)
fet_chn = tf.reshape(fet_chn, [-1, c_shp[1]]) #(N*H*W, C"') C"' is the channel feature; BN should be applied over the other dimensions
fet_chn = batchnorm1(fet_chn, 1, params, mtrain) #(N*H*W, C"')
shape = [-1] + list(np.asarray(c_shape)[:, 1:].reshape(-1)) #restore the channel dimensions
fet_chn = tf.reshape(fet_chn, shape) #(N*H*W, C)
perm = [0] + [x for x in range(1, 1+len(c_shape), 1)][::-1]
fet_chn = tf.transpose(fet_chn, perm) #(N*H*W, C)
fet_chn = tf.reshape(fet_chn, [-1, x_shape[1], x_shape[2], c_shp[1]]) #(N, H, W, C)
print_activations(fet_chn)
return fet_chn
with tf.variable_scope('group_bn1_'+str(layer), reuse=reuse) as scope:
fet_pos0 = pos_group_bn1(tensor_in, 0)
fet_chn0 = chn_group_bn1(tensor_in, 0)
fet_pos1 = pos_group_bn1(fet_chn0, 1)
fet_chn1 = chn_group_bn1(fet_pos0, 1)
tensor_out = fet_pos1 + fet_chn1
print_activations(tensor_out)
return tensor_out
def group_bn_relu1(tensor_in=None, layer=0, params=None, mtrain=None):
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('group_bn_relu1_'+str(layer)) as scope:
bn = group_bn1(tensor_in, 0, params, mtrain)
tensor_out = relu1(bn, 0, params, mtrain)
return tensor_out
def group_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
output_shape = params['group_unit']['output_shape'] #(H', W', C')
bottle_shape = params['group_unit']['bottle_shape'] #(H", W", C")
filter_shape = params['group_unit']['filter_shape'] #(h, w, c)
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list() #(N, H, W, C)
x_shape = get_shape(tensor_in) #(N, H, W, C)
with tf.variable_scope('group_unit1_'+str(layer)) as scope:
if np.any(np.asarray(x_shape[1:]) != np.asarray(output_shape)): #depthwise separable convolution!!!
number = output_shape[-1] // x_shape[-1]
shape = [3, 3]
stride = np.asarray(x_shape[1:3]) // np.asarray(output_shape[0:2])
params['conv'] = {'number': number, 'shape': shape, 'rate': [1, 1], 'stride': stride, 'padding': 'SAME'}
shortcut = conv_bn3(tensor_in, 0, params, mtrain)
else:
shortcut = tensor_in
params['group'] = {'output_shape': bottle_shape, 'filter_shape': filter_shape}
residual = group_bn_relu1(tensor_in, 0, params, mtrain)
params['group'] = {'output_shape': bottle_shape, 'filter_shape': filter_shape}
residual = group_bn_relu1(residual, 1, params, mtrain)
params['group'] = {'output_shape': output_shape, 'filter_shape': filter_shape}
residual = group_bn1(residual, 0, params, mtrain)
tensor_out = relu1(shortcut+residual, 0, params, mtrain)
return tensor_out
def group_block1(tensor_in=None, layer=0, params=None, mtrain=None):
block_setting = params['group_block']['block_setting']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
tensor_out = tensor_in
out_list = []
for i, block in enumerate(block_setting):
output_shape, bottle_shape, filter_shape, unit_number, unit_trainable = block
params['com']['trainable'] = unit_trainable
with tf.variable_scope('group_block1_'+str(layer)+'_'+str(i)) as scope:
for j in range(unit_number):
params['group_unit'] = {'output_shape':output_shape, 'bottle_shape':bottle_shape, 'filter_shape':filter_shape}
tensor_out = group_unit1(tensor_out, j, params, mtrain)
out_list.append(tensor_out)
return out_list
def group_bn1(tensor_in=None, layer=0, params=None, mtrain=None):
#Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, e.g. the spatial area or the channel count), so the dimension can be grouped for locally fully-connected layers; parameter complexity is ∑(i=1..n) (x_i)^2.
#No rate is needed, because this convolution already relates the entire image.
#No padding is needed, because this convolution only uses the valid pixels of the image.
#The stride is not controlled by the convolution but by avg_pool: avg_pool first, then relu.
#Batch norm is applied over the batch only, not over channels or space, because channels and space are themselves features.
#When the spatial area or the channel count is small, there is no need to split into blocks for the fully-connected layers.
output_shape = params['group']['output_shape'] #[H', W', C']
filter_shape = params['group']['filter_shape'] #[h, w, c] [8, 8, 64]
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list() #(N, H, W, C)
x_shape = get_shape(tensor_in)
#When reducing dimensionality, reduce right at the entry to the stacked layers; when increasing, increase only at their final output. This saves memory and keeps the bottleneck connection without losing too much information.
out_in = np.asarray(output_shape) / np.asarray(x_shape[1:])
assert (out_in[0]<=1 and out_in[1]<=1) or (out_in[0]>1 and out_in[1]>1), 'The space shape of output is wrong!'
#Ensure ∏(i=1..n) x_i = d (d is the original parameter dimension, e.g. the spatial area or the channel count), so the dimension can be grouped for locally fully-connected layers; parameter complexity is ∑(i=1..n) (x_i)^2.
#Number of spatial grouping steps p_num
p_add = False
p_num = np.log(np.asarray(x_shape[1:3])) // np.log(np.asarray(filter_shape[0:2]))
p_num = p_num.astype(dtype=np.int32, copy=False)
assert p_num[0] == p_num[1], 'The space shape of filter is wrong!'
p_num = p_num[0]
p_shp = np.power(np.asarray(filter_shape[0:2]), p_num)
p_shp = p_shp.astype(dtype=np.int32, copy=False)
p_res = np.asarray(x_shape[1:3]) // p_shp
p_shp = p_shp * p_res
assert np.all(p_shp == np.asarray(x_shape[1:3])), 'The space shape of filter is wrong!'
if np.any(p_res != np.array([1, 1])) or np.all(np.asarray(x_shape[1:3]) == np.array([1, 1])):
p_num = p_num + 1
p_add = True
#Number of channel grouping steps c_num
c_add = False
c_num = np.log(x_shape[-1]) // np.log(filter_shape[-1])
c_num = c_num.astype(dtype=np.int32, copy=False)
c_shp = np.power(filter_shape[-1], c_num)
c_shp = c_shp.astype(dtype=np.int32, copy=False)
c_res = x_shape[-1] // c_shp
c_shp = c_shp * c_res
assert c_shp == x_shape[-1], 'The channel shape of filter is wrong!'
if c_res != 1 or x_shape[-1] == 1:
c_num = c_num + 1
c_add = True
#Spatial grouping parameters; keep pos_shp0 and pos_shp1 as close to each other as possible for high parameter utilization.
p_shape = [] #[[8, 8, p1, p2]]*p_number; spatial only: an 8*8 × p1*p2 fully-connected map applied p_number times, implemented with conv2d
for i in range(p_num):
pos_shp = []
#If the area grows and a p_res exists, put it in the last layer; if the area shrinks or stays equal and a p_res exists, put it in the first layer.
#Either out_in[0]<=1 or out_in[0]>1 must hold, and both i==0 and i==p_num-1 are always visited, so any p_res is guaranteed to be handled.
pos_shp0 = np.asarray(filter_shape[0:2])
if (i == 0 and out_in[0] <= 1) or (i == p_num - 1 and out_in[0] > 1):
if p_add:
pos_shp0 = p_res
pos_shp1 = pos_shp0 * out_in[0:2]
pos_shp1 = pos_shp1.astype(dtype=np.int32, copy=False)
assert np.all(pos_shp1 / pos_shp0 == out_in[0:2]), 'The space shape of output is wrong!'
else:
pos_shp1 = pos_shp0
pos_shp.extend(list(pos_shp0))
pos_shp.extend(list(pos_shp1))
p_shape.append(pos_shp)
#Channel grouping parameters; keep chn_shp0 and chn_shp1 as close to each other as possible for high parameter utilization.
c_shape = [] #[[16, c]]*c_number; channels only: a 16 × c fully-connected map applied c_number times, implemented with conv1d
for i in range(c_num):
chn_shp = []
#If the channel count grows and a c_res exists, put it in the last layer; if it shrinks or stays equal and a c_res exists, put it in the first layer.
#Either out_in[-1]<=1 or out_in[-1]>1 must hold, and both i==0 and i==c_num-1 are always visited, so any c_res is guaranteed to be handled.
chn_shp0 = filter_shape[-1]
if (i == 0 and out_in[-1] <= 1) or (i == c_num -1 and out_in[-1] > 1):
if c_add:
chn_shp0 = c_res
chn_shp1 = chn_shp0 * out_in[-1]
chn_shp1 = chn_shp1.astype(dtype=np.int32, copy=False)
assert chn_shp1 / chn_shp0 == out_in[-1], 'The channel shape of output is wrong!'
else:
chn_shp1 = chn_shp0
chn_shp.append(chn_shp0)
chn_shp.append(chn_shp1)
c_shape.append(chn_shp)
p_shp = np.prod(p_shape, axis=0)
p_srd = p_shp[0:2] // p_shp[2:]
assert np.all(p_shp[0:2] == np.asarray(x_shape[1:3])), 'The p_shape is wrong!'
c_shp = np.prod(c_shape, axis=0)
c_srd = c_shp[0] // c_shp[1]
assert c_shp[0] == x_shape[-1], 'The c_shape is wrong!'
def pos_group_bn1(tensor_in, layer):
with tf.variable_scope('pos_group_bn1_'+str(layer), reuse=reuse) as scope:
#Extract spatial features (ignoring the channels)
x_shape = get_shape(tensor_in)
fet_pos = tf.transpose(tensor_in, [0, 3, 1, 2]) #(N, C, H, W)
fet_pos = tf.reshape(fet_pos, [-1, x_shape[1], x_shape[2], 1]) #(N*C, H, W, 1)
fet_shp = np.asarray(x_shape[1:3])
for i in range(len(p_shape)):
pos_shp = p_shape[i]
fet_shp = fet_shp // np.asarray(pos_shp[0:2])
params['conv'] = {'number': pos_shp[2]*pos_shp[3], 'shape': pos_shp[0:2], 'rate': 1, 'stride': pos_shp[0:2], \
'padding': 'VALID', 'use_bias': False}
fet_pos = conv1(fet_pos, i, params, mtrain) #(N*C, H', W', C")
fet_pos = tf.transpose(fet_pos, [0, 3, 1, 2]) #(N*C, C", H', W')
fet_pos = tf.reshape(fet_pos, [-1, fet_shp[0], fet_shp[1], 1]) #(N*C*C", H', W', 1)
fet_pos = tf.reshape(fet_pos, [-1, p_shp[2]*p_shp[3]]) #(N*C, C"') C"' is the spatial feature; BN should be applied over the other dimensions
fet_pos = batchnorm1(fet_pos, 0, params, mtrain) #(N*C, C"')
shape = [-1] + list(np.asarray(p_shape)[:, 2:].reshape(-1)) #restore the spatial dimensions
fet_pos = tf.reshape(fet_pos, shape) #(N*C, H, W)
perm = [0] + [x for x in range(1, 1+2*len(p_shape), 2)][::-1] + [x for x in range(2, 2+2*len(p_shape), 2)][::-1]
fet_pos = tf.transpose(fet_pos, perm) #(N*C, H, W)
fet_pos = tf.reshape(fet_pos, [-1, x_shape[-1], p_shp[2], p_shp[3]]) #(N, C, H, W)
fet_pos = tf.transpose(fet_pos, [0, 2, 3, 1]) #(N, H, W, C)
print_activations(fet_pos)
return fet_pos
def chn_group_bn1(tensor_in, layer):
with tf.variable_scope('chn_group_bn1_'+str(layer), reuse=reuse) as scope:
#Extract channel features
x_shape = get_shape(tensor_in)
fet_chn = tf.reshape(tensor_in, [-1, x_shape[-1], 1]) #(N*H*W, C, 1)
fet_shp = x_shape[-1]
for i in range(len(c_shape)):
chn_shp = c_shape[i]
fet_shp = fet_shp // chn_shp[0]
params['conv'] = {'number': chn_shp[1], 'shape': chn_shp[0], 'rate': 1, 'stride': chn_shp[0], \
'padding': 'VALID', 'use_bias': False}
fet_chn = conv2(fet_chn, i, params, mtrain) #(N*H*W, C', C")
fet_chn = tf.transpose(fet_chn, [0, 2, 1]) #(N*H*W, C", C') push the already-related features down so the next layer relates the rest
fet_chn = tf.reshape(fet_chn, [-1, fet_shp, 1]) #(N*H*W*C", C', 1)
fet_chn = tf.reshape(fet_chn, [-1, c_shp[1]]) #(N*H*W, C"') C"' is the channel feature; BN should be applied over the other dimensions
fet_chn = batchnorm1(fet_chn, 1, params, mtrain) #(N*H*W, C"')
shape = [-1] + list(np.asarray(c_shape)[:, 1:].reshape(-1)) #restore the channel dimensions
fet_chn = tf.reshape(fet_chn, shape) #(N*H*W, C)
perm = [0] + [x for x in range(1, 1+len(c_shape), 1)][::-1]
fet_chn = tf.transpose(fet_chn, perm) #(N*H*W, C)
fet_chn = tf.reshape(fet_chn, [-1, x_shape[1], x_shape[2], c_shp[1]]) #(N, H, W, C")
print_activations(fet_chn)
return fet_chn
with tf.variable_scope('group_bn1_'+str(layer), reuse=reuse) as scope:
fet_pos0 = pos_group_bn1(tensor_in, 0)
fet_chn0 = chn_group_bn1(tensor_in, 0)
fet_pos1 = pos_group_bn1(fet_chn0, 1)
fet_chn1 = chn_group_bn1(fet_pos0, 1)
tensor_out = fet_pos1 + fet_chn1
print_activations(tensor_out)
return tensor_out
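#Note: unlike the earlier group_bn1 variants, this copy's pos_group_bn1/chn_group_bn1 fold the N*C
#(resp. N*H*W) axis into the conv batch dimension instead of dispatching per sample with tf.map_fn,
#so every block is convolved in a single call; the reshape/transpose bookkeeping is otherwise identical.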
def group_bn_relu1(tensor_in=None, layer=0, params=None, mtrain=None):
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
with tf.variable_scope('group_bn_relu1_'+str(layer)) as scope:
bn = group_bn1(tensor_in, 0, params, mtrain)
tensor_out = relu1(bn, 0, params, mtrain)
return tensor_out
def group_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
output_shape = params['group_unit']['output_shape'] #(H', W', C')
bottle_shape = params['group_unit']['bottle_shape'] #(H", W", C")
filter_shape = params['group_unit']['filter_shape'] #(h, w, c)
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
#x_shape= tensor_in.get_shape().as_list() #(N, H, W, C)
x_shape = get_shape(tensor_in) #(N, H, W, C)
with tf.variable_scope('group_unit1_'+str(layer)) as scope:
if np.any(np.asarray(x_shape[1:]) != np.asarray(output_shape)): #depthwise separable convolution!!!
number = output_shape[-1] // x_shape[-1]
shape = [3, 3]
stride = np.asarray(x_shape[1:3]) // np.asarray(output_shape[0:2])
params['conv'] = {'number': number, 'shape': shape, 'rate': [1, 1], 'stride': stride, 'padding': 'SAME'}
shortcut = conv_bn3(tensor_in, 0, params, mtrain)
else:
shortcut = tensor_in
params['group'] = {'output_shape': bottle_shape, 'filter_shape': filter_shape}
residual = group_bn_relu1(tensor_in, 0, params, mtrain)
params['group'] = {'output_shape': bottle_shape, 'filter_shape': filter_shape}
residual = group_bn_relu1(residual, 1, params, mtrain)
params['group'] = {'output_shape': output_shape, 'filter_shape': filter_shape}
residual = group_bn1(residual, 0, params, mtrain)
tensor_out = relu1(shortcut+residual, 0, params, mtrain)
return tensor_out
def group_block1(tensor_in=None, layer=0, params=None, mtrain=None):
block_setting = params['group_block']['block_setting']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
tensor_out = tensor_in
out_list = []
for i, block in enumerate(block_setting):
output_shape, bottle_shape, filter_shape, unit_number, unit_trainable = block
params['com']['trainable'] = unit_trainable
with tf.variable_scope('group_block1_'+str(layer)+'_'+str(i)) as scope:
for j in range(unit_number):
params['group_unit'] = {'output_shape':output_shape, 'bottle_shape':bottle_shape, 'filter_shape':filter_shape}
tensor_out = group_unit1(tensor_out, j, params, mtrain)
out_list.append(tensor_out)
return out_list
def attent_unit1(tensor_in=None, layer=0, params=None, mtrain=None):
reg = params['com']['reg']
wscale = params['com']['wscale']
dtype = params['com']['dtype']
reuse = params['com']['reuse']
is_train = params['com']['is_train']
trainable = params['com']['trainable']
shape = params['attent_unit']['shape']
rate = params['attent_unit']['rate']
depth_key = params['attent_unit']['depth_key']
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
x_shape = tensor_in.get_shape().as_list()
depth_input = x_shape[-1]
height = x_shape[ 1]
width = x_shape[ 2]
with tf.variable_scope('attent_unit1_'+str(layer)) as scope:
#Use 3x3 convs to isolate the features
params['conv'] = {'number':depth_input, 'shape':[3, 3], 'rate':1, 'stride':[1, 1], 'padding':'SAME'}
tensor_in = conv_bn_relu1(tensor_in, 0, params, mtrain)
tensor_in = conv_bn_relu1(tensor_in, 1, params, mtrain)
#Keys should be related before the position vectors are injected; the position vectors mainly serve the center point, describing the positional relation between the center feature vector and the surrounding feature vectors.
params['conv'] = {'number':depth_key, 'shape':[1, 1], 'rate':1, 'stride':[1, 1], 'padding':'VALID', 'use_bias': True}
tensor_key = conv1(tensor_in, 0, params, mtrain)
params['conv'] = {'number':depth_input, 'shape':[1, 1], 'rate':1, 'stride':[1, 1], 'padding':'VALID', 'use_bias': True}
tensor_value = conv1(tensor_in, 0, params, mtrain)
#Get the relative_position_embeddings #(64, 64, 64)
PE = tf.get_variable(name='PE', shape=list(shape)+[depth_key], dtype=dtype, \
#initializer=tf.truncated_normal_initializer(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
regularizer=tf.contrib.layers.l2_regularizer(reg), trainable=trainable)
tensor_out = tf.TensorArray(dtype=tf.float32, size=height*width, dynamic_size=False, clear_after_read=True, \
infer_shape=True, element_shape=[depth_input+depth_key], colocate_with_first_write_call=True)
def cond(i, tensor_out):
c = tf.less(i, height*width)
return c
def body(i, tensor_out):
ycd = i // width
xcd = i % width
ymn = ycd - ((shape[0] - 1) // 2) * rate
xmn = xcd - ((shape[1] - 1) // 2) * rate
ycds = tf.concat([[ymn], tf.tile([rate], [shape[0]-1])], axis=0)
xcds = tf.concat([[xmn], tf.tile([rate], [shape[1]-1])], axis=0)
ycds = tf.cumsum(ycds, axis=0, exclusive=False, reverse=False)
xcds = tf.cumsum(xcds, axis=0, exclusive=False, reverse=False)
idxs = tf.where(tf.logical_and(ycds>=0, ycds<height))
ycds = tf.gather_nd(ycds, idxs)
idxs = tf.where(tf.logical_and(xcds>=0, xcds<width ))
xcds = tf.gather_nd(xcds, idxs)
ycds = tf.tile(ycds[:, tf.newaxis], [1, tf.shape(xcds)[0]])
xcds = tf.tile(xcds[tf.newaxis, :], [tf.shape(ycds)[0], 1])
crd0 = tf.stack([ycd, xcd], axis=0) #(2) actual center
crds0 = tf.stack([ycds, xcds], axis=-1) #(h, w, 2) actual coordinates
fets0 = tf.gather_nd(tensor_value, crds0) #(h, w, c) actual features
fets3 = tf.gather_nd(tensor_key, crds0) #(h, w, c') actual features
crd1 = (np.asarray(shape) - 1) // 2 #(2) relative center
crds1 = (crds0 - crd0) // rate #(h, w, 2) relative coordinates
crds1 = crds1 + crd1 #(h, w, 2) relative coordinates
fets1 = tf.gather_nd(PE, crds1) #(h, w, c) relative features
#fets2= tf.concat([fets0, fets1], axis=-1) #(h, w, c'') fused features
crd3 = crd0 - crds0[0, 0] #(2) relative coordinates (using index 1 for crd/crds gives the same result)
fet3 = tf.gather_nd(fets3, crd3) #(c') relative center
#Compute the attention
att3 = tf.einsum('ijk,k->ij', fets3, fet3) #(h, w)
att3 = tf.exp(att3 / tf.sqrt(tf.cast(depth_key, tf.float32))) #(h, w)
att3 = att3 / tf.reduce_sum(att3) #(h, w)
fet0 = tf.einsum('ij,ijk->k', att3, fets0) #(c)
fet1 = tf.einsum('ij,ijk->k', att3, fets1) #(c')
fet2 = tf.concat([fet0, fet1], axis=-1) #(c'')
#fet2 = tf.einsum('ij,ijk->k', att3, fets2) #(c'')
tensor_out = tensor_out.write(i, fet2) #(c'')
return [i+1, tensor_out]
i = tf.constant(0)
[i, tensor_out] = tf.while_loop(cond, body, loop_vars=[i, tensor_out], shape_invariants=None, \
parallel_iterations=128, back_prop=True, swap_memory=True)
#Use a 1x1 conv to fuse the features with the position vectors
params['conv'] = {'number':depth_input, 'shape':[1, 1], 'rate':1, 'stride':[1, 1], 'padding':'VALID'}
tensor_out = tf.reshape(tensor_out.stack(), [-1, height, width, depth_input+depth_key]) #collect the per-pixel results back into a feature map (note: the loop above does not iterate the batch axis)
tensor_out = conv_bn1(tensor_out, 0, params, mtrain)
tensor_out = relu1(tensor_out + tensor_in, 0, params, mtrain)
return tensor_out
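#attent_unit1 attends over a dilated local window around each pixel and mixes in a learned relative
#position embedding PE. A minimal NumPy sketch of the per-center computation in body() above
#(illustrative shapes; the boundary clipping of the window is omitted):
def _local_attention_sketch(values, keys, pe, center_key, depth_key):
    """values: (h, w, c); keys: (h, w, ck); pe: (h, w, ck); center_key: (ck,)."""
    import numpy as np
    logits = np.einsum('ijk,k->ij', keys, center_key) / np.sqrt(depth_key)
    logits = logits - logits.max()                 #stabilize the exp, matching the tf.exp/reduce_sum pair
    attn = np.exp(logits)
    attn = attn / attn.sum()                       #softmax over the whole window
    fet0 = np.einsum('ij,ijk->k', attn, values)    #attended content feature (c,)
    fet1 = np.einsum('ij,ijk->k', attn, pe)        #attended position feature (ck,)
    return np.concatenate([fet0, fet1], axis=-1)   #(c + ck,), the vector written into the TensorArray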
def atten1(tensor_in=None, layer=0, params=None, mtrain=None):
shape = params['atten']['shape'] #the range attention relates over, e.g. [64, 64]
if isinstance(tensor_in, tuple):
tensor_in = tensor_in[0]
x_shape = get_shape(tensor_in)
depth_input = x_shape[-1]
depth_bottle = depth_input // 4
with tf.variable_scope('atten1_'+str(layer), reuse=reuse) as scope:
#Use a 1x1 conv to reduce the dimensionality
params['conv'] = {'number':depth_bottle, 'shape':[1, 1], 'rate':1, 'stride':[1, 1], 'padding':'VALID'}
fet_com = conv_bn_relu1(tensor_in, 0, params, mtrain)
#x_shape = tensor_in.get_shape().as_list()
x_shape = get_shape(tensor_in)
kernel_shape = [shape[0], shape[1], x_shape[3], number]
kernel_stride = [1, stride[0], stride[1], 1]
with tf.variable_scope('conv1_'+str(layer), reuse=reuse) as scope:
kernel = tf.get_variable(name='weights', shape=kernel_shape, dtype=dtype, \
#initializer=tf.truncated_normal_initializer(stddev=wscale), \
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,mode='FAN_AVG',uniform=True),
#initializer=tf.contrib.layers.xavier_initializer(uniform=True, dtype=tf.float32),
regularizer=tf.contrib.layers.l2_regularizer(reg), \
trainable=trainable)
if use_bias:
biases = tf.get_variable(name='biases', shape=[number], dtype=dtype, \
initializer=tf.constant_initializer(0.0), \
trainable=trainable)
if rate == 1:
conv = tf.nn.conv2d(tensor_in, kernel, kernel_stride, padding=padding)
else:
conv = tf.nn.atrous_conv2d(tensor_in, kernel, rate, padding=padding)
if use_bias:
tensor_out = tf.nn.bias_add(conv, biases)
else:
tensor_out = conv
#tf.summary.histogram('conv', tensor_out)
print_activations(tensor_out)
return tensor_out
#--- next file: pybind/nos/v6_0_2c/snmp_server/__init__.py (blob 4c531ab4e0ff443cd0aabc9ac3ac195bf2f803c1, 48,457 bytes) | repo shivharis/pybind @ 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | license Apache-2.0 | starred 2021-11-05 ---
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import context
import community
import user
import v3host
import host
import agtconfig
import enable
import engineID_drop
import view
import group
class snmp_server(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-snmp - based on the path /snmp-server. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__context','__community','__user','__v3host','__host','__agtconfig','__enable','__engineID_drop','__view','__group',)
_yang_name = 'snmp-server'
_rest_name = 'snmp-server'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__engineID_drop = YANGDynClass(base=engineID_drop.engineID_drop, is_container='container', presence=False, yang_name="engineID-drop", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'display-when': u'/vcsmode/vcs-mode = "false"'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)
self.__enable = YANGDynClass(base=enable.enable, is_container='container', presence=False, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enables/Disables the traps.', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)
self.__group = YANGDynClass(base=YANGListType("group_name group_version",group.group, yang_name="group", rest_name="group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='group-name group-version', extensions={u'tailf-common': {u'info': u'group\tDefine a User Security Model group', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'27', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpgroup'}}), is_container='list', yang_name="group", rest_name="group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'group\tDefine a User Security Model group', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'27', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpgroup'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
self.__v3host = YANGDynClass(base=YANGListType("hostip username",v3host.v3host, yang_name="v3host", rest_name="v3host", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip username', extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'25'}}), is_container='list', yang_name="v3host", rest_name="v3host", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'25'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
self.__agtconfig = YANGDynClass(base=agtconfig.agtconfig, is_container='container', presence=False, yang_name="agtconfig", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'callpoint': u'snmpsystemgroup', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)
self.__community = YANGDynClass(base=YANGListType("community",community.community, yang_name="community", rest_name="community", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='community', extensions={u'tailf-common': {u'info': u'Holds community strings and groupname asscoiated with the community.', u'cli-suppress-mode': None, u'sort-priority': u'22', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpsetcommunity'}}), is_container='list', yang_name="community", rest_name="community", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds community strings and groupname asscoiated with the community.', u'cli-suppress-mode': None, u'sort-priority': u'22', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpsetcommunity'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
self.__host = YANGDynClass(base=YANGListType("ip community",host.host, yang_name="host", rest_name="host", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip community', extensions={u'tailf-common': {u'info': u'Holds IP Address, community string, version\n(v1 | v2c), port number used to send traps\nand severity level', u'sort-priority': u'23', u'callpoint': u'snmphost', u'cli-suppress-key-abbreviation': None, u'cli-suppress-list-no': None}}), is_container='list', yang_name="host", rest_name="host", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, community string, version\n(v1 | v2c), port number used to send traps\nand severity level', u'sort-priority': u'23', u'callpoint': u'snmphost', u'cli-suppress-key-abbreviation': None, u'cli-suppress-list-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
self.__user = YANGDynClass(base=YANGListType("username",user.user, yang_name="user", rest_name="user", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='username', extensions={u'tailf-common': {u'info': u'Holds username, groupname (admin | user), auth\nand priv attributes associated with SNMP username', u'cli-suppress-mode': None, u'sort-priority': u'24', u'cli-suppress-show-match': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpuser'}}), is_container='list', yang_name="user", rest_name="user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds username, groupname (admin | user), auth\nand priv attributes associated with SNMP username', u'cli-suppress-mode': None, u'sort-priority': u'24', u'cli-suppress-show-match': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpuser'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
self.__context = YANGDynClass(base=YANGListType("context_name",context.context, yang_name="context", rest_name="context", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='context-name', extensions={u'tailf-common': {u'info': u'context to various Instance Mapping', u'cli-suppress-list-no': None, u'callpoint': u'snmpContextMapping'}}), is_container='list', yang_name="context", rest_name="context", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'context to various Instance Mapping', u'cli-suppress-list-no': None, u'callpoint': u'snmpContextMapping'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
self.__view = YANGDynClass(base=YANGListType("viewname mibtree",view.view, yang_name="view", rest_name="view", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='viewname mibtree', extensions={u'tailf-common': {u'info': u'view Define an SNMPv2 MIB view', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'26', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'snmpview'}}), is_container='list', yang_name="view", rest_name="view", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'view Define an SNMPv2 MIB view', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'26', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'snmpview'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'snmp-server']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'snmp-server']
def _get_context(self):
"""
Getter method for context, mapped from YANG variable /snmp_server/context (list)
YANG Description: provides the mapping of SNMP context (represented by
the value of vacmContextName) to the various entities
within the managed device
"""
return self.__context
def _set_context(self, v, load=False):
"""
Setter method for context, mapped from YANG variable /snmp_server/context (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_context is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_context() directly.
YANG Description: provides the mapping of SNMP context (represented by
the value of vacmContextName) to the various entities
within the managed device
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("context_name",context.context, yang_name="context", rest_name="context", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='context-name', extensions={u'tailf-common': {u'info': u'context to various Instance Mapping', u'cli-suppress-list-no': None, u'callpoint': u'snmpContextMapping'}}), is_container='list', yang_name="context", rest_name="context", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'context to various Instance Mapping', u'cli-suppress-list-no': None, u'callpoint': u'snmpContextMapping'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """context must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("context_name",context.context, yang_name="context", rest_name="context", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='context-name', extensions={u'tailf-common': {u'info': u'context to various Instance Mapping', u'cli-suppress-list-no': None, u'callpoint': u'snmpContextMapping'}}), is_container='list', yang_name="context", rest_name="context", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'context to various Instance Mapping', u'cli-suppress-list-no': None, u'callpoint': u'snmpContextMapping'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)""",
})
self.__context = t
if hasattr(self, '_set'):
self._set()
def _unset_context(self):
self.__context = YANGDynClass(base=YANGListType("context_name",context.context, yang_name="context", rest_name="context", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='context-name', extensions={u'tailf-common': {u'info': u'context to various Instance Mapping', u'cli-suppress-list-no': None, u'callpoint': u'snmpContextMapping'}}), is_container='list', yang_name="context", rest_name="context", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'context to various Instance Mapping', u'cli-suppress-list-no': None, u'callpoint': u'snmpContextMapping'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
def _get_community(self):
"""
Getter method for community, mapped from YANG variable /snmp_server/community (list)
"""
return self.__community
def _set_community(self, v, load=False):
"""
Setter method for community, mapped from YANG variable /snmp_server/community (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_community is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_community() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("community",community.community, yang_name="community", rest_name="community", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='community', extensions={u'tailf-common': {u'info': u'Holds community strings and groupname asscoiated with the community.', u'cli-suppress-mode': None, u'sort-priority': u'22', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpsetcommunity'}}), is_container='list', yang_name="community", rest_name="community", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds community strings and groupname asscoiated with the community.', u'cli-suppress-mode': None, u'sort-priority': u'22', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpsetcommunity'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """community must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("community",community.community, yang_name="community", rest_name="community", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='community', extensions={u'tailf-common': {u'info': u'Holds community strings and groupname asscoiated with the community.', u'cli-suppress-mode': None, u'sort-priority': u'22', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpsetcommunity'}}), is_container='list', yang_name="community", rest_name="community", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds community strings and groupname asscoiated with the community.', u'cli-suppress-mode': None, u'sort-priority': u'22', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpsetcommunity'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)""",
})
self.__community = t
if hasattr(self, '_set'):
self._set()
def _unset_community(self):
self.__community = YANGDynClass(base=YANGListType("community",community.community, yang_name="community", rest_name="community", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='community', extensions={u'tailf-common': {u'info': u'Holds community strings and groupname asscoiated with the community.', u'cli-suppress-mode': None, u'sort-priority': u'22', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpsetcommunity'}}), is_container='list', yang_name="community", rest_name="community", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds community strings and groupname asscoiated with the community.', u'cli-suppress-mode': None, u'sort-priority': u'22', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpsetcommunity'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
def _get_user(self):
"""
Getter method for user, mapped from YANG variable /snmp_server/user (list)
"""
return self.__user
def _set_user(self, v, load=False):
"""
Setter method for user, mapped from YANG variable /snmp_server/user (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_user is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_user() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("username",user.user, yang_name="user", rest_name="user", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='username', extensions={u'tailf-common': {u'info': u'Holds username, groupname (admin | user), auth\nand priv attributes associated with SNMP username', u'cli-suppress-mode': None, u'sort-priority': u'24', u'cli-suppress-show-match': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpuser'}}), is_container='list', yang_name="user", rest_name="user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds username, groupname (admin | user), auth\nand priv attributes associated with SNMP username', u'cli-suppress-mode': None, u'sort-priority': u'24', u'cli-suppress-show-match': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpuser'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """user must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("username",user.user, yang_name="user", rest_name="user", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='username', extensions={u'tailf-common': {u'info': u'Holds username, groupname (admin | user), auth\nand priv attributes associated with SNMP username', u'cli-suppress-mode': None, u'sort-priority': u'24', u'cli-suppress-show-match': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpuser'}}), is_container='list', yang_name="user", rest_name="user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds username, groupname (admin | user), auth\nand priv attributes associated with SNMP username', u'cli-suppress-mode': None, u'sort-priority': u'24', u'cli-suppress-show-match': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpuser'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)""",
})
self.__user = t
if hasattr(self, '_set'):
self._set()
def _unset_user(self):
self.__user = YANGDynClass(base=YANGListType("username",user.user, yang_name="user", rest_name="user", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='username', extensions={u'tailf-common': {u'info': u'Holds username, groupname (admin | user), auth\nand priv attributes associated with SNMP username', u'cli-suppress-mode': None, u'sort-priority': u'24', u'cli-suppress-show-match': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpuser'}}), is_container='list', yang_name="user", rest_name="user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds username, groupname (admin | user), auth\nand priv attributes associated with SNMP username', u'cli-suppress-mode': None, u'sort-priority': u'24', u'cli-suppress-show-match': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpuser'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
def _get_v3host(self):
"""
Getter method for v3host, mapped from YANG variable /snmp_server/v3host (list)
"""
return self.__v3host
def _set_v3host(self, v, load=False):
"""
Setter method for v3host, mapped from YANG variable /snmp_server/v3host (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_v3host is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_v3host() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("hostip username",v3host.v3host, yang_name="v3host", rest_name="v3host", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip username', extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'25'}}), is_container='list', yang_name="v3host", rest_name="v3host", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'25'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """v3host must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("hostip username",v3host.v3host, yang_name="v3host", rest_name="v3host", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip username', extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'25'}}), is_container='list', yang_name="v3host", rest_name="v3host", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'25'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)""",
})
self.__v3host = t
if hasattr(self, '_set'):
self._set()
def _unset_v3host(self):
self.__v3host = YANGDynClass(base=YANGListType("hostip username",v3host.v3host, yang_name="v3host", rest_name="v3host", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip username', extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'25'}}), is_container='list', yang_name="v3host", rest_name="v3host", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'25'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
def _get_host(self):
"""
Getter method for host, mapped from YANG variable /snmp_server/host (list)
"""
return self.__host
def _set_host(self, v, load=False):
"""
Setter method for host, mapped from YANG variable /snmp_server/host (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_host is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_host() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("ip community",host.host, yang_name="host", rest_name="host", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip community', extensions={u'tailf-common': {u'info': u'Holds IP Address, community string, version\n(v1 | v2c), port number used to send traps\nand severity level', u'sort-priority': u'23', u'callpoint': u'snmphost', u'cli-suppress-key-abbreviation': None, u'cli-suppress-list-no': None}}), is_container='list', yang_name="host", rest_name="host", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, community string, version\n(v1 | v2c), port number used to send traps\nand severity level', u'sort-priority': u'23', u'callpoint': u'snmphost', u'cli-suppress-key-abbreviation': None, u'cli-suppress-list-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """host must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("ip community",host.host, yang_name="host", rest_name="host", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip community', extensions={u'tailf-common': {u'info': u'Holds IP Address, community string, version\n(v1 | v2c), port number used to send traps\nand severity level', u'sort-priority': u'23', u'callpoint': u'snmphost', u'cli-suppress-key-abbreviation': None, u'cli-suppress-list-no': None}}), is_container='list', yang_name="host", rest_name="host", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, community string, version\n(v1 | v2c), port number used to send traps\nand severity level', u'sort-priority': u'23', u'callpoint': u'snmphost', u'cli-suppress-key-abbreviation': None, u'cli-suppress-list-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)""",
})
self.__host = t
if hasattr(self, '_set'):
self._set()
def _unset_host(self):
self.__host = YANGDynClass(base=YANGListType("ip community",host.host, yang_name="host", rest_name="host", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip community', extensions={u'tailf-common': {u'info': u'Holds IP Address, community string, version\n(v1 | v2c), port number used to send traps\nand severity level', u'sort-priority': u'23', u'callpoint': u'snmphost', u'cli-suppress-key-abbreviation': None, u'cli-suppress-list-no': None}}), is_container='list', yang_name="host", rest_name="host", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, community string, version\n(v1 | v2c), port number used to send traps\nand severity level', u'sort-priority': u'23', u'callpoint': u'snmphost', u'cli-suppress-key-abbreviation': None, u'cli-suppress-list-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
def _get_agtconfig(self):
"""
Getter method for agtconfig, mapped from YANG variable /snmp_server/agtconfig (container)
"""
return self.__agtconfig
def _set_agtconfig(self, v, load=False):
"""
Setter method for agtconfig, mapped from YANG variable /snmp_server/agtconfig (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_agtconfig is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_agtconfig() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=agtconfig.agtconfig, is_container='container', presence=False, yang_name="agtconfig", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'callpoint': u'snmpsystemgroup', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """agtconfig must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=agtconfig.agtconfig, is_container='container', presence=False, yang_name="agtconfig", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'callpoint': u'snmpsystemgroup', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)""",
})
self.__agtconfig = t
if hasattr(self, '_set'):
self._set()
def _unset_agtconfig(self):
self.__agtconfig = YANGDynClass(base=agtconfig.agtconfig, is_container='container', presence=False, yang_name="agtconfig", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'callpoint': u'snmpsystemgroup', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)
def _get_enable(self):
"""
Getter method for enable, mapped from YANG variable /snmp_server/enable (container)
"""
return self.__enable
def _set_enable(self, v, load=False):
"""
Setter method for enable, mapped from YANG variable /snmp_server/enable (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=enable.enable, is_container='container', presence=False, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enables/Disables the traps.', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enable must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=enable.enable, is_container='container', presence=False, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enables/Disables the traps.', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)""",
})
self.__enable = t
if hasattr(self, '_set'):
self._set()
def _unset_enable(self):
self.__enable = YANGDynClass(base=enable.enable, is_container='container', presence=False, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enables/Disables the traps.', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)
def _get_engineID_drop(self):
"""
Getter method for engineID_drop, mapped from YANG variable /snmp_server/engineID_drop (container)
"""
return self.__engineID_drop
def _set_engineID_drop(self, v, load=False):
"""
Setter method for engineID_drop, mapped from YANG variable /snmp_server/engineID_drop (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_engineID_drop is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_engineID_drop() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=engineID_drop.engineID_drop, is_container='container', presence=False, yang_name="engineID-drop", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'display-when': u'/vcsmode/vcs-mode = "false"'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """engineID_drop must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=engineID_drop.engineID_drop, is_container='container', presence=False, yang_name="engineID-drop", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'display-when': u'/vcsmode/vcs-mode = "false"'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)""",
})
self.__engineID_drop = t
if hasattr(self, '_set'):
self._set()
def _unset_engineID_drop(self):
self.__engineID_drop = YANGDynClass(base=engineID_drop.engineID_drop, is_container='container', presence=False, yang_name="engineID-drop", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'display-when': u'/vcsmode/vcs-mode = "false"'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='container', is_config=True)
def _get_view(self):
"""
Getter method for view, mapped from YANG variable /snmp_server/view (list)
"""
return self.__view
def _set_view(self, v, load=False):
"""
Setter method for view, mapped from YANG variable /snmp_server/view (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_view is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_view() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("viewname mibtree",view.view, yang_name="view", rest_name="view", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='viewname mibtree', extensions={u'tailf-common': {u'info': u'view Define an SNMPv2 MIB view', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'26', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'snmpview'}}), is_container='list', yang_name="view", rest_name="view", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'view Define an SNMPv2 MIB view', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'26', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'snmpview'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """view must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("viewname mibtree",view.view, yang_name="view", rest_name="view", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='viewname mibtree', extensions={u'tailf-common': {u'info': u'view Define an SNMPv2 MIB view', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'26', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'snmpview'}}), is_container='list', yang_name="view", rest_name="view", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'view Define an SNMPv2 MIB view', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'26', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'snmpview'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)""",
})
self.__view = t
if hasattr(self, '_set'):
self._set()
def _unset_view(self):
self.__view = YANGDynClass(base=YANGListType("viewname mibtree",view.view, yang_name="view", rest_name="view", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='viewname mibtree', extensions={u'tailf-common': {u'info': u'view Define an SNMPv2 MIB view', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'26', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'snmpview'}}), is_container='list', yang_name="view", rest_name="view", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'view Define an SNMPv2 MIB view', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'26', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'snmpview'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
def _get_group(self):
"""
Getter method for group, mapped from YANG variable /snmp_server/group (list)
"""
return self.__group
def _set_group(self, v, load=False):
"""
Setter method for group, mapped from YANG variable /snmp_server/group (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_group is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_group() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("group_name group_version",group.group, yang_name="group", rest_name="group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='group-name group-version', extensions={u'tailf-common': {u'info': u'group\tDefine a User Security Model group', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'27', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpgroup'}}), is_container='list', yang_name="group", rest_name="group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'group\tDefine a User Security Model group', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'27', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpgroup'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """group must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("group_name group_version",group.group, yang_name="group", rest_name="group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='group-name group-version', extensions={u'tailf-common': {u'info': u'group\tDefine a User Security Model group', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'27', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpgroup'}}), is_container='list', yang_name="group", rest_name="group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'group\tDefine a User Security Model group', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'27', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpgroup'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)""",
})
self.__group = t
if hasattr(self, '_set'):
self._set()
def _unset_group(self):
self.__group = YANGDynClass(base=YANGListType("group_name group_version",group.group, yang_name="group", rest_name="group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='group-name group-version', extensions={u'tailf-common': {u'info': u'group\tDefine a User Security Model group', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'27', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpgroup'}}), is_container='list', yang_name="group", rest_name="group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'group\tDefine a User Security Model group', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'27', u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'snmpgroup'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)
context = __builtin__.property(_get_context, _set_context)
community = __builtin__.property(_get_community, _set_community)
user = __builtin__.property(_get_user, _set_user)
v3host = __builtin__.property(_get_v3host, _set_v3host)
host = __builtin__.property(_get_host, _set_host)
agtconfig = __builtin__.property(_get_agtconfig, _set_agtconfig)
enable = __builtin__.property(_get_enable, _set_enable)
engineID_drop = __builtin__.property(_get_engineID_drop, _set_engineID_drop)
view = __builtin__.property(_get_view, _set_view)
group = __builtin__.property(_get_group, _set_group)
_pyangbind_elements = {'context': context, 'community': community, 'user': user, 'v3host': v3host, 'host': host, 'agtconfig': agtconfig, 'enable': enable, 'engineID_drop': engineID_drop, 'view': view, 'group': group, }
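# A minimal usage sketch of the generated class (illustrative only; the import
# path is hypothetical and depends on how the pyangbind output is packaged):
#
#     from pybind import snmp_server            # hypothetical module path
#     snmp = snmp_server()
#     # 'host' is keyed by 'ip community', so composite keys are space-joined:
#     trap_host = snmp.host.add("10.0.0.1 public")
#     print(snmp.host.keys())
#     snmp._unset_host()                        # reset to the empty default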
| 106.265351 | 1,209 | 0.734197 | 7,011 | 48,457 | 4.911853 | 0.036229 | 0.02416 | 0.055754 | 0.035543 | 0.913785 | 0.905973 | 0.901443 | 0.895636 | 0.895636 | 0.886343 | 0 | 0.004349 | 0.112718 | 48,457 | 455 | 1,210 | 106.498901 | 0.796604 | 0.092866 | 0 | 0.47482 | 0 | 0.057554 | 0.496254 | 0.158515 | 0 | 0 | 0 | 0 | 0 | 1 | 0.118705 | false | 0 | 0.064748 | 0 | 0.294964 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
4c6092065d243870cf79e88336bc47926bd1181d | 5,725 | py | Python | sources/icon.py | ssyatelandisi/Excel-to-CSV | 4ff06485e6483f39a217f45c866ddb542a6df567 | ["Apache-2.0"] | null | null | null | sources/icon.py | ssyatelandisi/Excel-to-CSV | 4ff06485e6483f39a217f45c866ddb542a6df567 | ["Apache-2.0"] | null | null | null | sources/icon.py | ssyatelandisi/Excel-to-CSV | 4ff06485e6483f39a217f45c866ddb542a6df567 | ["Apache-2.0"] | null | null | null |
img = b'AAABAAEAICAAAAEAIACoEAAAFgAAACgAAAAgAAAAQAAAAAEAIAAAAAAAABAAAMMOAADDDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAi4YlDJKKHkqUix2FlIwdq5SMHMGTihvDlIwdvZSMHaOUix14kYkfOoeCKAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAkoogMJWMHaGVjBzzlo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lYwc55SMHYeQiSEaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAk4skFpWMHaOVjRz9lo0c/5aNHP+WjRz/lo0c/5eOHP+ZkBz/mpEc/5mQHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5WMHPWUjB58kYstBgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJWNID6VjBznlo0c/5aNHP+WjRz/nJMd/7CmIf/EuSz/0sc5/9jNQv/az0X/18xA/8/ENv/AtSn/qqAg/5mQHP+WjRz/lo0c/5aNHP+VjB3LlY0kHgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACVjR9SlYwc+ZaNHP+WjRz/m5Ec/7etI//UyTf/3tRQ/+HYYf/j22r/5Nxu/+Tcb//k3G3/49po/+HXXf/d00r/z8Qx/6+lIf+Yjxz/lo0c/5aNHP+VjBzllY0fKAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlY0hRJWMHPmWjRz/lo0c/6OaHv/Lvyv/3NFF/+DWWP/g12Xn4Nhuld7Xdljc1Xw219F/LNzWejzf2HNk39dsqeDXYvXf1VX/2s8+/8K3Jv+dkx3/lo0c/5aNHP+VjR3llY4kHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJaPJRyWjRztlo0c/5aNHP+mnB//z8Mr/9vRQf/d01Dr2tFfaMnEeAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA0MpuFNvSW4vd00352s88/8e7Jv+elB3/lo0c/5aNHP+WjR3Hl5M0BAAAAAAAAAAAAAAAAAAAAAAAAAAAlo0es5aNHP+WjRz/oJcd/8m+Jv/ZzTT/2M5Bx8/GWBoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANTKUDjZzj7n2Mww/8C1I/+akRz/lo0c/5aNHP+Wjh90AAAAAAAAAAAAAAAAAAAAAJaOIkKWjRz/lo0c/5iPHP+6ryL/0cUn/9TIL8fFvU0KAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMzDQCbUyCvrzsIm/7OoIf+ZkBz/lo0c/5aNHPGWjygSAAAAAAAAAAAAAAAAlo4et5aNHP+WjRz/pZwe/76zI//DuCbtwrk3GgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMS6MUbOwif91ssz/9PIOv+/tCr/oZge/5ePH3gAAAAAAAAAAJWOKBiWjRz9lo0c/5aNHP+jmh7/opke/6OaI2oAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANPJNqng1lf/49pp/9/WW/vTyT7Br6YqXAAAAAAAAAAAlY0fXpaNHP+WjRz/lo0c/5aNHP+WjR3rlI8vCAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA1MxfMuDYar3f2HVo0sx2GAAAAAAAAAAAAAAAAAAAAACVjR6Vlo0c/5aNHP+WjRz/lo0c/5SLHNOOhRp0joUadI6FGnSOhRp0joUadI6FGnSOhRp0joUadI6FGnSOhRp0joUadI6FGnSOhRp0joUadI6FGnSOhRp0ioEZcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJOKHr+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+RiRv1AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAjoYb1ZaNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5GJG/UAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACOhhvVlo0c/5aNHP+Yjxz/o5oe/7iuI//GvCz/y8Az/8vANf/LwTX/y8E1/8vBNf/LwTX/y8E1/8vBNf/LwTX/y8E1/8vBNf/LwTX/y8E1/8m+Mv+ypyP/k4ob9QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJOKHr+WjRz/l44c/5qRHP+yqCH/1soy/+DWWP/j2mj/49tr/+Pba//j22v/49tr/+Pba//j22v/49tr/+Pba//j22v/49tr/+Pba//j22v/49pp/93TTf+6sCb1AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlY0elZaNHP+akRz/mI8c/6SbHv+/tCjT2M9WdN7WbnTf13N039hzdN/Yc3Tf2HN039hzdN/Yc3Tf2HN039hzdN/Yc3Tf2HN039hzdN/Yc3Tf13N03dVodNHHRXAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACVjR9elo0c/6CXHv+ZkBz/lo0c/5aNHeuUjy8IAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACWjyMylYwdvZSMHmqQiSMaAAAAAAAAAAAAAAAAAAAAAJWOKBiYjxz9p50f/56VHf+WjRz/lo0c/5aOIGoAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJaOHqmWjRz/lo0c/5WMHPuVjB3FkoofYAAAAAAAAAAAAAAAAKCYILetoyD/q6Eg/5aNHP+WjRz/lo0c7ZaPJRoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACWjiFGlo0c/ZaNHP+WjRz/lo0c/5aNHP+Wjh94AAAAAAAAAAAAAAAAqKApQrSqIf+7sCP/opge/5aNHP+WjRz/lY0dx5WOIwoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlo4gJpWNHOuWjRz/lo0c/6OaHv+qoB//oJcf8ZaPKBIAAAAAAAAAAAAAAAAAAAAAu7Ans8a7Jf++syP/m5Id/5aNHP+WjRz/lY0dx5OLIxoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJSMITiVjBznlo0c/5aNHP+imB7/xLgk/8S5JP+4rih0AAAAAAAAAAAAAAAAAAAAAAAAAAC9tDQczcEo7dPHKP+9siP/mpEc/5aNHP+WjRz/lYwc65SMHmiLhicIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAI+IIhSVjB6LlYwc+ZaNHP+WjRz/oJYd/8a7Jf/Txyf/yb4px7SvRwQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADNwzNE18sv+djMMf/CtyX/nZQd/5aNHP+WjRz/lo0c/5WMHOeUjB2Vk4seWJSMIDaPiB8sk4sgPJSLHmSUjB2plYwc9ZaNHP+WjRz/lo0c/6SaHv/Lvyj/2M0y/9bKLuXHvTkcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADRxz9S2s89+dvQPf/Pwy7/rKIg/5eOHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/lo0c/5qRHP+1qiL/1Mky/9vQP//ZzjzlzsRBKAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADUykw+29FL597TTf/b0EL/yr8u/66kIf+ckx3/lo0c/5aNHP+WjRz/lo0c/5aNHP+WjRz/l40c/5+WHf+1qiP/0MUz/9zSRv/e1E3/2tBKy9HJTx4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADQyF8W29JYo+DWWv3g11r/3tRQ/9jOQf/MwTT/wLYs/7qvJ/+4rSb/u7Eo/8O4Lf/QxTf/2tBF/9/VU//g11r/39Va9drRWHzIwmkGAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA1s9oMN7WZqHh2Gjz49po/+PaaP/i2WX/4tlj/+LZYv/i2WP/4tpm/+PaaP/j2mj/4Nho593VZYfSzGsaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAM3Ifwzc1XNK4NlzheLadavi2nXB4ttzw+Hadb3h2nSj4NhzeNrTdTrIxIIEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////gA///gAD//gAAP/wAAB/4AAAP8AAAB+AH8APgH/wDwD/+AcB//wGA//+BgP//h4AAAP+AAAD/gAAA/4AAAP+AAAD/gAAA/4D//4eA//+BwH//AcA//gHgH/wD4AfwA/AAAAf4AAAP/AAAH/4AAD//gAD//+AD//////8='
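# A brief sketch (not part of icon.py) of how an icon embedded this way is
# typically materialized; the output filename is an arbitrary example:
#
#     import base64
#     with open("icon.ico", "wb") as f:
#         f.write(base64.b64decode(img))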
| 5,725 | 5,725 | 0.92262 | 383 | 5,725 | 13.791123 | 0.571802 | 0.046952 | 0.056607 | 0.061151 | 0.075161 | 0.071185 | 0.066263 | 0.066263 | 0.050738 | 0.042408 | 0 | 0.080377 | 0.000349 | 5,725 | 1 | 5,725 | 5,725 | 0.842565 | 0 | 0 | 0 | 0 | 1 | 0.998254 | 0.998254 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
4c6f4f39f28634b523c57d3cfff67cec75e9a7e7 | 12,860 | py | Python | regreg/affine/tests/test_normalize.py | vishalbelsare/regreg | d1b62cc43cdd83331f2b0817b0ae099d5ef97966 | ["BSD-2-Clause"] | 11 | 2016-02-25T01:53:03.000Z | 2020-11-30T00:59:46.000Z | regreg/affine/tests/test_normalize.py | vishalbelsare/regreg | d1b62cc43cdd83331f2b0817b0ae099d5ef97966 | ["BSD-2-Clause"] | 21 | 2015-09-17T19:18:09.000Z | 2021-04-28T06:15:02.000Z | regreg/affine/tests/test_normalize.py | vishalbelsare/regreg | d1b62cc43cdd83331f2b0817b0ae099d5ef97966 | ["BSD-2-Clause"] | 8 | 2016-03-24T00:03:03.000Z | 2019-08-25T23:40:42.000Z |
from itertools import product
import nose.tools as nt
import numpy as np
import scipy.sparse
import regreg.api as rr
from regreg.identity_quadratic import identity_quadratic as sq
from regreg.tests.decorators import set_seed_for_test
@set_seed_for_test()
def test_centering():
"""
This test verifies that the normalized transform
of affine correctly implements the linear
transform that multiplies first by X, then centers.
"""
# N - number of data points
# P - number of columns in design == number of betas
N, P = 40, 30
# an arbitrary positive offset for data and design
offset = 50
# design - with ones as last column
X = np.ones((N,P))
X[:,:-1] = np.random.normal(size=(N,P-1)) + offset
X2 = X - X.mean(axis=0)[None,:]
L = rr.normalize(X, center=True, scale=False)
# coef for loss
for _ in range(10):
beta = np.random.normal(size=(P,))
v = L.linear_map(beta)
v2 = np.dot(X, beta)
v2 -= v2.mean()
v3 = np.dot(X2, beta)
v4 = L.affine_map(beta)
np.testing.assert_almost_equal(v, v3)
np.testing.assert_almost_equal(v, v2)
np.testing.assert_almost_equal(v, v4)
y = np.random.standard_normal(N)
u1 = L.adjoint_map(y)
y2 = y - y.mean()
u2 = np.dot(X.T, y2)
np.testing.assert_almost_equal(u1, u2)
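# The identity exercised above: with Xc = X - mean(X, axis=0), we have
# Xc @ beta = X @ beta - mean(X @ beta), and for the adjoint,
# Xc.T @ y = X.T @ (y - mean(y)), since Xc = (I - (1/N) * ones((N, N))) @ X
# and the centering projector is symmetric.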
@set_seed_for_test()
def test_scaling():
"""
This test verifies that the normalized transform
of affine correctly implements the linear
transform that scales the columns of X before multiplying.
"""
# N - number of data points
# P - number of columns in design == number of betas
N, P = 40, 30
# an arbitrary positive offset for data and design
offset = 50
# design - with ones as last column
X = np.ones((N,P))
X[:,:-1] = np.random.normal(size=(N,P-1)) + offset
L = rr.normalize(X, center=False, scale=True)
# coef for loss
scalings = np.sqrt((X**2).sum(0) / N)
scaling_matrix = np.diag(1./scalings)
for _ in range(10):
beta = np.random.normal(size=(P,))
v = L.linear_map(beta)
v2 = np.dot(X, np.dot(scaling_matrix, beta))
v3 = L.affine_map(beta)
np.testing.assert_almost_equal(v, v2)
np.testing.assert_almost_equal(v, v3)
y = np.random.standard_normal(N)
u1 = L.adjoint_map(y)
u2 = np.dot(scaling_matrix, np.dot(X.T, y))
np.testing.assert_almost_equal(u1, u2)
@set_seed_for_test()
def test_scaling_and_centering():
"""
This test verifies that the normalized transform
of affine correctly implements the linear
transform that scales the columns of X, multiplies, then centers.
"""
# N - number of data points
# P - number of columns in design == number of betas
N, P = 40, 30
# an arbitrary positive offset for data and design
offset = 50
# design - with no column of ones!
X = np.random.normal(size=(N,P)) + offset
L = rr.normalize(X, center=True, scale=True) # the default
# coef for loss
scalings = np.std(X, 0, ddof=1)
scaling_matrix = np.diag(1./scalings)
for _ in range(10):
beta = np.random.normal(size=(P,))
v = L.linear_map(beta)
v2 = np.dot(X, np.dot(scaling_matrix, beta))
v2 -= v2.mean()
np.testing.assert_almost_equal(v, v2)
y = np.random.standard_normal(N)
u1 = L.adjoint_map(y)
y2 = y - y.mean()
u2 = np.dot(scaling_matrix, np.dot(X.T, y2))
np.testing.assert_almost_equal(u1, u2)
@set_seed_for_test()
def test_centering_fit(debug=False):
# N - number of data points
# P - number of columns in design == number of betas
N, P = 40, 30
# an arbitrary positive offset for data and design
offset = 50
# design - random entries with a positive offset (no column of ones)
X = np.random.normal(size=(N,P)) + offset
X2 = X - X.mean(axis=0)[None,:]
# the normalizer
L = rr.normalize(X, center=True, scale=False)
# data
Y = np.random.normal(size=(N,)) + offset
# coef for loss
coef = 0.5
# lagrange for penalty
lagrange = .1
# Loss function (squared difference between fitted and actual data)
loss = rr.quadratic_loss.affine(L, -Y, coef=coef)
penalties = [rr.constrained_positive_part(25, lagrange=lagrange),
rr.nonnegative(5)]
groups = [slice(0,25), slice(25,30)]
penalty = rr.separable((P,), penalties,
groups)
initial = np.random.standard_normal(P)
composite_form = rr.separable_problem.fromatom(penalty, loss)
solver = rr.FISTA(composite_form)
solver.debug = debug
solver.fit(tol=1.0e-12, min_its=200)
coefs = solver.composite.coefs
# Solve the problem with X2
loss2 = rr.quadratic_loss.affine(X2, -Y, coef=coef)
initial2 = np.random.standard_normal(P)
composite_form2 = rr.separable_problem.fromatom(penalty, loss2)
for _ in range(10):
beta = np.random.standard_normal(P)
g1 = loss.smooth_objective(beta, mode='grad')
g2 = loss2.smooth_objective(beta, mode='grad')
np.testing.assert_almost_equal(g1, g2)
b1 = penalty.proximal(sq(1, beta, g1, 0))
b2 = penalty.proximal(sq(1, beta, g2, 0))
np.testing.assert_almost_equal(b1, b2)
f1 = composite_form.objective(beta)
f2 = composite_form2.objective(beta)
np.testing.assert_almost_equal(f1, f2)
solver2 = rr.FISTA(composite_form2)
solver2.debug = debug
solver2.fit(tol=1.0e-12, min_its=200)
coefs2 = solver2.composite.coefs
np.testing.assert_almost_equal(composite_form.objective(coefs), composite_form.objective(coefs2))
np.testing.assert_almost_equal(composite_form2.objective(coefs), composite_form2.objective(coefs2))
nt.assert_true(np.linalg.norm(coefs - coefs2) / max(np.linalg.norm(coefs),1) < 1.0e-04)
@set_seed_for_test()
def test_scaling_fit(debug=False):
# N - number of data points
# P - number of columns in design == number of betas
N, P = 40, 30
# an arbitrary positive offset for data and design
offset = 2
# design - with ones as last column
X = np.ones((N,P))
X[:,:-1] = np.random.normal(size=(N,P-1)) + offset
X2 = X / (np.sqrt((X**2).mean(0)))[None,:]
L = rr.normalize(X, center=False, scale=True)
# data
Y = np.random.normal(size=(N,)) + offset
# lagrange for penalty
lagrange = .1
# Loss function (squared difference between fitted and actual data)
loss = rr.squared_error(L, Y)
penalties = [rr.constrained_positive_part(25, lagrange=lagrange),
rr.nonnegative(5)]
groups = [slice(0,25), slice(25,30)]
penalty = rr.separable((P,), penalties,
groups)
initial = np.random.standard_normal(P)
composite_form = rr.separable_problem.fromatom(penalty, loss)
solver = rr.FISTA(composite_form)
solver.debug = debug
solver.fit(tol=1.0e-12, min_its=200)
coefs = solver.composite.coefs
# Solve the problem with X2
loss2 = rr.squared_error(X2, Y)
initial2 = np.random.standard_normal(P)
composite_form2 = rr.separable_problem.fromatom(penalty, loss2)
solver2 = rr.FISTA(composite_form2)
solver2.debug = debug
solver2.fit(tol=1.0e-12, min_its=200)
coefs2 = solver2.composite.coefs
for _ in range(10):
beta = np.random.standard_normal(P)
g1 = loss.smooth_objective(beta, mode='grad')
g2 = loss2.smooth_objective(beta, mode='grad')
np.testing.assert_almost_equal(g1, g2)
b1 = penalty.proximal(sq(1, beta, g1, 0))
b2 = penalty.proximal(sq(1, beta, g2, 0))
np.testing.assert_almost_equal(b1, b2)
f1 = composite_form.objective(beta)
f2 = composite_form2.objective(beta)
np.testing.assert_almost_equal(f1, f2)
np.testing.assert_almost_equal(composite_form.objective(coefs), composite_form.objective(coefs2))
np.testing.assert_almost_equal(composite_form2.objective(coefs), composite_form2.objective(coefs2))
nt.assert_true(np.linalg.norm(coefs - coefs2) / max(np.linalg.norm(coefs),1) < 1.0e-04)
@set_seed_for_test()
def test_scaling_and_centering_fit(debug=False):
# N - number of data points
# P - number of columns in design == number of betas
N, P = 40, 30
# an arbitrary positive offset for data and design
offset = 2
# design - random entries, no column of ones (offset multiplied by zero below)
X = np.random.normal(size=(N,P)) + 0 * offset
X2 = X - X.mean(0)[None,:]
X2 = X2 / np.std(X2,0,ddof=1)[None,:]
L = rr.normalize(X, center=True, scale=True)
# data
Y = np.random.normal(size=(N,)) + offset
# lagrange for penalty
lagrange = .1
# Loss function (squared difference between fitted and actual data)
loss = rr.squared_error(L, Y)
penalties = [rr.constrained_positive_part(25, lagrange=lagrange),
rr.nonnegative(5)]
groups = [slice(0,25), slice(25,30)]
penalty = rr.separable((P,), penalties,
groups)
initial = np.random.standard_normal(P)
composite_form = rr.separable_problem.fromatom(penalty, loss)
solver = rr.FISTA(composite_form)
solver.debug = debug
solver.fit(tol=1.0e-12, min_its=200)
coefs = solver.composite.coefs
# Solve the problem with X2
loss2 = rr.squared_error(X2, Y)
initial2 = np.random.standard_normal(P)
composite_form2 = rr.separable_problem.fromatom(penalty, loss2)
solver2 = rr.FISTA(composite_form2)
solver2.debug = debug
solver2.fit(tol=1.0e-12, min_its=200)
coefs2 = solver2.composite.coefs
for _ in range(10):
beta = np.random.standard_normal(P)
g1 = loss.smooth_objective(beta, mode='grad')
g2 = loss2.smooth_objective(beta, mode='grad')
np.testing.assert_almost_equal(g1, g2)
b1 = penalty.proximal(sq(1, beta, g1, 0))
b2 = penalty.proximal(sq(1, beta, g2, 0))
np.testing.assert_almost_equal(b1, b2)
f1 = composite_form.objective(beta)
f2 = composite_form2.objective(beta)
np.testing.assert_almost_equal(f1, f2)
np.testing.assert_almost_equal(composite_form.objective(coefs), composite_form.objective(coefs2))
np.testing.assert_almost_equal(composite_form2.objective(coefs), composite_form2.objective(coefs2))
nt.assert_true(np.linalg.norm(coefs - coefs2) / max(np.linalg.norm(coefs),1) < 1.0e-04)
@set_seed_for_test()
def test_scaling_and_centering_intercept_fit(debug=False):
# N - number of data points
# P - number of columns in design == number of betas
N, P = 40, 30
# an arbitrary positive offset for data and design
offset = 2
# design - random entries, no column of ones (intercept handled by normalize below)
X = np.random.normal(size=(N,P)) + 0 * offset
X2 = X - X.mean(0)[None,:]
X2 = X2 / np.std(X2,0,ddof=1)[None,:]
X2 = np.hstack([np.ones((X2.shape[0],1)), X2])
L = rr.normalize(X, center=True, scale=True, intercept=True)
# data
Y = np.random.normal(size=(N,)) + offset
# lagrange for penalty
lagrange = .1
# Loss function (squared difference between fitted and actual data)
loss = rr.squared_error(L, Y)
penalties = [rr.constrained_positive_part(25, lagrange=lagrange),
rr.nonnegative(5)]
groups = [slice(0,25), slice(25,30)]
penalty = rr.separable((P+1,), penalties,
groups)
initial = np.random.standard_normal(P+1)
composite_form = rr.separable_problem.fromatom(penalty, loss)
solver = rr.FISTA(composite_form)
solver.debug = debug
solver.fit(tol=1.0e-12, min_its=200)
coefs = solver.composite.coefs
# Solve the problem with X2
loss2 = rr.squared_error(X2, Y)
initial2 = np.random.standard_normal(P+1)
composite_form2 = rr.separable_problem.fromatom(penalty, loss2)
solver2 = rr.FISTA(composite_form2)
solver2.debug = debug
solver2.fit(tol=1.0e-12, min_its=200)
coefs2 = solver2.composite.coefs
for _ in range(10):
beta = np.random.standard_normal(P+1)
g1 = loss.smooth_objective(beta, mode='grad')
g2 = loss2.smooth_objective(beta, mode='grad')
np.testing.assert_almost_equal(g1, g2)
b1 = penalty.proximal(sq(1, beta, g1, 0))
b2 = penalty.proximal(sq(1, beta, g2, 0))
np.testing.assert_almost_equal(b1, b2)
f1 = composite_form.objective(beta)
f2 = composite_form2.objective(beta)
np.testing.assert_almost_equal(f1, f2)
np.testing.assert_almost_equal(composite_form.objective(coefs), composite_form.objective(coefs2))
np.testing.assert_almost_equal(composite_form2.objective(coefs), composite_form2.objective(coefs2))
nt.assert_true(np.linalg.norm(coefs - coefs2) / max(np.linalg.norm(coefs),1) < 1.0e-04)
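# A compact numpy-only restatement of the equivalences the tests above verify
# (illustrative; shapes and offsets are arbitrary):
#
#     import numpy as np
#     X = np.random.normal(size=(40, 30)) + 50
#     beta = np.random.normal(size=30)
#     # centering commutes with the matrix-vector product:
#     assert np.allclose((X - X.mean(0)) @ beta, X @ beta - (X @ beta).mean())
#     # column scaling can equivalently be applied to the coefficients:
#     s = np.sqrt((X ** 2).sum(0) / X.shape[0])
#     assert np.allclose((X / s) @ beta, X @ (beta / s))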
| 32.974359 | 103 | 0.648445 | 1,876 | 12,860 | 4.326759 | 0.091684 | 0.028582 | 0.053591 | 0.075028 | 0.946162 | 0.940988 | 0.940865 | 0.933103 | 0.887643 | 0.882346 | 0 | 0.040327 | 0.228694 | 12,860 | 389 | 104 | 33.059126 | 0.778002 | 0.163453 | 0 | 0.855967 | 0 | 0 | 0.003008 | 0 | 0 | 0 | 0 | 0 | 0.135802 | 1 | 0.028807 | false | 0 | 0.028807 | 0 | 0.057613 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d5d5ee28d0ead15b6ff678b3a4ee98a71a171788 | 4,658 | py | Python | tests/system/test_connection_asr903.py | kstaniek/condoor | 77c054b29d4e286c1d7aca2c74dff86b805e1fae | ["Apache-2.0"] | 7 | 2016-01-20T09:04:09.000Z | 2020-02-25T07:14:38.000Z | tests/system/test_connection_asr903.py | kstaniek/condoor | 77c054b29d4e286c1d7aca2c74dff86b805e1fae | ["Apache-2.0"] | 55 | 2015-12-16T14:50:59.000Z | 2018-04-23T15:27:15.000Z | tests/system/test_connection_asr903.py | kstaniek/condoor | 77c054b29d4e286c1d7aca2c74dff86b805e1fae | ["Apache-2.0"] | 19 | 2016-04-22T06:09:32.000Z | 2022-02-25T20:21:51.000Z |
from tests.system.common import CondoorTestCase, StopTelnetSrv, StartTelnetSrv
from tests.dmock.dmock import ASR903Handler
from tests.utils import remove_cache_file
import condoor
class TestASR903Connection(CondoorTestCase):
@StartTelnetSrv(ASR903Handler, 10026)
def setUp(self):
CondoorTestCase.setUp(self)
@StopTelnetSrv()
def tearDown(self):
pass
def test_ASR903_1_discovery(self):
"""ASR903: Test the connection and discovery"""
remove_cache_file()
urls = ["telnet://admin:admin@127.0.0.1:10026/?enable_password=admin"]
conn = condoor.Connection("host", urls, log_session=self.log_session, log_level=self.log_level)
self.conn = conn
conn.connect(self.logfile_condoor)
self.assertEqual(conn.is_discovered, True, "Not discovered properly")
self.assertEqual(conn.hostname, "PAN-5205-ASR903", "Wrong Hostname: {}".format(conn.hostname))
self.assertEqual(conn.family, "ASR900", "Wrong Family: {}".format(conn.family))
self.assertEqual(conn.platform, "ASR-903", "Wrong Platform: {}".format(conn.platform))
self.assertEqual(conn.os_type, "XE", "Wrong OS Type: {}".format(conn.os_type))
self.assertEqual(conn.os_version, "03.18.00.S", "Wrong Version: {}".format(conn.os_version))
self.assertEqual(conn.udi['name'], "Chassis", "Wrong Name: {}".format(conn.udi['name']))
self.assertEqual(conn.udi['description'], "ASR 903 Series Router Chassis",
"Wrong Description: {}".format(conn.udi['description']))
self.assertEqual(conn.udi['pid'], "ASR-903", "Wrong PID: {}".format(conn.udi['pid']))
self.assertEqual(conn.udi['vid'], "V01", "Wrong VID: {}".format(conn.udi['vid']))
self.assertEqual(conn.udi['sn'], "FOX1717P569", "Wrong S/N: {}".format(conn.udi['sn']))
self.assertEqual(conn.prompt, "PAN-5205-ASR903#", "Wrong Prompt: {}".format(conn.prompt))
with self.assertRaises(condoor.CommandSyntaxError):
conn.send("wrongcommand")
conn.disconnect()
def test_ASR903_2_rediscovery(self):
"""ASR903: Test whether the cached information is used"""
urls = ["telnet://admin:admin@127.0.0.1:10026/?enable_password=admin"]
conn = condoor.Connection("host", urls, log_session=self.log_session, log_level=self.log_level)
self.conn = conn
conn.connect(self.logfile_condoor)
self.assertEqual(conn.is_discovered, True, "Not discovered properly")
self.assertEqual(conn.hostname, "PAN-5205-ASR903", "Wrong Hostname: {}".format(conn.hostname))
self.assertEqual(conn.family, "ASR900", "Wrong Family: {}".format(conn.family))
self.assertEqual(conn.platform, "ASR-903", "Wrong Platform: {}".format(conn.platform))
self.assertEqual(conn.os_type, "XE", "Wrong OS Type: {}".format(conn.os_type))
self.assertEqual(conn.os_version, "03.18.00.S", "Wrong Version: {}".format(conn.os_version))
self.assertEqual(conn.udi['name'], "Chassis", "Wrong Name: {}".format(conn.udi['name']))
self.assertEqual(conn.udi['description'], "ASR 903 Series Router Chassis",
"Wrong Description: {}".format(conn.udi['description']))
self.assertEqual(conn.udi['pid'], "ASR-903", "Wrong PID: {}".format(conn.udi['pid']))
self.assertEqual(conn.udi['vid'], "V01", "Wrong VID: {}".format(conn.udi['vid']))
self.assertEqual(conn.udi['sn'], "FOX1717P569", "Wrong S/N: {}".format(conn.udi['sn']))
self.assertEqual(conn.prompt, "PAN-5205-ASR903#", "Wrong Prompt: {}".format(conn.prompt))
with self.assertRaises(condoor.CommandSyntaxError):
conn.send("wrongcommand")
conn.disconnect()
def test_ASR903_3_connection_wrong_password(self):
"""ASR903: Test wrong password"""
urls = ["telnet://:password@127.0.0.1:10026/?enable_password=admin"]
self.conn = condoor.Connection("host", urls, log_session=self.log_session, log_level=self.log_level)
with self.assertRaises(condoor.ConnectionAuthenticationError):
self.conn.connect(self.logfile_condoor)
def test_ASR903_4_connection_wrong_enable_password(self):
"""ASR903: Test wrong enable password"""
urls = ["telnet://:password@127.0.0.1:10026/?enable_password=admin"]
self.conn = condoor.Connection("host", urls, log_session=self.log_session, log_level=self.log_level)
with self.assertRaises(condoor.ConnectionAuthenticationError):
self.conn.connect(self.logfile_condoor)
if __name__ == '__main__':
from unittest import main
main()
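# Minimal usage sketch distilled from the tests above. It assumes the dmock telnet
# server is already listening on 127.0.0.1:10026 (as started by setUp); the URL and
# credentials are the mock-server values used here, not a real device.
#
#     import condoor
#     conn = condoor.Connection(
#         "host", ["telnet://admin:admin@127.0.0.1:10026/?enable_password=admin"])
#     conn.connect(logfile)  # logfile: an open file object, as in the tests above
#     print(conn.hostname, conn.platform, conn.os_version)
#     conn.disconnect()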
| 51.755556
| 108
| 0.667024
| 565
| 4,658
| 5.387611
| 0.175221
| 0.118265
| 0.149803
| 0.072273
| 0.827858
| 0.810118
| 0.810118
| 0.810118
| 0.810118
| 0.810118
| 0
| 0.043591
| 0.172606
| 4,658
| 89
| 109
| 52.337079
| 0.746238
| 0.033491
| 0
| 0.716418
| 0
| 0
| 0.222272
| 0.051774
| 0
| 0
| 0
| 0
| 0.41791
| 1
| 0.089552
| false
| 0.104478
| 0.074627
| 0
| 0.179104
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
d5e4ed019c6d56d6d3054e5be07c32e3e05e391f
| 9,378
|
py
|
Python
|
utils/datasets.py
|
Lornatang/PyTorch-AlexNet
|
c5276b17b50ca0e29244d3b46b94ad5aa007d8e8
|
[
"Apache-2.0"
] | 4
|
2019-09-02T09:09:11.000Z
|
2020-04-10T00:32:19.000Z
|
utils/datasets.py
|
Lornatang/ClassifierGAN
|
c5276b17b50ca0e29244d3b46b94ad5aa007d8e8
|
[
"Apache-2.0"
] | null | null | null |
utils/datasets.py
|
Lornatang/ClassifierGAN
|
c5276b17b50ca0e29244d3b46b94ad5aa007d8e8
|
[
"Apache-2.0"
] | 1
|
2019-12-09T05:56:57.000Z
|
2019-12-09T05:56:57.000Z
|
# Copyright 2019 Lorna Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Intelligent simplify code volume, easy to load data"""
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.datasets as datasets  # MNIST/FashionMNIST/CIFAR loaders ship with torchvision
def load_datasets(name, root, batch_size):
if name == "mnist":
train_dataset = datasets.MNIST(root=root,
download=True,
train=True,
transform=transforms.Compose([
transforms.Resize(28),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize([0.5], [0.5]),
]))
train_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size,
shuffle=True, num_workers=8)
test_dataset = datasets.MNIST(root=root,
download=True,
train=False,
transform=transforms.Compose([
transforms.Resize(28),
transforms.ToTensor(),
transforms.Normalize([0.5], [0.5]),
]))
test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size,
shuffle=False, num_workers=8)
return train_dataloader, test_dataloader
elif name == "fmnist":
train_dataset = datasets.FashionMNIST(root=root,
download=True,
train=True,
transform=transforms.Compose([
transforms.Resize(28),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize([0.5], [0.5]),
]))
train_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size,
shuffle=True, num_workers=8)
test_dataset = datasets.FashionMNIST(root=root,
download=True,
train=False,
transform=transforms.Compose([
transforms.Resize(28),
transforms.ToTensor(),
transforms.Normalize([0.5], [0.5]),
]))
test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size,
shuffle=False, num_workers=8)
return train_dataloader, test_dataloader
elif name == "kmnist":
train_dataset = datasets.KMNIST(root=root,
download=True,
train=True,
transform=transforms.Compose([
transforms.Resize(28),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize([0.5], [0.5]),
]))
train_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size,
shuffle=True, num_workers=8)
test_dataset = datasets.KMNIST(root=root,
download=True,
train=False,
transform=transforms.Compose([
transforms.Resize(28),
transforms.ToTensor(),
transforms.Normalize([0.5], [0.5]),
]))
test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size,
shuffle=False, num_workers=8)
return train_dataloader, test_dataloader
elif name == "qmnist":
train_dataset = datasets.QMNIST(root=root,
download=True,
train=True,
transform=transforms.Compose([
transforms.Resize(28),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize([0.5], [0.5]),
]))
train_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size,
shuffle=True, num_workers=8)
test_dataset = datasets.QMNIST(root=root,
download=True,
what="test50k",
train=False,
transform=transforms.Compose([
transforms.Resize(28),
transforms.ToTensor(),
transforms.Normalize([0.5], [0.5]),
]))
test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size,
shuffle=False, num_workers=8)
return train_dataloader, test_dataloader
elif name == "cifar10":
train_dataset = datasets.CIFAR10(root=root,
download=True,
train=True,
transform=transforms.Compose([
transforms.Resize(32),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
]))
train_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size,
shuffle=True, num_workers=8)
test_dataset = datasets.CIFAR10(root=root,
download=True,
train=False,
transform=transforms.Compose([
transforms.Resize(32),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
]))
test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size,
shuffle=False, num_workers=8)
return train_dataloader, test_dataloader
elif name == "cifar100":
train_dataset = datasets.CIFAR100(root=root,
download=True,
train=True,
transform=transforms.Compose([
transforms.Resize(32),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
]))
train_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size,
shuffle=True, num_workers=8)
test_dataset = datasets.CIFAR100(root=root,
download=True,
train=False,
transform=transforms.Compose([
transforms.Resize(32),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
]))
test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size,
shuffle=False, num_workers=8)
return train_dataloader, test_dataloader
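# Usage sketch (the "./data" path and batch size are illustrative): build the MNIST
# loaders defined above and pull one batch. The dataset is downloaded on first use.
if __name__ == "__main__":
    train_loader, test_loader = load_datasets("mnist", root="./data", batch_size=64)
    images, labels = next(iter(train_loader))
    print(images.shape, labels.shape)  # expect [64, 1, 28, 28] and [64]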
| 52.1
| 95
| 0.42557
| 705
| 9,378
| 5.539007
| 0.164539
| 0.020487
| 0.021511
| 0.028681
| 0.823047
| 0.823047
| 0.823047
| 0.811012
| 0.772343
| 0.772343
| 0
| 0.029479
| 0.489977
| 9,378
| 179
| 96
| 52.391061
| 0.786954
| 0.075709
| 0
| 0.839161
| 0
| 0
| 0.005202
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006993
| false
| 0
| 0.020979
| 0
| 0.06993
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91350d9f7777715427d757b8ca733728f1918f3f
| 59,590
|
py
|
Python
|
sdk/python/pulumi_okta/policy/rule_signon.py
|
pulumi/pulumi-okta
|
83f7617a85b3d05213901773fa4e6a151ab6076b
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2019-10-29T21:59:22.000Z
|
2021-11-08T12:00:24.000Z
|
sdk/python/pulumi_okta/policy/rule_signon.py
|
pulumi/pulumi-okta
|
83f7617a85b3d05213901773fa4e6a151ab6076b
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2020-01-06T10:28:09.000Z
|
2022-03-25T19:52:40.000Z
|
sdk/python/pulumi_okta/policy/rule_signon.py
|
pulumi/pulumi-okta
|
83f7617a85b3d05213901773fa4e6a151ab6076b
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-09-11T16:31:04.000Z
|
2020-11-24T12:23:17.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['RuleSignonArgs', 'RuleSignon']
@pulumi.input_type
class RuleSignonArgs:
def __init__(__self__, *,
access: Optional[pulumi.Input[str]] = None,
authtype: Optional[pulumi.Input[str]] = None,
behaviors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
factor_sequences: Optional[pulumi.Input[Sequence[pulumi.Input['RuleSignonFactorSequenceArgs']]]] = None,
mfa_lifetime: Optional[pulumi.Input[int]] = None,
mfa_prompt: Optional[pulumi.Input[str]] = None,
mfa_remember_device: Optional[pulumi.Input[bool]] = None,
mfa_required: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
network_connection: Optional[pulumi.Input[str]] = None,
network_excludes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
network_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
policy_id: Optional[pulumi.Input[str]] = None,
policyid: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
risc_level: Optional[pulumi.Input[str]] = None,
session_idle: Optional[pulumi.Input[int]] = None,
session_lifetime: Optional[pulumi.Input[int]] = None,
session_persistent: Optional[pulumi.Input[bool]] = None,
status: Optional[pulumi.Input[str]] = None,
users_excludeds: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a RuleSignon resource.
:param pulumi.Input[str] access: Allow or deny access based on the rule conditions: `"ALLOW"`, `"DENY"` or `"CHALLENGE"`. The default is `"ALLOW"`.
:param pulumi.Input[str] authtype: Authentication entrypoint: `"ANY"`, `"LDAP_INTERFACE"` or `"RADIUS"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] behaviors: List of behavior IDs.
:param pulumi.Input[Sequence[pulumi.Input['RuleSignonFactorSequenceArgs']]] factor_sequences: Auth factor sequences. Should be set if `access = "CHALLENGE"`.
:param pulumi.Input[int] mfa_lifetime: Elapsed time before the next MFA challenge.
:param pulumi.Input[str] mfa_prompt: Prompt for MFA based on the device used, a factor session lifetime, or every sign-on attempt: `"DEVICE"`, `"SESSION"` or `"ALWAYS"`.
:param pulumi.Input[bool] mfa_remember_device: Remember MFA device. The default is `false`.
:param pulumi.Input[bool] mfa_required: Require MFA. The default is `false`.
:param pulumi.Input[str] name: Policy Rule Name.
:param pulumi.Input[str] network_connection: Network selection mode: `"ANYWHERE"`, `"ZONE"`, `"ON_NETWORK"`, or `"OFF_NETWORK"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] network_excludes: The network zones to exclude. Conflicts with `network_includes`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] network_includes: The network zones to include. Conflicts with `network_excludes`.
:param pulumi.Input[str] policy_id: Policy ID.
:param pulumi.Input[str] policyid: Policy ID.
:param pulumi.Input[int] priority: Policy Rule Priority. This attribute can be set to a valid priority; to avoid an endless diff, an error is raised if an invalid priority is provided. The API defaults it to the last (lowest) priority if omitted.
:param pulumi.Input[str] risc_level: Risc level: `"ANY"`, `"LOW"`, `"MEDIUM"` or `"HIGH"`. Default is `"ANY"`. It can also be
set to an empty string when the `RISC_SCORING` org feature flag is disabled.
:param pulumi.Input[int] session_idle: Max minutes a session can be idle.
:param pulumi.Input[int] session_lifetime: Max minutes a session is active (0 disables the limit).
:param pulumi.Input[bool] session_persistent: Whether session cookies will last across browser sessions. Okta Administrators can never have persistent session cookies.
:param pulumi.Input[str] status: Policy Rule Status: `"ACTIVE"` or `"INACTIVE"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] users_excludeds: Set of User IDs to Exclude
"""
if access is not None:
pulumi.set(__self__, "access", access)
if authtype is not None:
pulumi.set(__self__, "authtype", authtype)
if behaviors is not None:
pulumi.set(__self__, "behaviors", behaviors)
if factor_sequences is not None:
pulumi.set(__self__, "factor_sequences", factor_sequences)
if mfa_lifetime is not None:
pulumi.set(__self__, "mfa_lifetime", mfa_lifetime)
if mfa_prompt is not None:
pulumi.set(__self__, "mfa_prompt", mfa_prompt)
if mfa_remember_device is not None:
pulumi.set(__self__, "mfa_remember_device", mfa_remember_device)
if mfa_required is not None:
pulumi.set(__self__, "mfa_required", mfa_required)
if name is not None:
pulumi.set(__self__, "name", name)
if network_connection is not None:
pulumi.set(__self__, "network_connection", network_connection)
if network_excludes is not None:
pulumi.set(__self__, "network_excludes", network_excludes)
if network_includes is not None:
pulumi.set(__self__, "network_includes", network_includes)
if policy_id is not None:
pulumi.set(__self__, "policy_id", policy_id)
if policyid is not None:
warnings.warn("""Because of incorrect naming, 'policyid' field will be deprecated and then removed in the next versions of the provider. Please use 'policy_id' instead""", DeprecationWarning)
pulumi.log.warn("""policyid is deprecated: Because of incorrect naming, 'policyid' field will be deprecated and then removed in the next versions of the provider. Please use 'policy_id' instead""")
if policyid is not None:
pulumi.set(__self__, "policyid", policyid)
if priority is not None:
pulumi.set(__self__, "priority", priority)
if risc_level is not None:
pulumi.set(__self__, "risc_level", risc_level)
if session_idle is not None:
pulumi.set(__self__, "session_idle", session_idle)
if session_lifetime is not None:
pulumi.set(__self__, "session_lifetime", session_lifetime)
if session_persistent is not None:
pulumi.set(__self__, "session_persistent", session_persistent)
if status is not None:
pulumi.set(__self__, "status", status)
if users_excludeds is not None:
pulumi.set(__self__, "users_excludeds", users_excludeds)
@property
@pulumi.getter
def access(self) -> Optional[pulumi.Input[str]]:
"""
Allow or deny access based on the rule conditions: `"ALLOW"`, `"DENY"` or `"CHALLENGE"`. The default is `"ALLOW"`.
"""
return pulumi.get(self, "access")
@access.setter
def access(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "access", value)
@property
@pulumi.getter
def authtype(self) -> Optional[pulumi.Input[str]]:
"""
Authentication entrypoint: `"ANY"`, `"LDAP_INTERFACE"` or `"RADIUS"`.
"""
return pulumi.get(self, "authtype")
@authtype.setter
def authtype(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "authtype", value)
@property
@pulumi.getter
def behaviors(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of behavior IDs.
"""
return pulumi.get(self, "behaviors")
@behaviors.setter
def behaviors(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "behaviors", value)
@property
@pulumi.getter(name="factorSequences")
def factor_sequences(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RuleSignonFactorSequenceArgs']]]]:
"""
Auth factor sequences. Should be set if `access = "CHALLENGE"`.
"""
return pulumi.get(self, "factor_sequences")
@factor_sequences.setter
def factor_sequences(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RuleSignonFactorSequenceArgs']]]]):
pulumi.set(self, "factor_sequences", value)
@property
@pulumi.getter(name="mfaLifetime")
def mfa_lifetime(self) -> Optional[pulumi.Input[int]]:
"""
Elapsed time before the next MFA challenge.
"""
return pulumi.get(self, "mfa_lifetime")
@mfa_lifetime.setter
def mfa_lifetime(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "mfa_lifetime", value)
@property
@pulumi.getter(name="mfaPrompt")
def mfa_prompt(self) -> Optional[pulumi.Input[str]]:
"""
Prompt for MFA based on the device used, a factor session lifetime, or every sign-on attempt: `"DEVICE"`, `"SESSION"` or `"ALWAYS"`.
"""
return pulumi.get(self, "mfa_prompt")
@mfa_prompt.setter
def mfa_prompt(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "mfa_prompt", value)
@property
@pulumi.getter(name="mfaRememberDevice")
def mfa_remember_device(self) -> Optional[pulumi.Input[bool]]:
"""
Remember MFA device. The default is `false`.
"""
return pulumi.get(self, "mfa_remember_device")
@mfa_remember_device.setter
def mfa_remember_device(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "mfa_remember_device", value)
@property
@pulumi.getter(name="mfaRequired")
def mfa_required(self) -> Optional[pulumi.Input[bool]]:
"""
Require MFA. The default is `false`.
"""
return pulumi.get(self, "mfa_required")
@mfa_required.setter
def mfa_required(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "mfa_required", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Policy Rule Name.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkConnection")
def network_connection(self) -> Optional[pulumi.Input[str]]:
"""
Network selection mode: `"ANYWHERE"`, `"ZONE"`, `"ON_NETWORK"`, or `"OFF_NETWORK"`.
"""
return pulumi.get(self, "network_connection")
@network_connection.setter
def network_connection(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network_connection", value)
@property
@pulumi.getter(name="networkExcludes")
def network_excludes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The network zones to exclude. Conflicts with `network_includes`.
"""
return pulumi.get(self, "network_excludes")
@network_excludes.setter
def network_excludes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "network_excludes", value)
@property
@pulumi.getter(name="networkIncludes")
def network_includes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The network zones to include. Conflicts with `network_excludes`.
"""
return pulumi.get(self, "network_includes")
@network_includes.setter
def network_includes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "network_includes", value)
@property
@pulumi.getter(name="policyId")
def policy_id(self) -> Optional[pulumi.Input[str]]:
"""
Policy ID.
"""
return pulumi.get(self, "policy_id")
@policy_id.setter
def policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "policy_id", value)
@property
@pulumi.getter
def policyid(self) -> Optional[pulumi.Input[str]]:
"""
Policy ID.
"""
return pulumi.get(self, "policyid")
@policyid.setter
def policyid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "policyid", value)
@property
@pulumi.getter
def priority(self) -> Optional[pulumi.Input[int]]:
"""
Policy Rule Priority. This attribute can be set to a valid priority; to avoid an endless diff, an error is raised if an invalid priority is provided. The API defaults it to the last (lowest) priority if omitted.
"""
return pulumi.get(self, "priority")
@priority.setter
def priority(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "priority", value)
@property
@pulumi.getter(name="riscLevel")
def risc_level(self) -> Optional[pulumi.Input[str]]:
"""
Risc level: `"ANY"`, `"LOW"`, `"MEDIUM"` or `"HIGH"`. Default is `"ANY"`. It can also be
set to an empty string when the `RISC_SCORING` org feature flag is disabled.
"""
return pulumi.get(self, "risc_level")
@risc_level.setter
def risc_level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "risc_level", value)
@property
@pulumi.getter(name="sessionIdle")
def session_idle(self) -> Optional[pulumi.Input[int]]:
"""
Max minutes a session can be idle.
"""
return pulumi.get(self, "session_idle")
@session_idle.setter
def session_idle(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "session_idle", value)
@property
@pulumi.getter(name="sessionLifetime")
def session_lifetime(self) -> Optional[pulumi.Input[int]]:
"""
Max minutes a session is active (0 disables the limit).
"""
return pulumi.get(self, "session_lifetime")
@session_lifetime.setter
def session_lifetime(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "session_lifetime", value)
@property
@pulumi.getter(name="sessionPersistent")
def session_persistent(self) -> Optional[pulumi.Input[bool]]:
"""
Whether session cookies will last across browser sessions. Okta Administrators can never have persistent session cookies.
"""
return pulumi.get(self, "session_persistent")
@session_persistent.setter
def session_persistent(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "session_persistent", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
"""
Policy Rule Status: `"ACTIVE"` or `"INACTIVE"`.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
@property
@pulumi.getter(name="usersExcludeds")
def users_excludeds(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Set of User IDs to Exclude
"""
return pulumi.get(self, "users_excludeds")
@users_excludeds.setter
def users_excludeds(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "users_excludeds", value)
@pulumi.input_type
class _RuleSignonState:
def __init__(__self__, *,
access: Optional[pulumi.Input[str]] = None,
authtype: Optional[pulumi.Input[str]] = None,
behaviors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
factor_sequences: Optional[pulumi.Input[Sequence[pulumi.Input['RuleSignonFactorSequenceArgs']]]] = None,
mfa_lifetime: Optional[pulumi.Input[int]] = None,
mfa_prompt: Optional[pulumi.Input[str]] = None,
mfa_remember_device: Optional[pulumi.Input[bool]] = None,
mfa_required: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
network_connection: Optional[pulumi.Input[str]] = None,
network_excludes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
network_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
policy_id: Optional[pulumi.Input[str]] = None,
policyid: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
risc_level: Optional[pulumi.Input[str]] = None,
session_idle: Optional[pulumi.Input[int]] = None,
session_lifetime: Optional[pulumi.Input[int]] = None,
session_persistent: Optional[pulumi.Input[bool]] = None,
status: Optional[pulumi.Input[str]] = None,
users_excludeds: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering RuleSignon resources.
:param pulumi.Input[str] access: Allow or deny access based on the rule conditions: `"ALLOW"`, `"DENY"` or `"CHALLENGE"`. The default is `"ALLOW"`.
:param pulumi.Input[str] authtype: Authentication entrypoint: `"ANY"`, `"LDAP_INTERFACE"` or `"RADIUS"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] behaviors: List of behavior IDs.
:param pulumi.Input[Sequence[pulumi.Input['RuleSignonFactorSequenceArgs']]] factor_sequences: Auth factor sequences. Should be set if `access = "CHALLENGE"`.
:param pulumi.Input[int] mfa_lifetime: Elapsed time before the next MFA challenge.
:param pulumi.Input[str] mfa_prompt: Prompt for MFA based on the device used, a factor session lifetime, or every sign-on attempt: `"DEVICE"`, `"SESSION"` or `"ALWAYS"`.
:param pulumi.Input[bool] mfa_remember_device: Remember MFA device. The default is `false`.
:param pulumi.Input[bool] mfa_required: Require MFA. The default is `false`.
:param pulumi.Input[str] name: Policy Rule Name.
:param pulumi.Input[str] network_connection: Network selection mode: `"ANYWHERE"`, `"ZONE"`, `"ON_NETWORK"`, or `"OFF_NETWORK"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] network_excludes: The network zones to exclude. Conflicts with `network_includes`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] network_includes: The network zones to include. Conflicts with `network_excludes`.
:param pulumi.Input[str] policy_id: Policy ID.
:param pulumi.Input[str] policyid: Policy ID.
:param pulumi.Input[int] priority: Policy Rule Priority. This attribute can be set to a valid priority; to avoid an endless diff, an error is raised if an invalid priority is provided. The API defaults it to the last (lowest) priority if omitted.
:param pulumi.Input[str] risc_level: Risc level: `"ANY"`, `"LOW"`, `"MEDIUM"` or `"HIGH"`. Default is `"ANY"`. It can also be
set to an empty string when the `RISC_SCORING` org feature flag is disabled.
:param pulumi.Input[int] session_idle: Max minutes a session can be idle.
:param pulumi.Input[int] session_lifetime: Max minutes a session is active (0 disables the limit).
:param pulumi.Input[bool] session_persistent: Whether session cookies will last across browser sessions. Okta Administrators can never have persistent session cookies.
:param pulumi.Input[str] status: Policy Rule Status: `"ACTIVE"` or `"INACTIVE"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] users_excludeds: Set of User IDs to Exclude
"""
if access is not None:
pulumi.set(__self__, "access", access)
if authtype is not None:
pulumi.set(__self__, "authtype", authtype)
if behaviors is not None:
pulumi.set(__self__, "behaviors", behaviors)
if factor_sequences is not None:
pulumi.set(__self__, "factor_sequences", factor_sequences)
if mfa_lifetime is not None:
pulumi.set(__self__, "mfa_lifetime", mfa_lifetime)
if mfa_prompt is not None:
pulumi.set(__self__, "mfa_prompt", mfa_prompt)
if mfa_remember_device is not None:
pulumi.set(__self__, "mfa_remember_device", mfa_remember_device)
if mfa_required is not None:
pulumi.set(__self__, "mfa_required", mfa_required)
if name is not None:
pulumi.set(__self__, "name", name)
if network_connection is not None:
pulumi.set(__self__, "network_connection", network_connection)
if network_excludes is not None:
pulumi.set(__self__, "network_excludes", network_excludes)
if network_includes is not None:
pulumi.set(__self__, "network_includes", network_includes)
if policy_id is not None:
pulumi.set(__self__, "policy_id", policy_id)
if policyid is not None:
warnings.warn("""Because of incorrect naming, 'policyid' field will be deprecated and then removed in the next versions of the provider. Please use 'policy_id' instead""", DeprecationWarning)
pulumi.log.warn("""policyid is deprecated: Because of incorrect naming, 'policyid' field will be deprecated and then removed in the next versions of the provider. Please use 'policy_id' instead""")
if policyid is not None:
pulumi.set(__self__, "policyid", policyid)
if priority is not None:
pulumi.set(__self__, "priority", priority)
if risc_level is not None:
pulumi.set(__self__, "risc_level", risc_level)
if session_idle is not None:
pulumi.set(__self__, "session_idle", session_idle)
if session_lifetime is not None:
pulumi.set(__self__, "session_lifetime", session_lifetime)
if session_persistent is not None:
pulumi.set(__self__, "session_persistent", session_persistent)
if status is not None:
pulumi.set(__self__, "status", status)
if users_excludeds is not None:
pulumi.set(__self__, "users_excludeds", users_excludeds)
@property
@pulumi.getter
def access(self) -> Optional[pulumi.Input[str]]:
"""
Allow or deny access based on the rule conditions: `"ALLOW"`, `"DENY"` or `"CHALLENGE"`. The default is `"ALLOW"`.
"""
return pulumi.get(self, "access")
@access.setter
def access(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "access", value)
@property
@pulumi.getter
def authtype(self) -> Optional[pulumi.Input[str]]:
"""
Authentication entrypoint: `"ANY"`, `"LDAP_INTERFACE"` or `"RADIUS"`.
"""
return pulumi.get(self, "authtype")
@authtype.setter
def authtype(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "authtype", value)
@property
@pulumi.getter
def behaviors(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of behavior IDs.
"""
return pulumi.get(self, "behaviors")
@behaviors.setter
def behaviors(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "behaviors", value)
@property
@pulumi.getter(name="factorSequences")
def factor_sequences(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RuleSignonFactorSequenceArgs']]]]:
"""
Auth factor sequences. Should be set if `access = "CHALLENGE"`.
"""
return pulumi.get(self, "factor_sequences")
@factor_sequences.setter
def factor_sequences(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RuleSignonFactorSequenceArgs']]]]):
pulumi.set(self, "factor_sequences", value)
@property
@pulumi.getter(name="mfaLifetime")
def mfa_lifetime(self) -> Optional[pulumi.Input[int]]:
"""
Elapsed time before the next MFA challenge.
"""
return pulumi.get(self, "mfa_lifetime")
@mfa_lifetime.setter
def mfa_lifetime(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "mfa_lifetime", value)
@property
@pulumi.getter(name="mfaPrompt")
def mfa_prompt(self) -> Optional[pulumi.Input[str]]:
"""
Prompt for MFA based on the device used, a factor session lifetime, or every sign-on attempt: `"DEVICE"`, `"SESSION"` or `"ALWAYS"`.
"""
return pulumi.get(self, "mfa_prompt")
@mfa_prompt.setter
def mfa_prompt(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "mfa_prompt", value)
@property
@pulumi.getter(name="mfaRememberDevice")
def mfa_remember_device(self) -> Optional[pulumi.Input[bool]]:
"""
Remember MFA device. The default is `false`.
"""
return pulumi.get(self, "mfa_remember_device")
@mfa_remember_device.setter
def mfa_remember_device(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "mfa_remember_device", value)
@property
@pulumi.getter(name="mfaRequired")
def mfa_required(self) -> Optional[pulumi.Input[bool]]:
"""
Require MFA. The default is `false`.
"""
return pulumi.get(self, "mfa_required")
@mfa_required.setter
def mfa_required(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "mfa_required", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Policy Rule Name.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkConnection")
def network_connection(self) -> Optional[pulumi.Input[str]]:
"""
Network selection mode: `"ANYWHERE"`, `"ZONE"`, `"ON_NETWORK"`, or `"OFF_NETWORK"`.
"""
return pulumi.get(self, "network_connection")
@network_connection.setter
def network_connection(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network_connection", value)
@property
@pulumi.getter(name="networkExcludes")
def network_excludes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The network zones to exclude. Conflicts with `network_includes`.
"""
return pulumi.get(self, "network_excludes")
@network_excludes.setter
def network_excludes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "network_excludes", value)
@property
@pulumi.getter(name="networkIncludes")
def network_includes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The network zones to include. Conflicts with `network_excludes`.
"""
return pulumi.get(self, "network_includes")
@network_includes.setter
def network_includes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "network_includes", value)
@property
@pulumi.getter(name="policyId")
def policy_id(self) -> Optional[pulumi.Input[str]]:
"""
Policy ID.
"""
return pulumi.get(self, "policy_id")
@policy_id.setter
def policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "policy_id", value)
@property
@pulumi.getter
def policyid(self) -> Optional[pulumi.Input[str]]:
"""
Policy ID.
"""
return pulumi.get(self, "policyid")
@policyid.setter
def policyid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "policyid", value)
@property
@pulumi.getter
def priority(self) -> Optional[pulumi.Input[int]]:
"""
Policy Rule Priority. This attribute can be set to a valid priority; to avoid an endless diff, an error is raised if an invalid priority is provided. The API defaults it to the last (lowest) priority if omitted.
"""
return pulumi.get(self, "priority")
@priority.setter
def priority(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "priority", value)
@property
@pulumi.getter(name="riscLevel")
def risc_level(self) -> Optional[pulumi.Input[str]]:
"""
Risc level: `"ANY"`, `"LOW"`, `"MEDIUM"` or `"HIGH"`. Default is `"ANY"`. It can also be
set to an empty string when the `RISC_SCORING` org feature flag is disabled.
"""
return pulumi.get(self, "risc_level")
@risc_level.setter
def risc_level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "risc_level", value)
@property
@pulumi.getter(name="sessionIdle")
def session_idle(self) -> Optional[pulumi.Input[int]]:
"""
Max minutes a session can be idle.
"""
return pulumi.get(self, "session_idle")
@session_idle.setter
def session_idle(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "session_idle", value)
@property
@pulumi.getter(name="sessionLifetime")
def session_lifetime(self) -> Optional[pulumi.Input[int]]:
"""
Max minutes a session is active (0 disables the limit).
"""
return pulumi.get(self, "session_lifetime")
@session_lifetime.setter
def session_lifetime(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "session_lifetime", value)
@property
@pulumi.getter(name="sessionPersistent")
def session_persistent(self) -> Optional[pulumi.Input[bool]]:
"""
Whether session cookies will last across browser sessions. Okta Administrators can never have persistent session cookies.
"""
return pulumi.get(self, "session_persistent")
@session_persistent.setter
def session_persistent(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "session_persistent", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
"""
Policy Rule Status: `"ACTIVE"` or `"INACTIVE"`.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
@property
@pulumi.getter(name="usersExcludeds")
def users_excludeds(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Set of User IDs to Exclude
"""
return pulumi.get(self, "users_excludeds")
@users_excludeds.setter
def users_excludeds(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "users_excludeds", value)
class RuleSignon(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
access: Optional[pulumi.Input[str]] = None,
authtype: Optional[pulumi.Input[str]] = None,
behaviors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
factor_sequences: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RuleSignonFactorSequenceArgs']]]]] = None,
mfa_lifetime: Optional[pulumi.Input[int]] = None,
mfa_prompt: Optional[pulumi.Input[str]] = None,
mfa_remember_device: Optional[pulumi.Input[bool]] = None,
mfa_required: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
network_connection: Optional[pulumi.Input[str]] = None,
network_excludes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
network_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
policy_id: Optional[pulumi.Input[str]] = None,
policyid: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
risc_level: Optional[pulumi.Input[str]] = None,
session_idle: Optional[pulumi.Input[int]] = None,
session_lifetime: Optional[pulumi.Input[int]] = None,
session_persistent: Optional[pulumi.Input[bool]] = None,
status: Optional[pulumi.Input[str]] = None,
users_excludeds: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
Creates a Sign On Policy Rule.
## Example Usage
```python
import pulumi
import pulumi_okta as okta
test = okta.policy.Signon("test",
status="ACTIVE",
description="Example Policy")
new_city = okta.get_behaviour(name="New City")
example = okta.policy.RuleSignon("example",
access="CHALLENGE",
authtype="RADIUS",
network_connection="ANYWHERE",
policy_id=test.id,
status="ACTIVE",
risc_level="HIGH",
behaviors=[new_city.id],
factor_sequences=[
okta.policy.RuleSignonFactorSequenceArgs(
primary_criteria_factor_type="token:hotp",
primary_criteria_provider="CUSTOM",
secondary_criterias=[
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="token:software:totp",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="push",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="password",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="question",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="sms",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="token:software:totp",
provider="GOOGLE",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="email",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="call",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="webauthn",
provider="FIDO",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="token",
provider="RSA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="token",
provider="SYMANTEC",
),
],
),
okta.policy.RuleSignonFactorSequenceArgs(
primary_criteria_factor_type="token:software:totp",
primary_criteria_provider="OKTA",
),
])
```
## Import
A Policy Rule can be imported via the Policy and Rule ID.
```sh
$ pulumi import okta:policy/ruleSignon:RuleSignon example <policy id>/<rule id>
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] access: Allow or deny access based on the rule conditions: `"ALLOW"`, `"DENY"` or `"CHALLENGE"`. The default is `"ALLOW"`.
:param pulumi.Input[str] authtype: Authentication entrypoint: `"ANY"`, `"LDAP_INTERFACE"` or `"RADIUS"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] behaviors: List of behavior IDs.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RuleSignonFactorSequenceArgs']]]] factor_sequences: Auth factor sequences. Should be set if `access = "CHALLENGE"`.
:param pulumi.Input[int] mfa_lifetime: Elapsed time before the next MFA challenge.
:param pulumi.Input[str] mfa_prompt: Prompt for MFA based on the device used, a factor session lifetime, or every sign-on attempt: `"DEVICE"`, `"SESSION"` or `"ALWAYS"`.
:param pulumi.Input[bool] mfa_remember_device: Remember MFA device. The default is `false`.
:param pulumi.Input[bool] mfa_required: Require MFA. The default is `false`.
:param pulumi.Input[str] name: Policy Rule Name.
:param pulumi.Input[str] network_connection: Network selection mode: `"ANYWHERE"`, `"ZONE"`, `"ON_NETWORK"`, or `"OFF_NETWORK"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] network_excludes: The network zones to exclude. Conflicts with `network_includes`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] network_includes: The network zones to include. Conflicts with `network_excludes`.
:param pulumi.Input[str] policy_id: Policy ID.
:param pulumi.Input[str] policyid: Policy ID.
:param pulumi.Input[int] priority: Policy Rule Priority. This attribute can be set to a valid priority; to avoid an endless diff, an error is raised if an invalid priority is provided. The API defaults it to the last (lowest) priority if omitted.
:param pulumi.Input[str] risc_level: Risc level: `"ANY"`, `"LOW"`, `"MEDIUM"` or `"HIGH"`. Default is `"ANY"`. It can also be
set to an empty string when the `RISC_SCORING` org feature flag is disabled.
:param pulumi.Input[int] session_idle: Max minutes a session can be idle.
:param pulumi.Input[int] session_lifetime: Max minutes a session is active (0 disables the limit).
:param pulumi.Input[bool] session_persistent: Whether session cookies will last across browser sessions. Okta Administrators can never have persistent session cookies.
:param pulumi.Input[str] status: Policy Rule Status: `"ACTIVE"` or `"INACTIVE"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] users_excludeds: Set of User IDs to Exclude
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[RuleSignonArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates a Sign On Policy Rule.
## Example Usage
```python
import pulumi
import pulumi_okta as okta
test = okta.policy.Signon("test",
status="ACTIVE",
description="Example Policy")
new_city = okta.get_behaviour(name="New City")
example = okta.policy.RuleSignon("example",
access="CHALLENGE",
authtype="RADIUS",
network_connection="ANYWHERE",
policy_id=test.id,
status="ACTIVE",
risc_level="HIGH",
behaviors=[new_city.id],
factor_sequences=[
okta.policy.RuleSignonFactorSequenceArgs(
primary_criteria_factor_type="token:hotp",
primary_criteria_provider="CUSTOM",
secondary_criterias=[
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="token:software:totp",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="push",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="password",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="question",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="sms",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="token:software:totp",
provider="GOOGLE",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="email",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="call",
provider="OKTA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="webauthn",
provider="FIDO",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="token",
provider="RSA",
),
okta.policy.RuleSignonFactorSequenceSecondaryCriteriaArgs(
factor_type="token",
provider="SYMANTEC",
),
],
),
okta.policy.RuleSignonFactorSequenceArgs(
primary_criteria_factor_type="token:software:totp",
primary_criteria_provider="OKTA",
),
])
```
## Import
A Policy Rule can be imported via the Policy and Rule ID.
```sh
$ pulumi import okta:policy/ruleSignon:RuleSignon example <policy id>/<rule id>
```
:param str resource_name: The name of the resource.
:param RuleSignonArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RuleSignonArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
access: Optional[pulumi.Input[str]] = None,
authtype: Optional[pulumi.Input[str]] = None,
behaviors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
factor_sequences: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RuleSignonFactorSequenceArgs']]]]] = None,
mfa_lifetime: Optional[pulumi.Input[int]] = None,
mfa_prompt: Optional[pulumi.Input[str]] = None,
mfa_remember_device: Optional[pulumi.Input[bool]] = None,
mfa_required: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
network_connection: Optional[pulumi.Input[str]] = None,
network_excludes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
network_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
policy_id: Optional[pulumi.Input[str]] = None,
policyid: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
risc_level: Optional[pulumi.Input[str]] = None,
session_idle: Optional[pulumi.Input[int]] = None,
session_lifetime: Optional[pulumi.Input[int]] = None,
session_persistent: Optional[pulumi.Input[bool]] = None,
status: Optional[pulumi.Input[str]] = None,
users_excludeds: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RuleSignonArgs.__new__(RuleSignonArgs)
__props__.__dict__["access"] = access
__props__.__dict__["authtype"] = authtype
__props__.__dict__["behaviors"] = behaviors
__props__.__dict__["factor_sequences"] = factor_sequences
__props__.__dict__["mfa_lifetime"] = mfa_lifetime
__props__.__dict__["mfa_prompt"] = mfa_prompt
__props__.__dict__["mfa_remember_device"] = mfa_remember_device
__props__.__dict__["mfa_required"] = mfa_required
__props__.__dict__["name"] = name
__props__.__dict__["network_connection"] = network_connection
__props__.__dict__["network_excludes"] = network_excludes
__props__.__dict__["network_includes"] = network_includes
__props__.__dict__["policy_id"] = policy_id
if policyid is not None and not opts.urn:
warnings.warn("""Because of incorrect naming, 'policyid' field will be deprecated and then removed in the next versions of the provider. Please use 'policy_id' instead""", DeprecationWarning)
pulumi.log.warn("""policyid is deprecated: Because of incorrect naming, 'policyid' field will be deprecated and then removed in the next versions of the provider. Please use 'policy_id' instead""")
__props__.__dict__["policyid"] = policyid
__props__.__dict__["priority"] = priority
__props__.__dict__["risc_level"] = risc_level
__props__.__dict__["session_idle"] = session_idle
__props__.__dict__["session_lifetime"] = session_lifetime
__props__.__dict__["session_persistent"] = session_persistent
__props__.__dict__["status"] = status
__props__.__dict__["users_excludeds"] = users_excludeds
super(RuleSignon, __self__).__init__(
'okta:policy/ruleSignon:RuleSignon',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
access: Optional[pulumi.Input[str]] = None,
authtype: Optional[pulumi.Input[str]] = None,
behaviors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
factor_sequences: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RuleSignonFactorSequenceArgs']]]]] = None,
mfa_lifetime: Optional[pulumi.Input[int]] = None,
mfa_prompt: Optional[pulumi.Input[str]] = None,
mfa_remember_device: Optional[pulumi.Input[bool]] = None,
mfa_required: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
network_connection: Optional[pulumi.Input[str]] = None,
network_excludes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
network_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
policy_id: Optional[pulumi.Input[str]] = None,
policyid: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
risc_level: Optional[pulumi.Input[str]] = None,
session_idle: Optional[pulumi.Input[int]] = None,
session_lifetime: Optional[pulumi.Input[int]] = None,
session_persistent: Optional[pulumi.Input[bool]] = None,
status: Optional[pulumi.Input[str]] = None,
users_excludeds: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'RuleSignon':
"""
Get an existing RuleSignon resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] access: Allow or deny access based on the rule conditions: `"ALLOW"`, `"DENY"` or `"CHALLENGE"`. The default is `"ALLOW"`.
:param pulumi.Input[str] authtype: Authentication entrypoint: `"ANY"`, `"LDAP_INTERFACE"` or `"RADIUS"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] behaviors: List of behavior IDs.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RuleSignonFactorSequenceArgs']]]] factor_sequences: Auth factor sequences. Should be set if `access = "CHALLENGE"`.
:param pulumi.Input[int] mfa_lifetime: Elapsed time before the next MFA challenge.
:param pulumi.Input[str] mfa_prompt: Prompt for MFA based on the device used, a factor session lifetime, or every sign-on attempt: `"DEVICE"`, `"SESSION"` or `"ALWAYS"`.
:param pulumi.Input[bool] mfa_remember_device: Remember MFA device. The default is `false`.
:param pulumi.Input[bool] mfa_required: Require MFA. The default is `false`.
:param pulumi.Input[str] name: Policy Rule Name.
:param pulumi.Input[str] network_connection: Network selection mode: `"ANYWHERE"`, `"ZONE"`, `"ON_NETWORK"`, or `"OFF_NETWORK"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] network_excludes: The network zones to exclude. Conflicts with `network_includes`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] network_includes: The network zones to include. Conflicts with `network_excludes`.
:param pulumi.Input[str] policy_id: Policy ID.
:param pulumi.Input[str] policyid: Policy ID.
:param pulumi.Input[int] priority: Policy Rule Priority. This attribute can be set to a valid priority; to avoid an endless diff, an error is raised if an invalid priority is provided. The API defaults it to the last (lowest) priority if omitted.
:param pulumi.Input[str] risc_level: Risc level: `"ANY"`, `"LOW"`, `"MEDIUM"` or `"HIGH"`. Default is `"ANY"`. It can also be
set to an empty string when the `RISC_SCORING` org feature flag is disabled.
:param pulumi.Input[int] session_idle: Max minutes a session can be idle.
:param pulumi.Input[int] session_lifetime: Max minutes a session is active (0 disables the limit).
:param pulumi.Input[bool] session_persistent: Whether session cookies will last across browser sessions. Okta Administrators can never have persistent session cookies.
:param pulumi.Input[str] status: Policy Rule Status: `"ACTIVE"` or `"INACTIVE"`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] users_excludeds: Set of User IDs to Exclude
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RuleSignonState.__new__(_RuleSignonState)
__props__.__dict__["access"] = access
__props__.__dict__["authtype"] = authtype
__props__.__dict__["behaviors"] = behaviors
__props__.__dict__["factor_sequences"] = factor_sequences
__props__.__dict__["mfa_lifetime"] = mfa_lifetime
__props__.__dict__["mfa_prompt"] = mfa_prompt
__props__.__dict__["mfa_remember_device"] = mfa_remember_device
__props__.__dict__["mfa_required"] = mfa_required
__props__.__dict__["name"] = name
__props__.__dict__["network_connection"] = network_connection
__props__.__dict__["network_excludes"] = network_excludes
__props__.__dict__["network_includes"] = network_includes
__props__.__dict__["policy_id"] = policy_id
__props__.__dict__["policyid"] = policyid
__props__.__dict__["priority"] = priority
__props__.__dict__["risc_level"] = risc_level
__props__.__dict__["session_idle"] = session_idle
__props__.__dict__["session_lifetime"] = session_lifetime
__props__.__dict__["session_persistent"] = session_persistent
__props__.__dict__["status"] = status
__props__.__dict__["users_excludeds"] = users_excludeds
return RuleSignon(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def access(self) -> pulumi.Output[Optional[str]]:
"""
Allow or deny access based on the rule conditions: `"ALLOW"`, `"DENY"` or `"CHALLENGE"`. The default is `"ALLOW"`.
"""
return pulumi.get(self, "access")
@property
@pulumi.getter
def authtype(self) -> pulumi.Output[Optional[str]]:
"""
Authentication entrypoint: `"ANY"`, `"LDAP_INTERFACE"` or `"RADIUS"`.
"""
return pulumi.get(self, "authtype")
@property
@pulumi.getter
def behaviors(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
List of behavior IDs.
"""
return pulumi.get(self, "behaviors")
@property
@pulumi.getter(name="factorSequences")
def factor_sequences(self) -> pulumi.Output[Optional[Sequence['outputs.RuleSignonFactorSequence']]]:
"""
Auth factor sequences. Should be set if `access = "CHALLENGE"`.
"""
return pulumi.get(self, "factor_sequences")
@property
@pulumi.getter(name="mfaLifetime")
def mfa_lifetime(self) -> pulumi.Output[Optional[int]]:
"""
Elapsed time before the next MFA challenge.
"""
return pulumi.get(self, "mfa_lifetime")
@property
@pulumi.getter(name="mfaPrompt")
def mfa_prompt(self) -> pulumi.Output[Optional[str]]:
"""
Prompt for MFA based on the device used, a factor session lifetime, or every sign-on attempt: `"DEVICE"`, `"SESSION"` or `"ALWAYS"`.
"""
return pulumi.get(self, "mfa_prompt")
@property
@pulumi.getter(name="mfaRememberDevice")
def mfa_remember_device(self) -> pulumi.Output[Optional[bool]]:
"""
Remember MFA device. The default is `false`.
"""
return pulumi.get(self, "mfa_remember_device")
@property
@pulumi.getter(name="mfaRequired")
def mfa_required(self) -> pulumi.Output[Optional[bool]]:
"""
Require MFA. The default is `false`.
"""
return pulumi.get(self, "mfa_required")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Policy Rule Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkConnection")
def network_connection(self) -> pulumi.Output[Optional[str]]:
"""
Network selection mode: `"ANYWHERE"`, `"ZONE"`, `"ON_NETWORK"`, or `"OFF_NETWORK"`.
"""
return pulumi.get(self, "network_connection")
@property
@pulumi.getter(name="networkExcludes")
def network_excludes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The network zones to exclude. Conflicts with `network_includes`.
"""
return pulumi.get(self, "network_excludes")
@property
@pulumi.getter(name="networkIncludes")
def network_includes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The network zones to include. Conflicts with `network_excludes`.
"""
return pulumi.get(self, "network_includes")
@property
@pulumi.getter(name="policyId")
def policy_id(self) -> pulumi.Output[Optional[str]]:
"""
Policy ID.
"""
return pulumi.get(self, "policy_id")
@property
@pulumi.getter
def policyid(self) -> pulumi.Output[Optional[str]]:
"""
Policy ID.
"""
return pulumi.get(self, "policyid")
@property
@pulumi.getter
def priority(self) -> pulumi.Output[Optional[int]]:
"""
Policy rule priority. This attribute can be set to a valid priority; to avoid an endless diff situation, an error is raised if an invalid priority is provided. The API defaults the priority to the last (lowest) one if it is not set.
"""
return pulumi.get(self, "priority")
@property
@pulumi.getter(name="riscLevel")
def risc_level(self) -> pulumi.Output[Optional[str]]:
"""
Risc level: `"ANY"`, `"LOW"`, `"MEDIUM"` or `"HIGH"`. The default is `"ANY"`. It can also be
set to an empty string when the `RISC_SCORING` org feature flag is disabled.
"""
return pulumi.get(self, "risc_level")
@property
@pulumi.getter(name="sessionIdle")
def session_idle(self) -> pulumi.Output[Optional[int]]:
"""
Max minutes a session can be idle.
"""
return pulumi.get(self, "session_idle")
@property
@pulumi.getter(name="sessionLifetime")
def session_lifetime(self) -> pulumi.Output[Optional[int]]:
"""
Max minutes a session is active. Set to `0` to disable.
"""
return pulumi.get(self, "session_lifetime")
@property
@pulumi.getter(name="sessionPersistent")
def session_persistent(self) -> pulumi.Output[Optional[bool]]:
"""
Whether session cookies will last across browser sessions. Okta Administrators can never have persistent session cookies.
"""
return pulumi.get(self, "session_persistent")
@property
@pulumi.getter
def status(self) -> pulumi.Output[Optional[str]]:
"""
Policy Rule Status: `"ACTIVE"` or `"INACTIVE"`.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="usersExcludeds")
def users_excludeds(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Set of user IDs to exclude.
"""
return pulumi.get(self, "users_excludeds")
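# Hedged usage sketch (an assumption, not part of the generated provider
# code): inside a Pulumi program, an existing sign-on policy rule could be
# adopted with the static `get` method above. The module path and both IDs
# below are hypothetical placeholders.
#
# import pulumi
# import pulumi_okta as okta
#
# existing = okta.RuleSignon.get(
#     "example-signon-rule",          # hypothetical logical name
#     id="<signon-rule-id>",          # hypothetical provider ID
#     policy_id="<signon-policy-id>", # hypothetical policy ID
# )
# pulumi.export("signon_rule_status", existing.status)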
| 46.77394
| 239
| 0.623813
| 6,510
| 59,590
| 5.521659
| 0.045469
| 0.104657
| 0.0999
| 0.049574
| 0.95496
| 0.945307
| 0.936126
| 0.933929
| 0.932287
| 0.909141
| 0
| 0.000182
| 0.261436
| 59,590
| 1,273
| 240
| 46.810683
| 0.816569
| 0.349773
| 0
| 0.905465
| 1
| 0.008863
| 0.121374
| 0.009023
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165436
| false
| 0.001477
| 0.01034
| 0
| 0.274742
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
914db2adc3a63c1c5d03454f4ed2bb40706630db
| 283
|
py
|
Python
|
ch13/04.py
|
hnccho/book
|
f659bc759dca6d4991183147db7ae04abb4265a4
|
[
"MIT"
] | 84
|
2017-01-13T04:57:20.000Z
|
2022-02-17T11:56:03.000Z
|
ch13/04.py
|
hnccho/book
|
f659bc759dca6d4991183147db7ae04abb4265a4
|
[
"MIT"
] | 3
|
2019-10-12T12:02:54.000Z
|
2020-04-13T12:09:57.000Z
|
ch13/04.py
|
hnccho/book
|
f659bc759dca6d4991183147db7ae04abb4265a4
|
[
"MIT"
] | 111
|
2016-09-22T09:02:12.000Z
|
2022-03-18T13:26:49.000Z
|
kakao_daily_ending_prices = {'2016-02-19': 92600,
'2016-02-18': 92400,
'2016-02-17': 92100,
'2016-02-16': 94300,
'2016-02-15': 92300}
print(kakao_daily_ending_prices)
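# Hedged follow-up sketch (an assumption, not in the original snippet): a
# single closing price can be read back from the dict by its date key.
print(kakao_daily_ending_prices['2016-02-19'])  # 92600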
| 40.428571
| 49
| 0.424028
| 29
| 283
| 3.931034
| 0.586207
| 0.263158
| 0.280702
| 0.385965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.422078
| 0.45583
| 283
| 7
| 50
| 40.428571
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0.176056
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e680b0f217ae6cf3851f553ea00135195e0d2bb3
| 2,780
|
py
|
Python
|
tests/test_bidir_mnist.py
|
aripdotcom/Dynamical-Isometry-Distillation
|
b0e397da55edbf764d2849669bf8b60a4abf26ac
|
[
"MIT"
] | 1
|
2019-09-02T21:22:39.000Z
|
2019-09-02T21:22:39.000Z
|
tests/test_bidir_mnist.py
|
postmachines/Linear-Distillation-Learning
|
b0e397da55edbf764d2849669bf8b60a4abf26ac
|
[
"MIT"
] | null | null | null |
tests/test_bidir_mnist.py
|
postmachines/Linear-Distillation-Learning
|
b0e397da55edbf764d2849669bf8b60a4abf26ac
|
[
"MIT"
] | 1
|
2019-05-29T20:07:58.000Z
|
2019-05-29T20:07:58.000Z
|
import unittest
from scripts.bidir_train_mnist import run_experiment_full_test
from scripts.utils import preprocess_config
class TestMnistBidir(unittest.TestCase):
def test_no_logging_3_shot(self):
config = {
'dataset': 'mnist',
'way': '10',
'train_shot': '3',
'test_shot': '1',
'loss': 'MSE',
'epochs': '5',
'trials': '1',
'silent': '1',
'split': 'test',
'x_dim': '28',
'z_dim': '2000',
'lr_predictor': '1e-3',
'lr_target': '1e-3',
'channels': '1',
'gpu': '0',
'test_batch': '2000',
'log_test_accuracy': '0'
}
config = preprocess_config(config)
run_experiment_full_test(config)
def test_test_accuracy_logging_3_shot(self):
config = {
'dataset': 'mnist',
'way': '10',
'train_shot': '3',
'test_shot': '1',
'loss': 'MSE',
'epochs': '5',
'trials': '1',
'silent': '1',
'split': 'test',
'x_dim': '28',
'z_dim': '2000',
'lr_predictor': '1e-3',
'lr_target': '1e-3',
'channels': '1',
'gpu': '0',
'test_batch': '2000',
'log_test_accuracy': '1'
}
config = preprocess_config(config)
run_experiment_full_test(config)
def test_1_shot(self):
config = {
'dataset': 'mnist',
'way': '10',
'train_shot': '1',
'test_shot': '1',
'loss': 'MSE',
'epochs': '5',
'trials': '1',
'silent': '1',
'split': 'test',
'x_dim': '28',
'z_dim': '2000',
'lr_predictor': '1e-3',
'lr_target': '1e-3',
'channels': '1',
'gpu': '0',
'test_batch': '2000',
'log_test_accuracy': '0'
}
config = preprocess_config(config)
run_experiment_full_test(config)
def test_10_shot(self):
config = {
'dataset': 'mnist',
'way': '10',
'train_shot': '10',
'test_shot': '1',
'loss': 'MSE',
'epochs': '5',
'trials': '1',
'silent': '1',
'split': 'test',
'x_dim': '28',
'z_dim': '2000',
'lr_predictor': '1e-3',
'lr_target': '1e-3',
'channels': '1',
'gpu': '0',
'test_batch': '2000',
'log_test_accuracy': '0'
}
config = preprocess_config(config)
run_experiment_full_test(config)
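# Hedged runner sketch (an assumption, not in the original file): lets the
# suite above be executed directly via `python tests/test_bidir_mnist.py`.
if __name__ == '__main__':
    unittest.main()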
| 28.367347
| 62
| 0.419065
| 267
| 2,780
| 4.097378
| 0.183521
| 0.021938
| 0.077697
| 0.095978
| 0.846435
| 0.846435
| 0.846435
| 0.846435
| 0.846435
| 0.773309
| 0
| 0.062692
| 0.414748
| 2,780
| 98
| 63
| 28.367347
| 0.609711
| 0
| 0
| 0.836957
| 0
| 0
| 0.237684
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.032609
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e69052fbdff6b67c20671e53b3de4612b1dca6ae
| 31,003
|
py
|
Python
|
jotleaf/main/migrations/0001_initial.py
|
reverie/jotleaf.com
|
86311b546bb5bae7ba826f5576ea82ac515e8b7d
|
[
"MIT"
] | 1
|
2020-10-25T15:10:43.000Z
|
2020-10-25T15:10:43.000Z
|
jotleaf/main/migrations/0001_initial.py
|
reverie/jotleaf.com
|
86311b546bb5bae7ba826f5576ea82ac515e8b7d
|
[
"MIT"
] | null | null | null |
jotleaf/main/migrations/0001_initial.py
|
reverie/jotleaf.com
|
86311b546bb5bae7ba826f5576ea82ac515e8b7d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Page'
db.create_table(u'main_page', (
('id', self.gf('uuidfield.fields.UUIDField')(unique=True, max_length=32, primary_key=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('owner', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.CustomUser'], null=True, blank=True)),
('creator_session_id', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
('creator_ip', self.gf('django.db.models.fields.IPAddressField')(max_length=15, null=True, blank=True)),
('published', self.gf('django.db.models.fields.BooleanField')(default=False)),
('published_at', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True)),
('text_writability', self.gf('django.db.models.fields.IntegerField')(default=3)),
('image_writability', self.gf('django.db.models.fields.IntegerField')(default=3)),
('title', self.gf('django.db.models.fields.CharField')(max_length=100)),
('short_url', self.gf('django.db.models.fields.SlugField')(max_length=50, null=True, blank=True)),
('bg_color', self.gf('django.db.models.fields.CharField')(default='#fafafa', max_length=32, blank=True)),
('bg_texture', self.gf('django.db.models.fields.CharField')(default='light_wool_midalpha.png', max_length=1024, blank=True)),
('bg_fn', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('default_textitem_color', self.gf('django.db.models.fields.CharField')(default='#000', max_length=32, blank=True)),
('default_textitem_bg_color', self.gf('django.db.models.fields.CharField')(default='', max_length=32, blank=True)),
('default_textitem_font_size', self.gf('django.db.models.fields.PositiveIntegerField')(default=13, null=True, blank=True)),
('default_textitem_font', self.gf('django.db.models.fields.CharField')(default='Arial', max_length=32, blank=True)),
('default_textitem_bg_texture', self.gf('django.db.models.fields.CharField')(max_length=1024, blank=True)),
('use_custom_admin_style', self.gf('django.db.models.fields.BooleanField')(default=False)),
('admin_textitem_color', self.gf('django.db.models.fields.CharField')(default='#000', max_length=32, blank=True)),
('admin_textitem_bg_color', self.gf('django.db.models.fields.CharField')(default='', max_length=32, blank=True)),
('admin_textitem_font_size', self.gf('django.db.models.fields.PositiveIntegerField')(default=13, null=True, blank=True)),
('admin_textitem_bg_texture', self.gf('django.db.models.fields.CharField')(max_length=1024, blank=True)),
('admin_textitem_font', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
))
db.send_create_signal(u'main', ['Page'])
# Adding model 'TextItem'
db.create_table(u'main_textitem', (
('id', self.gf('uuidfield.fields.UUIDField')(unique=True, max_length=32, primary_key=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Page'])),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.CustomUser'], null=True)),
('creator_window_id', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
('creator_session_id', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
('creator_ip', self.gf('django.db.models.fields.IPAddressField')(max_length=15, null=True, blank=True)),
('x', self.gf('django.db.models.fields.IntegerField')()),
('y', self.gf('django.db.models.fields.IntegerField')()),
('height', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('width', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('border_color', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('border_width', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('border_radius', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('content', self.gf('django.db.models.fields.TextField')(blank=True)),
('editable', self.gf('django.db.models.fields.BooleanField')(default=False)),
('link_to_url', self.gf('django.db.models.fields.TextField')(blank=True)),
('color', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('bg_color', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('bg_texture', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('font_size', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('font', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
))
db.send_create_signal(u'main', ['TextItem'])
# Adding model 'ImageItem'
db.create_table(u'main_imageitem', (
('id', self.gf('uuidfield.fields.UUIDField')(unique=True, max_length=32, primary_key=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Page'])),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.CustomUser'], null=True)),
('creator_window_id', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
('creator_session_id', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
('creator_ip', self.gf('django.db.models.fields.IPAddressField')(max_length=15, null=True, blank=True)),
('x', self.gf('django.db.models.fields.IntegerField')()),
('y', self.gf('django.db.models.fields.IntegerField')()),
('height', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('width', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('border_color', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('border_width', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('border_radius', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('src', self.gf('django.db.models.fields.CharField')(max_length=1000)),
('link_to_url', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal(u'main', ['ImageItem'])
# Adding model 'EmbedItem'
db.create_table(u'main_embeditem', (
('id', self.gf('uuidfield.fields.UUIDField')(unique=True, max_length=32, primary_key=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Page'])),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.CustomUser'], null=True)),
('creator_window_id', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
('creator_session_id', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
('creator_ip', self.gf('django.db.models.fields.IPAddressField')(max_length=15, null=True, blank=True)),
('x', self.gf('django.db.models.fields.IntegerField')()),
('y', self.gf('django.db.models.fields.IntegerField')()),
('height', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('width', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('border_color', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('border_width', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('border_radius', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('original_url', self.gf('django.db.models.fields.TextField')(blank=True)),
('embedly_data', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal(u'main', ['EmbedItem'])
# Adding model 'Membership'
db.create_table(u'main_membership', (
('id', self.gf('uuidfield.fields.UUIDField')(unique=True, max_length=32, primary_key=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Page'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.CustomUser'])),
))
db.send_create_signal(u'main', ['Membership'])
# Adding unique constraint on 'Membership', fields ['page', 'user']
db.create_unique(u'main_membership', ['page_id', 'user_id'])
# Adding model 'PageView'
db.create_table(u'main_pageview', (
('id', self.gf('uuidfield.fields.UUIDField')(unique=True, max_length=32, primary_key=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.CustomUser'], null=True)),
('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Page'])),
('ip_address', self.gf('django.db.models.fields.IPAddressField')(max_length=15)),
('sessionid', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
))
db.send_create_signal(u'main', ['PageView'])
# Adding model 'CustomUser'
db.create_table('auth_user', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('password', self.gf('django.db.models.fields.CharField')(max_length=128)),
('last_login', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('is_superuser', self.gf('django.db.models.fields.BooleanField')(default=False)),
('username', self.gf('django.db.models.fields.CharField')(unique=True, max_length=30)),
('first_name', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('last_name', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
('is_staff', self.gf('django.db.models.fields.BooleanField')(default=False)),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('date_joined', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal(u'main', ['CustomUser'])
# Adding M2M table for field groups on 'CustomUser'
db.create_table('auth_user_groups', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('customuser', models.ForeignKey(orm[u'main.customuser'], null=False)),
('group', models.ForeignKey(orm[u'auth.group'], null=False))
))
db.create_unique('auth_user_groups', ['customuser_id', 'group_id'])
# Adding M2M table for field user_permissions on 'CustomUser'
db.create_table('auth_user_user_permissions', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('customuser', models.ForeignKey(orm[u'main.customuser'], null=False)),
('permission', models.ForeignKey(orm[u'auth.permission'], null=False))
))
db.create_unique('auth_user_user_permissions', ['customuser_id', 'permission_id'])
# Adding model 'Follow'
db.create_table(u'main_follow', (
('id', self.gf('uuidfield.fields.UUIDField')(unique=True, max_length=32, primary_key=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='friends', to=orm['main.CustomUser'])),
('target', self.gf('django.db.models.fields.related.ForeignKey')(related_name='followers', to=orm['main.CustomUser'])),
))
db.send_create_signal(u'main', ['Follow'])
# Adding unique constraint on 'Follow', fields ['user', 'target']
db.create_unique(u'main_follow', ['user_id', 'target_id'])
def backwards(self, orm):
# Removing unique constraint on 'Follow', fields ['user', 'target']
db.delete_unique(u'main_follow', ['user_id', 'target_id'])
# Removing unique constraint on 'Membership', fields ['page', 'user']
db.delete_unique(u'main_membership', ['page_id', 'user_id'])
# Deleting model 'Page'
db.delete_table(u'main_page')
# Deleting model 'TextItem'
db.delete_table(u'main_textitem')
# Deleting model 'ImageItem'
db.delete_table(u'main_imageitem')
# Deleting model 'EmbedItem'
db.delete_table(u'main_embeditem')
# Deleting model 'Membership'
db.delete_table(u'main_membership')
# Deleting model 'PageView'
db.delete_table(u'main_pageview')
# Deleting model 'CustomUser'
db.delete_table('auth_user')
# Removing M2M table for field groups on 'CustomUser'
db.delete_table('auth_user_groups')
# Removing M2M table for field user_permissions on 'CustomUser'
db.delete_table('auth_user_user_permissions')
# Deleting model 'Follow'
db.delete_table(u'main_follow')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'main.customuser': {
'Meta': {'object_name': 'CustomUser', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'main.embeditem': {
'Meta': {'object_name': 'EmbedItem'},
'border_color': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'border_radius': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'border_width': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']", 'null': 'True'}),
'creator_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'creator_session_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'creator_window_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'embedly_data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'original_url': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Page']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'x': ('django.db.models.fields.IntegerField', [], {}),
'y': ('django.db.models.fields.IntegerField', [], {})
},
u'main.follow': {
'Meta': {'unique_together': "[['user', 'target']]", 'object_name': 'Follow'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'followers'", 'to': u"orm['main.CustomUser']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'friends'", 'to': u"orm['main.CustomUser']"})
},
u'main.imageitem': {
'Meta': {'object_name': 'ImageItem'},
'border_color': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'border_radius': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'border_width': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']", 'null': 'True'}),
'creator_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'creator_session_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'creator_window_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'link_to_url': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Page']"}),
'src': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'x': ('django.db.models.fields.IntegerField', [], {}),
'y': ('django.db.models.fields.IntegerField', [], {})
},
u'main.membership': {
'Meta': {'unique_together': "[['page', 'user']]", 'object_name': 'Membership'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Page']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']"})
},
u'main.page': {
'Meta': {'object_name': 'Page'},
'admin_textitem_bg_color': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'}),
'admin_textitem_bg_texture': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'admin_textitem_color': ('django.db.models.fields.CharField', [], {'default': "'#000'", 'max_length': '32', 'blank': 'True'}),
'admin_textitem_font': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'admin_textitem_font_size': ('django.db.models.fields.PositiveIntegerField', [], {'default': '13', 'null': 'True', 'blank': 'True'}),
'bg_color': ('django.db.models.fields.CharField', [], {'default': "'#fafafa'", 'max_length': '32', 'blank': 'True'}),
'bg_fn': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bg_texture': ('django.db.models.fields.CharField', [], {'default': "'light_wool_midalpha.png'", 'max_length': '1024', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'creator_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'creator_session_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'default_textitem_bg_color': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'}),
'default_textitem_bg_texture': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'default_textitem_color': ('django.db.models.fields.CharField', [], {'default': "'#000'", 'max_length': '32', 'blank': 'True'}),
'default_textitem_font': ('django.db.models.fields.CharField', [], {'default': "'Arial'", 'max_length': '32', 'blank': 'True'}),
'default_textitem_font_size': ('django.db.models.fields.PositiveIntegerField', [], {'default': '13', 'null': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'image_writability': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']", 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'published_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'short_url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'text_writability': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'use_custom_admin_style': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'main.pageview': {
'Meta': {'object_name': 'PageView'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Page']"}),
'sessionid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']", 'null': 'True'})
},
u'main.textitem': {
'Meta': {'object_name': 'TextItem'},
'bg_color': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bg_texture': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'border_color': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'border_radius': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'border_width': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'color': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']", 'null': 'True'}),
'creator_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'creator_session_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'creator_window_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'editable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'font': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'font_size': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'link_to_url': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Page']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'x': ('django.db.models.fields.IntegerField', [], {}),
'y': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['main']
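# Hedged usage note (an assumption, not in the original file): with South
# installed, this initial migration would typically be applied with:
#   python manage.py migrate main 0001_initial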
| 80.947781
| 187
| 0.605974
| 3,628
| 31,003
| 5.036384
| 0.046858
| 0.097198
| 0.16933
| 0.2419
| 0.9022
| 0.877353
| 0.869254
| 0.845063
| 0.7934
| 0.743706
| 0
| 0.009751
| 0.173016
| 31,003
| 383
| 188
| 80.947781
| 0.702914
| 0.02932
| 0
| 0.439024
| 0
| 0
| 0.506901
| 0.312382
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006098
| false
| 0.006098
| 0.012195
| 0
| 0.027439
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6ba0b8402afbf89e98fd709a52024e8f59a786d
| 6,821
|
py
|
Python
|
loldib/getratings/models/NA/na_nocturne/na_nocturne_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nocturne/na_nocturne_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nocturne/na_nocturne_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Nocturne_Mid_Aatrox(Ratings):
pass
class NA_Nocturne_Mid_Ahri(Ratings):
pass
class NA_Nocturne_Mid_Akali(Ratings):
pass
class NA_Nocturne_Mid_Alistar(Ratings):
pass
class NA_Nocturne_Mid_Amumu(Ratings):
pass
class NA_Nocturne_Mid_Anivia(Ratings):
pass
class NA_Nocturne_Mid_Annie(Ratings):
pass
class NA_Nocturne_Mid_Ashe(Ratings):
pass
class NA_Nocturne_Mid_AurelionSol(Ratings):
pass
class NA_Nocturne_Mid_Azir(Ratings):
pass
class NA_Nocturne_Mid_Bard(Ratings):
pass
class NA_Nocturne_Mid_Blitzcrank(Ratings):
pass
class NA_Nocturne_Mid_Brand(Ratings):
pass
class NA_Nocturne_Mid_Braum(Ratings):
pass
class NA_Nocturne_Mid_Caitlyn(Ratings):
pass
class NA_Nocturne_Mid_Camille(Ratings):
pass
class NA_Nocturne_Mid_Cassiopeia(Ratings):
pass
class NA_Nocturne_Mid_Chogath(Ratings):
pass
class NA_Nocturne_Mid_Corki(Ratings):
pass
class NA_Nocturne_Mid_Darius(Ratings):
pass
class NA_Nocturne_Mid_Diana(Ratings):
pass
class NA_Nocturne_Mid_Draven(Ratings):
pass
class NA_Nocturne_Mid_DrMundo(Ratings):
pass
class NA_Nocturne_Mid_Ekko(Ratings):
pass
class NA_Nocturne_Mid_Elise(Ratings):
pass
class NA_Nocturne_Mid_Evelynn(Ratings):
pass
class NA_Nocturne_Mid_Ezreal(Ratings):
pass
class NA_Nocturne_Mid_Fiddlesticks(Ratings):
pass
class NA_Nocturne_Mid_Fiora(Ratings):
pass
class NA_Nocturne_Mid_Fizz(Ratings):
pass
class NA_Nocturne_Mid_Galio(Ratings):
pass
class NA_Nocturne_Mid_Gangplank(Ratings):
pass
class NA_Nocturne_Mid_Garen(Ratings):
pass
class NA_Nocturne_Mid_Gnar(Ratings):
pass
class NA_Nocturne_Mid_Gragas(Ratings):
pass
class NA_Nocturne_Mid_Graves(Ratings):
pass
class NA_Nocturne_Mid_Hecarim(Ratings):
pass
class NA_Nocturne_Mid_Heimerdinger(Ratings):
pass
class NA_Nocturne_Mid_Illaoi(Ratings):
pass
class NA_Nocturne_Mid_Irelia(Ratings):
pass
class NA_Nocturne_Mid_Ivern(Ratings):
pass
class NA_Nocturne_Mid_Janna(Ratings):
pass
class NA_Nocturne_Mid_JarvanIV(Ratings):
pass
class NA_Nocturne_Mid_Jax(Ratings):
pass
class NA_Nocturne_Mid_Jayce(Ratings):
pass
class NA_Nocturne_Mid_Jhin(Ratings):
pass
class NA_Nocturne_Mid_Jinx(Ratings):
pass
class NA_Nocturne_Mid_Kalista(Ratings):
pass
class NA_Nocturne_Mid_Karma(Ratings):
pass
class NA_Nocturne_Mid_Karthus(Ratings):
pass
class NA_Nocturne_Mid_Kassadin(Ratings):
pass
class NA_Nocturne_Mid_Katarina(Ratings):
pass
class NA_Nocturne_Mid_Kayle(Ratings):
pass
class NA_Nocturne_Mid_Kayn(Ratings):
pass
class NA_Nocturne_Mid_Kennen(Ratings):
pass
class NA_Nocturne_Mid_Khazix(Ratings):
pass
class NA_Nocturne_Mid_Kindred(Ratings):
pass
class NA_Nocturne_Mid_Kled(Ratings):
pass
class NA_Nocturne_Mid_KogMaw(Ratings):
pass
class NA_Nocturne_Mid_Leblanc(Ratings):
pass
class NA_Nocturne_Mid_LeeSin(Ratings):
pass
class NA_Nocturne_Mid_Leona(Ratings):
pass
class NA_Nocturne_Mid_Lissandra(Ratings):
pass
class NA_Nocturne_Mid_Lucian(Ratings):
pass
class NA_Nocturne_Mid_Lulu(Ratings):
pass
class NA_Nocturne_Mid_Lux(Ratings):
pass
class NA_Nocturne_Mid_Malphite(Ratings):
pass
class NA_Nocturne_Mid_Malzahar(Ratings):
pass
class NA_Nocturne_Mid_Maokai(Ratings):
pass
class NA_Nocturne_Mid_MasterYi(Ratings):
pass
class NA_Nocturne_Mid_MissFortune(Ratings):
pass
class NA_Nocturne_Mid_MonkeyKing(Ratings):
pass
class NA_Nocturne_Mid_Mordekaiser(Ratings):
pass
class NA_Nocturne_Mid_Morgana(Ratings):
pass
class NA_Nocturne_Mid_Nami(Ratings):
pass
class NA_Nocturne_Mid_Nasus(Ratings):
pass
class NA_Nocturne_Mid_Nautilus(Ratings):
pass
class NA_Nocturne_Mid_Nidalee(Ratings):
pass
class NA_Nocturne_Mid_Nocturne(Ratings):
pass
class NA_Nocturne_Mid_Nunu(Ratings):
pass
class NA_Nocturne_Mid_Olaf(Ratings):
pass
class NA_Nocturne_Mid_Orianna(Ratings):
pass
class NA_Nocturne_Mid_Ornn(Ratings):
pass
class NA_Nocturne_Mid_Pantheon(Ratings):
pass
class NA_Nocturne_Mid_Poppy(Ratings):
pass
class NA_Nocturne_Mid_Quinn(Ratings):
pass
class NA_Nocturne_Mid_Rakan(Ratings):
pass
class NA_Nocturne_Mid_Rammus(Ratings):
pass
class NA_Nocturne_Mid_RekSai(Ratings):
pass
class NA_Nocturne_Mid_Renekton(Ratings):
pass
class NA_Nocturne_Mid_Rengar(Ratings):
pass
class NA_Nocturne_Mid_Riven(Ratings):
pass
class NA_Nocturne_Mid_Rumble(Ratings):
pass
class NA_Nocturne_Mid_Ryze(Ratings):
pass
class NA_Nocturne_Mid_Sejuani(Ratings):
pass
class NA_Nocturne_Mid_Shaco(Ratings):
pass
class NA_Nocturne_Mid_Shen(Ratings):
pass
class NA_Nocturne_Mid_Shyvana(Ratings):
pass
class NA_Nocturne_Mid_Singed(Ratings):
pass
class NA_Nocturne_Mid_Sion(Ratings):
pass
class NA_Nocturne_Mid_Sivir(Ratings):
pass
class NA_Nocturne_Mid_Skarner(Ratings):
pass
class NA_Nocturne_Mid_Sona(Ratings):
pass
class NA_Nocturne_Mid_Soraka(Ratings):
pass
class NA_Nocturne_Mid_Swain(Ratings):
pass
class NA_Nocturne_Mid_Syndra(Ratings):
pass
class NA_Nocturne_Mid_TahmKench(Ratings):
pass
class NA_Nocturne_Mid_Taliyah(Ratings):
pass
class NA_Nocturne_Mid_Talon(Ratings):
pass
class NA_Nocturne_Mid_Taric(Ratings):
pass
class NA_Nocturne_Mid_Teemo(Ratings):
pass
class NA_Nocturne_Mid_Thresh(Ratings):
pass
class NA_Nocturne_Mid_Tristana(Ratings):
pass
class NA_Nocturne_Mid_Trundle(Ratings):
pass
class NA_Nocturne_Mid_Tryndamere(Ratings):
pass
class NA_Nocturne_Mid_TwistedFate(Ratings):
pass
class NA_Nocturne_Mid_Twitch(Ratings):
pass
class NA_Nocturne_Mid_Udyr(Ratings):
pass
class NA_Nocturne_Mid_Urgot(Ratings):
pass
class NA_Nocturne_Mid_Varus(Ratings):
pass
class NA_Nocturne_Mid_Vayne(Ratings):
pass
class NA_Nocturne_Mid_Veigar(Ratings):
pass
class NA_Nocturne_Mid_Velkoz(Ratings):
pass
class NA_Nocturne_Mid_Vi(Ratings):
pass
class NA_Nocturne_Mid_Viktor(Ratings):
pass
class NA_Nocturne_Mid_Vladimir(Ratings):
pass
class NA_Nocturne_Mid_Volibear(Ratings):
pass
class NA_Nocturne_Mid_Warwick(Ratings):
pass
class NA_Nocturne_Mid_Xayah(Ratings):
pass
class NA_Nocturne_Mid_Xerath(Ratings):
pass
class NA_Nocturne_Mid_XinZhao(Ratings):
pass
class NA_Nocturne_Mid_Yasuo(Ratings):
pass
class NA_Nocturne_Mid_Yorick(Ratings):
pass
class NA_Nocturne_Mid_Zac(Ratings):
pass
class NA_Nocturne_Mid_Zed(Ratings):
pass
class NA_Nocturne_Mid_Ziggs(Ratings):
pass
class NA_Nocturne_Mid_Zilean(Ratings):
pass
class NA_Nocturne_Mid_Zyra(Ratings):
pass
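# Hedged lookup sketch (an assumption, not in the original file): the empty
# subclasses above act as per-champion markers, so a ratings class can be
# resolved dynamically by champion name.
def na_nocturne_mid_ratings_class(champion_name):
    # e.g. na_nocturne_mid_ratings_class('Ahri') -> NA_Nocturne_Mid_Ahri
    return globals()['NA_Nocturne_Mid_' + champion_name]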
| 16.357314
| 46
| 0.776133
| 972
| 6,821
| 5.020576
| 0.151235
| 0.197951
| 0.42418
| 0.509016
| 0.814139
| 0.814139
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162879
| 6,821
| 416
| 47
| 16.396635
| 0.854641
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
e6dbce1a325c048b08cab2940fadfa7715c95078
| 39,597
|
py
|
Python
|
python/sbp/tracking.py
|
adammacudzinski/libsbp
|
33f82210ff1262f8d6c180215277a0bb5eb3b65c
|
[
"MIT"
] | null | null | null |
python/sbp/tracking.py
|
adammacudzinski/libsbp
|
33f82210ff1262f8d6c180215277a0bb5eb3b65c
|
[
"MIT"
] | null | null | null |
python/sbp/tracking.py
|
adammacudzinski/libsbp
|
33f82210ff1262f8d6c180215277a0bb5eb3b65c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Copyright (C) 2015-2018 Swift Navigation Inc.
# Contact: https://support.swiftnav.com
#
# This source is subject to the license found in the file 'LICENSE' which must
# be distributed together with this source. All other rights reserved.
#
# THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
"""
Satellite code and carrier-phase tracking messages from the device.
"""
import json
import construct
from sbp.msg import SBP, SENDER_ID
from sbp.utils import fmt_repr, exclude_fields, walk_json_dict, containerize
from sbp.gnss import *
# Automatically generated from piksi/yaml/swiftnav/sbp/tracking.yaml with generate.py.
# Please do not hand edit!
class TrackingChannelState(object):
"""TrackingChannelState.
Tracking channel state for a specific satellite signal and
measured signal power.
Parameters
----------
sid : GnssSignal
GNSS signal being tracked
fcn : int
Frequency channel number (GLONASS only)
cn0 : int
Carrier-to-Noise density. Zero implies invalid cn0.
"""
_parser = construct.Embedded(construct.Struct(
'sid' / construct.Struct(GnssSignal._parser),
'fcn' / construct.Int8ul,
'cn0' / construct.Int8ul,))
__slots__ = [
'sid',
'fcn',
'cn0',
]
def __init__(self, payload=None, **kwargs):
if payload:
self.from_binary(payload)
else:
self.sid = kwargs.pop('sid')
self.fcn = kwargs.pop('fcn')
self.cn0 = kwargs.pop('cn0')
def __repr__(self):
return fmt_repr(self)
def from_binary(self, d):
p = TrackingChannelState._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
d = dict([(k, getattr(self, k)) for k in self.__slots__])
return TrackingChannelState._parser.build(d)
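# Hedged construction sketch (an assumption, not in the original file): a
# channel-state record built from keyword arguments; GnssSignal's constructor
# fields (`sat`, `code`) are assumed here.
#
# state = TrackingChannelState(sid=GnssSignal(sat=22, code=0), fcn=100, cn0=45)
# print(state)  # repr rendered via fmt_repr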
class MeasurementState(object):
"""MeasurementState.
Measurement Engine tracking channel state for a specific satellite signal
and measured signal power.
For Glonass, the mesid field can carry either the FCN, encoded as 100 + FCN with FCN in [-7, +6], or the Slot ID (from 1 to 28).
Parameters
----------
mesid : GnssSignal
Measurement Engine GNSS signal being tracked (carries either Glonass FCN or SLOT)
cn0 : int
Carrier-to-Noise density. Zero implies invalid cn0.
"""
_parser = construct.Embedded(construct.Struct(
'mesid' / construct.Struct(GnssSignal._parser),
'cn0' / construct.Int8ul,))
__slots__ = [
'mesid',
'cn0',
]
def __init__(self, payload=None, **kwargs):
if payload:
self.from_binary(payload)
else:
self.mesid = kwargs.pop('mesid')
self.cn0 = kwargs.pop('cn0')
def __repr__(self):
return fmt_repr(self)
def from_binary(self, d):
p = MeasurementState._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
d = dict([(k, getattr(self, k)) for k in self.__slots__])
return MeasurementState._parser.build(d)
class TrackingChannelCorrelation(object):
"""TrackingChannelCorrelation.
Structure containing in-phase and quadrature correlation components.
Parameters
----------
I : int
In-phase correlation
Q : int
Quadrature correlation
"""
_parser = construct.Embedded(construct.Struct(
'I' / construct.Int16sl,
'Q' / construct.Int16sl,))
__slots__ = [
'I',
'Q',
]
def __init__(self, payload=None, **kwargs):
if payload:
self.from_binary(payload)
else:
self.I = kwargs.pop('I')
self.Q = kwargs.pop('Q')
def __repr__(self):
return fmt_repr(self)
def from_binary(self, d):
p = TrackingChannelCorrelation._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
d = dict([(k, getattr(self, k)) for k in self.__slots__])
return TrackingChannelCorrelation._parser.build(d)
class TrackingChannelCorrelationDep(object):
"""TrackingChannelCorrelationDep.
Structure containing in-phase and quadrature correlation components.
Parameters
----------
I : int
In-phase correlation
Q : int
Quadrature correlation
"""
_parser = construct.Embedded(construct.Struct(
'I' / construct.Int32sl,
'Q' / construct.Int32sl,))
__slots__ = [
'I',
'Q',
]
def __init__(self, payload=None, **kwargs):
if payload:
self.from_binary(payload)
else:
self.I = kwargs.pop('I')
self.Q = kwargs.pop('Q')
def __repr__(self):
return fmt_repr(self)
def from_binary(self, d):
p = TrackingChannelCorrelationDep._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
d = dict([(k, getattr(self, k)) for k in self.__slots__])
return TrackingChannelCorrelationDep._parser.build(d)
class TrackingChannelStateDepA(object):
"""TrackingChannelStateDepA.
Deprecated.
Parameters
----------
state : int
Status of tracking channel
prn : int
PRN-1 being tracked
cn0 : float
Carrier-to-noise density
"""
_parser = construct.Embedded(construct.Struct(
'state' / construct.Int8ul,
'prn' / construct.Int8ul,
'cn0' / construct.Float32l,))
__slots__ = [
'state',
'prn',
'cn0',
]
def __init__(self, payload=None, **kwargs):
if payload:
self.from_binary(payload)
else:
self.state = kwargs.pop('state')
self.prn = kwargs.pop('prn')
self.cn0 = kwargs.pop('cn0')
def __repr__(self):
return fmt_repr(self)
def from_binary(self, d):
p = TrackingChannelStateDepA._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
d = dict([(k, getattr(self, k)) for k in self.__slots__])
return TrackingChannelStateDepA._parser.build(d)
class TrackingChannelStateDepB(object):
"""TrackingChannelStateDepB.
Deprecated.
Parameters
----------
state : int
Status of tracking channel
sid : GnssSignalDep
GNSS signal being tracked
cn0 : float
Carrier-to-noise density
"""
_parser = construct.Embedded(construct.Struct(
'state' / construct.Int8ul,
'sid' / construct.Struct(GnssSignalDep._parser),
'cn0' / construct.Float32l,))
__slots__ = [
'state',
'sid',
'cn0',
]
def __init__(self, payload=None, **kwargs):
if payload:
self.from_binary(payload)
else:
self.state = kwargs.pop('state')
self.sid = kwargs.pop('sid')
self.cn0 = kwargs.pop('cn0')
def __repr__(self):
return fmt_repr(self)
def from_binary(self, d):
p = TrackingChannelStateDepB._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
d = dict([(k, getattr(self, k)) for k in self.__slots__])
return TrackingChannelStateDepB._parser.build(d)
SBP_MSG_TRACKING_STATE_DETAILED_DEP_A = 0x0021
class MsgTrackingStateDetailedDepA(SBP):
"""SBP class for message MSG_TRACKING_STATE_DETAILED_DEP_A (0x0021).
You can have MSG_TRACKING_STATE_DETAILED_DEP_A inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
The tracking message returns a set of tracking channel parameters for a
single tracking channel, useful for debugging issues.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
recv_time : int
Receiver clock time.
tot : GPSTime
Time of transmission of signal from satellite. TOW only valid when
TOW status is decoded or propagated. WN only valid when week
number valid flag is set.
P : int
Pseudorange observation. Valid only when pseudorange valid flag is
set.
P_std : int
Pseudorange observation standard deviation. Valid only when
pseudorange valid flag is set.
L : CarrierPhase
Carrier phase observation with typical sign convention. Valid only
when PLL pessimistic lock is achieved.
cn0 : int
Carrier-to-Noise density
lock : int
Lock time. It is encoded according to DF402 from the RTCM 10403.2
Amendment 2 specification. Valid values range from 0 to 15.
sid : GnssSignal
GNSS signal identifier.
doppler : int
Carrier Doppler frequency.
doppler_std : int
Carrier Doppler frequency standard deviation.
uptime : int
Number of seconds of continuous tracking. Specifies how long the
signal has been continuously tracked.
clock_offset : int
TCXO clock offset. Valid only when the clock valid flag is set.
clock_drift : int
TCXO clock drift. Valid only when the clock valid flag is set.
corr_spacing : int
Early-Prompt (EP) and Prompt-Late (PL) correlators spacing.
acceleration : int
Acceleration. Valid only when acceleration valid flag is set.
sync_flags : int
Synchronization status flags.
tow_flags : int
TOW status flags.
track_flags : int
Tracking loop status flags.
nav_flags : int
Navigation data status flags.
pset_flags : int
Parameters sets flags.
misc_flags : int
Miscellaneous flags.
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
'recv_time' / construct.Int64ul,
'tot' / construct.Struct(GPSTime._parser),
'P' / construct.Int32ul,
'P_std' / construct.Int16ul,
'L' / construct.Struct(CarrierPhase._parser),
'cn0' / construct.Int8ul,
'lock' / construct.Int16ul,
'sid' / construct.Struct(GnssSignal._parser),
'doppler' / construct.Int32sl,
'doppler_std' / construct.Int16ul,
'uptime' / construct.Int32ul,
'clock_offset' / construct.Int16sl,
'clock_drift' / construct.Int16sl,
'corr_spacing' / construct.Int16ul,
'acceleration' / construct.Int8sl,
'sync_flags' / construct.Int8ul,
'tow_flags' / construct.Int8ul,
'track_flags' / construct.Int8ul,
'nav_flags' / construct.Int8ul,
'pset_flags' / construct.Int8ul,
'misc_flags' / construct.Int8ul,)
__slots__ = [
'recv_time',
'tot',
'P',
'P_std',
'L',
'cn0',
'lock',
'sid',
'doppler',
'doppler_std',
'uptime',
'clock_offset',
'clock_drift',
'corr_spacing',
'acceleration',
'sync_flags',
'tow_flags',
'track_flags',
'nav_flags',
'pset_flags',
'misc_flags',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgTrackingStateDetailedDepA,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgTrackingStateDetailedDepA, self).__init__()
self.msg_type = SBP_MSG_TRACKING_STATE_DETAILED_DEP_A
self.sender = kwargs.pop('sender', SENDER_ID)
self.recv_time = kwargs.pop('recv_time')
self.tot = kwargs.pop('tot')
self.P = kwargs.pop('P')
self.P_std = kwargs.pop('P_std')
self.L = kwargs.pop('L')
self.cn0 = kwargs.pop('cn0')
self.lock = kwargs.pop('lock')
self.sid = kwargs.pop('sid')
self.doppler = kwargs.pop('doppler')
self.doppler_std = kwargs.pop('doppler_std')
self.uptime = kwargs.pop('uptime')
self.clock_offset = kwargs.pop('clock_offset')
self.clock_drift = kwargs.pop('clock_drift')
self.corr_spacing = kwargs.pop('corr_spacing')
self.acceleration = kwargs.pop('acceleration')
self.sync_flags = kwargs.pop('sync_flags')
self.tow_flags = kwargs.pop('tow_flags')
self.track_flags = kwargs.pop('track_flags')
self.nav_flags = kwargs.pop('nav_flags')
self.pset_flags = kwargs.pop('pset_flags')
self.misc_flags = kwargs.pop('misc_flags')
def __repr__(self):
return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgTrackingStateDetailedDepA.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgTrackingStateDetailedDepA(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgTrackingStateDetailedDepA._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgTrackingStateDetailedDepA._parser.build(c)
return self.pack()
def into_buffer(self, buf, offset):
"""Produce a framed/packed SBP message into the provided buffer and offset.
"""
self.payload = containerize(exclude_fields(self))
self.parser = MsgTrackingStateDetailedDepA._parser
self.stream_payload.reset(buf, offset)
return self.pack_into(buf, offset, self._build_payload)
def to_json_dict(self):
self.to_binary()
d = super( MsgTrackingStateDetailedDepA, self).to_json_dict()
j = walk_json_dict(exclude_fields(self))
d.update(j)
return d
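# Hedged round-trip sketch (an assumption, not in the original file): a dict
# produced by to_json_dict() can be fed back through the from_json helper.
#
# msg_dict = some_msg.to_json_dict()  # hypothetical existing message object
# rebuilt = MsgTrackingStateDetailedDepA.from_json(json.dumps(msg_dict))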
SBP_MSG_TRACKING_STATE_DETAILED_DEP = 0x0011
class MsgTrackingStateDetailedDep(SBP):
"""SBP class for message MSG_TRACKING_STATE_DETAILED_DEP (0x0011).
You can have MSG_TRACKING_STATE_DETAILED_DEP inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
Deprecated.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
recv_time : int
Receiver clock time.
tot : GPSTimeDep
Time of transmission of signal from satellite. TOW only valid when
TOW status is decoded or propagated. WN only valid when week
number valid flag is set.
P : int
Pseudorange observation. Valid only when pseudorange valid flag is
set.
P_std : int
Pseudorange observation standard deviation. Valid only when
pseudorange valid flag is set.
L : CarrierPhase
Carrier phase observation with typical sign convention. Valid only
when PLL pessimistic lock is achieved.
cn0 : int
Carrier-to-Noise density
lock : int
Lock time. It is encoded according to DF402 from the RTCM 10403.2
Amendment 2 specification. Valid values range from 0 to 15.
sid : GnssSignalDep
GNSS signal identifier.
doppler : int
Carrier Doppler frequency.
doppler_std : int
Carrier Doppler frequency standard deviation.
uptime : int
Number of seconds of continuous tracking. Specifies how long the
signal has been continuously tracked.
clock_offset : int
TCXO clock offset. Valid only when the clock valid flag is set.
clock_drift : int
TCXO clock drift. Valid only when the clock valid flag is set.
corr_spacing : int
Early-Prompt (EP) and Prompt-Late (PL) correlators spacing.
acceleration : int
Acceleration. Valid only when acceleration valid flag is set.
sync_flags : int
Synchronization status flags.
tow_flags : int
TOW status flags.
track_flags : int
Tracking loop status flags.
nav_flags : int
Navigation data status flags.
pset_flags : int
Parameters sets flags.
misc_flags : int
Miscellaneous flags.
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
'recv_time' / construct.Int64ul,
'tot' / construct.Struct(GPSTimeDep._parser),
'P' / construct.Int32ul,
'P_std' / construct.Int16ul,
'L' / construct.Struct(CarrierPhase._parser),
'cn0' / construct.Int8ul,
'lock' / construct.Int16ul,
'sid' / construct.Struct(GnssSignalDep._parser),
'doppler' / construct.Int32sl,
'doppler_std' / construct.Int16ul,
'uptime' / construct.Int32ul,
'clock_offset' / construct.Int16sl,
'clock_drift' / construct.Int16sl,
'corr_spacing' / construct.Int16ul,
'acceleration' / construct.Int8sl,
'sync_flags' / construct.Int8ul,
'tow_flags' / construct.Int8ul,
'track_flags' / construct.Int8ul,
'nav_flags' / construct.Int8ul,
'pset_flags' / construct.Int8ul,
'misc_flags' / construct.Int8ul,)
__slots__ = [
'recv_time',
'tot',
'P',
'P_std',
'L',
'cn0',
'lock',
'sid',
'doppler',
'doppler_std',
'uptime',
'clock_offset',
'clock_drift',
'corr_spacing',
'acceleration',
'sync_flags',
'tow_flags',
'track_flags',
'nav_flags',
'pset_flags',
'misc_flags',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgTrackingStateDetailedDep,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgTrackingStateDetailedDep, self).__init__()
self.msg_type = SBP_MSG_TRACKING_STATE_DETAILED_DEP
self.sender = kwargs.pop('sender', SENDER_ID)
self.recv_time = kwargs.pop('recv_time')
self.tot = kwargs.pop('tot')
self.P = kwargs.pop('P')
self.P_std = kwargs.pop('P_std')
self.L = kwargs.pop('L')
self.cn0 = kwargs.pop('cn0')
self.lock = kwargs.pop('lock')
self.sid = kwargs.pop('sid')
self.doppler = kwargs.pop('doppler')
self.doppler_std = kwargs.pop('doppler_std')
self.uptime = kwargs.pop('uptime')
self.clock_offset = kwargs.pop('clock_offset')
self.clock_drift = kwargs.pop('clock_drift')
self.corr_spacing = kwargs.pop('corr_spacing')
self.acceleration = kwargs.pop('acceleration')
self.sync_flags = kwargs.pop('sync_flags')
self.tow_flags = kwargs.pop('tow_flags')
self.track_flags = kwargs.pop('track_flags')
self.nav_flags = kwargs.pop('nav_flags')
self.pset_flags = kwargs.pop('pset_flags')
self.misc_flags = kwargs.pop('misc_flags')
def __repr__(self):
return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgTrackingStateDetailedDep.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgTrackingStateDetailedDep(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgTrackingStateDetailedDep._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgTrackingStateDetailedDep._parser.build(c)
return self.pack()
def into_buffer(self, buf, offset):
"""Produce a framed/packed SBP message into the provided buffer and offset.
"""
self.payload = containerize(exclude_fields(self))
self.parser = MsgTrackingStateDetailedDep._parser
self.stream_payload.reset(buf, offset)
return self.pack_into(buf, offset, self._build_payload)
def to_json_dict(self):
self.to_binary()
d = super( MsgTrackingStateDetailedDep, self).to_json_dict()
j = walk_json_dict(exclude_fields(self))
d.update(j)
return d
SBP_MSG_TRACKING_STATE = 0x0041
class MsgTrackingState(SBP):
"""SBP class for message MSG_TRACKING_STATE (0x0041).
You can have MSG_TRACKING_STATE inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
The tracking message returns a variable-length array of tracking
channel states. It reports status and carrier-to-noise density
measurements for all tracked satellites.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
states : array
Signal tracking channel state
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
construct.GreedyRange('states' / construct.Struct(TrackingChannelState._parser)),)
__slots__ = [
'states',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgTrackingState,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgTrackingState, self).__init__()
self.msg_type = SBP_MSG_TRACKING_STATE
self.sender = kwargs.pop('sender', SENDER_ID)
self.states = kwargs.pop('states')
def __repr__(self):
return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgTrackingState.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgTrackingState(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgTrackingState._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgTrackingState._parser.build(c)
return self.pack()
def into_buffer(self, buf, offset):
"""Produce a framed/packed SBP message into the provided buffer and offset.
"""
self.payload = containerize(exclude_fields(self))
self.parser = MsgTrackingState._parser
self.stream_payload.reset(buf, offset)
return self.pack_into(buf, offset, self._build_payload)
def to_json_dict(self):
self.to_binary()
d = super( MsgTrackingState, self).to_json_dict()
j = walk_json_dict(exclude_fields(self))
d.update(j)
return d
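# Usage sketch (illustrative, not generated code): MsgTrackingState._parser
# wraps a GreedyRange, so parsing consumes the whole payload and yields one
# fixed-size TrackingChannelState record per tracked signal; from_binary()
# above relies on the parse result exposing that array as `states`. The
# payload argument is assumed to be raw MSG_TRACKING_STATE payload bytes.
def _example_parse_tracking_states(payload):
    parsed = MsgTrackingState._parser.parse(payload)
    return list(parsed.states)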
SBP_MSG_MEASUREMENT_STATE = 0x0061
class MsgMeasurementState(SBP):
"""SBP class for message MSG_MEASUREMENT_STATE (0x0061).
You can have MSG_MEASUREMENT_STATE inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
The tracking message returns a variable-length array of tracking
channel states. It reports status and carrier-to-noise density
measurements for all tracked satellites.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
states : array
ME signal tracking channel state
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
construct.GreedyRange('states' / construct.Struct(MeasurementState._parser)),)
__slots__ = [
'states',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgMeasurementState,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgMeasurementState, self).__init__()
self.msg_type = SBP_MSG_MEASUREMENT_STATE
self.sender = kwargs.pop('sender', SENDER_ID)
self.states = kwargs.pop('states')
def __repr__(self):
return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgMeasurementState.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgMeasurementState(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgMeasurementState._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgMeasurementState._parser.build(c)
return self.pack()
def into_buffer(self, buf, offset):
"""Produce a framed/packed SBP message into the provided buffer and offset.
"""
self.payload = containerize(exclude_fields(self))
self.parser = MsgMeasurementState._parser
self.stream_payload.reset(buf, offset)
return self.pack_into(buf, offset, self._build_payload)
def to_json_dict(self):
self.to_binary()
d = super( MsgMeasurementState, self).to_json_dict()
j = walk_json_dict(exclude_fields(self))
d.update(j)
return d
SBP_MSG_TRACKING_IQ = 0x002D
class MsgTrackingIq(SBP):
"""SBP class for message MSG_TRACKING_IQ (0x002D).
You can have MSG_TRACKING_IQ inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
When enabled, a tracking channel can output the correlations at each
update interval.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
channel : int
Tracking channel of origin
sid : GnssSignal
GNSS signal identifier
corrs : array
Early, Prompt and Late correlations
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
'channel' / construct.Int8ul,
'sid' / construct.Struct(GnssSignal._parser),
'corrs' / construct.Array(3, construct.Byte),)
__slots__ = [
'channel',
'sid',
'corrs',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgTrackingIq,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgTrackingIq, self).__init__()
self.msg_type = SBP_MSG_TRACKING_IQ
self.sender = kwargs.pop('sender', SENDER_ID)
self.channel = kwargs.pop('channel')
self.sid = kwargs.pop('sid')
self.corrs = kwargs.pop('corrs')
def __repr__(self):
return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgTrackingIq.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgTrackingIq(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgTrackingIq._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgTrackingIq._parser.build(c)
return self.pack()
def into_buffer(self, buf, offset):
"""Produce a framed/packed SBP message into the provided buffer and offset.
"""
self.payload = containerize(exclude_fields(self))
self.parser = MsgTrackingIq._parser
self.stream_payload.reset(buf, offset)
return self.pack_into(buf, offset, self._build_payload)
def to_json_dict(self):
self.to_binary()
d = super( MsgTrackingIq, self).to_json_dict()
j = walk_json_dict(exclude_fields(self))
d.update(j)
return d
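# Usage sketch (illustrative; field values are made up): constructing a
# MsgTrackingIq inline and framing it. `sid` is given as a dict shaped like
# GnssSignal, which containerize() is assumed to accept when building the
# payload.
def _example_tracking_iq():
    msg = MsgTrackingIq(channel=3,
                        sid={'sat': 15, 'code': 0},
                        corrs=[10, 200, 12],
                        sender=0x42)
    return msg.to_binary()  # framed/packed SBP bytes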
SBP_MSG_TRACKING_IQ_DEP_B = 0x002C
class MsgTrackingIqDepB(SBP):
"""SBP class for message MSG_TRACKING_IQ_DEP_B (0x002C).
You can have MSG_TRACKING_IQ_DEP_B inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
When enabled, a tracking channel can output the correlations at each
update interval.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
channel : int
Tracking channel of origin
sid : GnssSignal
GNSS signal identifier
corrs : array
Early, Prompt and Late correlations
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
'channel' / construct.Int8ul,
'sid' / construct.Struct(GnssSignal._parser),
'corrs' / construct.Array(3, construct.Byte),)
__slots__ = [
'channel',
'sid',
'corrs',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgTrackingIqDepB,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgTrackingIqDepB, self).__init__()
self.msg_type = SBP_MSG_TRACKING_IQ_DEP_B
self.sender = kwargs.pop('sender', SENDER_ID)
self.channel = kwargs.pop('channel')
self.sid = kwargs.pop('sid')
self.corrs = kwargs.pop('corrs')
def __repr__(self):
return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgTrackingIqDepB.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgTrackingIqDepB(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgTrackingIqDepB._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgTrackingIqDepB._parser.build(c)
return self.pack()
def into_buffer(self, buf, offset):
"""Produce a framed/packed SBP message into the provided buffer and offset.
"""
self.payload = containerize(exclude_fields(self))
self.parser = MsgTrackingIqDepB._parser
self.stream_payload.reset(buf, offset)
return self.pack_into(buf, offset, self._build_payload)
def to_json_dict(self):
self.to_binary()
d = super( MsgTrackingIqDepB, self).to_json_dict()
j = walk_json_dict(exclude_fields(self))
d.update(j)
return d
SBP_MSG_TRACKING_IQ_DEP_A = 0x001C
class MsgTrackingIqDepA(SBP):
"""SBP class for message MSG_TRACKING_IQ_DEP_A (0x001C).
You can have MSG_TRACKING_IQ_DEP_A inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
Deprecated.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
channel : int
Tracking channel of origin
sid : GnssSignalDep
GNSS signal identifier
corrs : array
Early, Prompt and Late correlations
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
'channel' / construct.Int8ul,
'sid' / construct.Struct(GnssSignalDep._parser),
'corrs' / construct.Array(3, construct.Byte),)
__slots__ = [
'channel',
'sid',
'corrs',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgTrackingIqDepA,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgTrackingIqDepA, self).__init__()
self.msg_type = SBP_MSG_TRACKING_IQ_DEP_A
self.sender = kwargs.pop('sender', SENDER_ID)
self.channel = kwargs.pop('channel')
self.sid = kwargs.pop('sid')
self.corrs = kwargs.pop('corrs')
def __repr__(self):
return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgTrackingIqDepA.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgTrackingIqDepA(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgTrackingIqDepA._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgTrackingIqDepA._parser.build(c)
return self.pack()
def into_buffer(self, buf, offset):
"""Produce a framed/packed SBP message into the provided buffer and offset.
"""
self.payload = containerize(exclude_fields(self))
self.parser = MsgTrackingIqDepA._parser
self.stream_payload.reset(buf, offset)
return self.pack_into(buf, offset, self._build_payload)
def to_json_dict(self):
self.to_binary()
d = super( MsgTrackingIqDepA, self).to_json_dict()
j = walk_json_dict(exclude_fields(self))
d.update(j)
return d
SBP_MSG_TRACKING_STATE_DEP_A = 0x0016
class MsgTrackingStateDepA(SBP):
"""SBP class for message MSG_TRACKING_STATE_DEP_A (0x0016).
You can have MSG_TRACKING_STATE_DEP_A inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
Deprecated.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
states : array
Satellite tracking channel state
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
construct.GreedyRange('states' / construct.Struct(TrackingChannelStateDepA._parser)),)
__slots__ = [
'states',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgTrackingStateDepA,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgTrackingStateDepA, self).__init__()
self.msg_type = SBP_MSG_TRACKING_STATE_DEP_A
self.sender = kwargs.pop('sender', SENDER_ID)
self.states = kwargs.pop('states')
def __repr__(self):
return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgTrackingStateDepA.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgTrackingStateDepA(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgTrackingStateDepA._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgTrackingStateDepA._parser.build(c)
return self.pack()
def into_buffer(self, buf, offset):
"""Produce a framed/packed SBP message into the provided buffer and offset.
"""
self.payload = containerize(exclude_fields(self))
self.parser = MsgTrackingStateDepA._parser
self.stream_payload.reset(buf, offset)
return self.pack_into(buf, offset, self._build_payload)
def to_json_dict(self):
self.to_binary()
d = super( MsgTrackingStateDepA, self).to_json_dict()
j = walk_json_dict(exclude_fields(self))
d.update(j)
return d
SBP_MSG_TRACKING_STATE_DEP_B = 0x0013
class MsgTrackingStateDepB(SBP):
"""SBP class for message MSG_TRACKING_STATE_DEP_B (0x0013).
You can have MSG_TRACKING_STATE_DEP_B inherit its fields directly
from an inherited SBP object, or construct it inline using a dict
of its fields.
Deprecated.
Parameters
----------
sbp : SBP
SBP parent object to inherit from.
states : array
Signal tracking channel state
sender : int
Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
"""
_parser = construct.Struct(
construct.GreedyRange('states' / construct.Struct(TrackingChannelStateDepB._parser)),)
__slots__ = [
'states',
]
def __init__(self, sbp=None, **kwargs):
if sbp:
super( MsgTrackingStateDepB,
self).__init__(sbp.msg_type, sbp.sender, sbp.length,
sbp.payload, sbp.crc)
self.from_binary(sbp.payload)
else:
super( MsgTrackingStateDepB, self).__init__()
self.msg_type = SBP_MSG_TRACKING_STATE_DEP_B
self.sender = kwargs.pop('sender', SENDER_ID)
self.states = kwargs.pop('states')
def __repr__(self):
return fmt_repr(self)
@staticmethod
def from_json(s):
"""Given a JSON-encoded string s, build a message object.
"""
d = json.loads(s)
return MsgTrackingStateDepB.from_json_dict(d)
@staticmethod
def from_json_dict(d):
sbp = SBP.from_json_dict(d)
return MsgTrackingStateDepB(sbp, **d)
def from_binary(self, d):
"""Given a binary payload d, update the appropriate payload fields of
the message.
"""
p = MsgTrackingStateDepB._parser.parse(d)
for n in self.__class__.__slots__:
setattr(self, n, getattr(p, n))
def to_binary(self):
"""Produce a framed/packed SBP message.
"""
c = containerize(exclude_fields(self))
self.payload = MsgTrackingStateDepB._parser.build(c)
return self.pack()
def into_buffer(self, buf, offset):
"""Produce a framed/packed SBP message into the provided buffer and offset.
"""
self.payload = containerize(exclude_fields(self))
self.parser = MsgTrackingStateDepB._parser
self.stream_payload.reset(buf, offset)
return self.pack_into(buf, offset, self._build_payload)
def to_json_dict(self):
self.to_binary()
d = super( MsgTrackingStateDepB, self).to_json_dict()
j = walk_json_dict(exclude_fields(self))
d.update(j)
return d
msg_classes = {
0x0021: MsgTrackingStateDetailedDepA,
0x0011: MsgTrackingStateDetailedDep,
0x0041: MsgTrackingState,
0x0061: MsgMeasurementState,
0x002D: MsgTrackingIq,
0x002C: MsgTrackingIqDepB,
0x001C: MsgTrackingIqDepA,
0x0016: MsgTrackingStateDepA,
0x0013: MsgTrackingStateDepB,
}
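# Usage sketch (illustrative): dispatching a framed SBP message to its
# concrete tracking class. `sbp_msg` stands for any SBP instance produced
# upstream (e.g. by a framer); passing it as the first constructor argument
# re-parses its payload into the typed fields, as in the __init__ methods
# above.
def dispatch_tracking(sbp_msg):
    msg_cls = msg_classes.get(sbp_msg.msg_type)
    if msg_cls is None:
        return sbp_msg  # not a tracking message; pass through unchanged
    return msg_cls(sbp_msg)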
hexsha: fc08e55e4257c75697ee4b0edbda6a5b7dd03c7b | size: 43308 | ext: py | lang: Python
repo_path: kinow_client/apis/geolocations_api.py | repo_name: kinow-io/kinow-python-sdk | head_hexsha: 4c1699a3c78048b84287bd049a669651a5b4e2d5 | licenses: ["Apache-2.0"]
stars: 1 (2019-06-26T14:24:54.000Z) | issues: null | forks: 1 (2018-02-01T10:08:40.000Z)
# coding: utf-8
"""
Server API
Reference for Server API (REST/JSON)
OpenAPI spec version: 1.4.58
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class GeolocationsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def get_geoloc_settings(self, type, type_id, **kwargs):
"""
Get geolocation settings for an item
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_geoloc_settings(type, type_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str type: Item type, available values are: category, subscription, product, video, extract, blogpage, slider, topmenu, homerail (required)
:param int type_id: Item ID (required)
:return: GeolocSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_geoloc_settings_with_http_info(type, type_id, **kwargs)
else:
(data) = self.get_geoloc_settings_with_http_info(type, type_id, **kwargs)
return data
def get_geoloc_settings_with_http_info(self, type, type_id, **kwargs):
"""
Get geolocation settings for an item
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_geoloc_settings_with_http_info(type, type_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str type: Item type, available values are: category, subscription, product, video, extract, blogpage, slider, topmenu, homerail (required)
:param int type_id: Item ID (required)
:return: GeolocSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['type', 'type_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
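# locals() captures the declared parameters plus the `kwargs` dict; the
# loop below flattens validated keyword arguments into `params` and
# rejects any name not listed in all_params.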
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_geoloc_settings" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'type' is set
if ('type' not in params) or (params['type'] is None):
raise ValueError("Missing the required parameter `type` when calling `get_geoloc_settings`")
# verify the required parameter 'type_id' is set
if ('type_id' not in params) or (params['type_id'] is None):
raise ValueError("Missing the required parameter `type_id` when calling `get_geoloc_settings`")
collection_formats = {}
resource_path = '/geolocations/settings'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'type' in params:
query_params['type'] = params['type']
if 'type_id' in params:
query_params['type_id'] = params['type_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GeolocSettings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_ip_location(self, ip_address, **kwargs):
"""
Get IP location
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_ip_location(ip_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str ip_address: IP address (required)
:return: IPLocation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_ip_location_with_http_info(ip_address, **kwargs)
else:
(data) = self.get_ip_location_with_http_info(ip_address, **kwargs)
return data
def get_ip_location_with_http_info(self, ip_address, **kwargs):
"""
Get IP location
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_ip_location_with_http_info(ip_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str ip_address: IP address (required)
:return: IPLocation
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ip_address']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_ip_location" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'ip_address' is set
if ('ip_address' not in params) or (params['ip_address'] is None):
raise ValueError("Missing the required parameter `ip_address` when calling `get_ip_location`")
collection_formats = {}
resource_path = '/geolocations/ip'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'ip_address' in params:
query_params['ip_address'] = params['ip_address']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='IPLocation',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_platform_access_info(self, ip_address, **kwargs):
"""
Get PlatformAccessInfo by ip
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_platform_access_info(ip_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str ip_address: IP address (required)
:return: PlatformAccessInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_platform_access_info_with_http_info(ip_address, **kwargs)
else:
(data) = self.get_platform_access_info_with_http_info(ip_address, **kwargs)
return data
def get_platform_access_info_with_http_info(self, ip_address, **kwargs):
"""
Get PlatformAccessInfo by ip
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_platform_access_info_with_http_info(ip_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str ip_address: IP address (required)
:return: PlatformAccessInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ip_address']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_platform_access_info" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'ip_address' is set
if ('ip_address' not in params) or (params['ip_address'] is None):
raise ValueError("Missing the required parameter `ip_address` when calling `get_platform_access_info`")
collection_formats = {}
resource_path = '/geolocations/platform-access'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'ip_address' in params:
query_params['ip_address'] = params['ip_address']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PlatformAccessInfo',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_product_geolocations(self, product_id, **kwargs):
"""
Get product geolocation restrictions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_product_geolocations(product_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param int page:
:param int per_page:
:return: Geolocs
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_product_geolocations_with_http_info(product_id, **kwargs)
else:
(data) = self.get_product_geolocations_with_http_info(product_id, **kwargs)
return data
def get_product_geolocations_with_http_info(self, product_id, **kwargs):
"""
Get product geolocation restrictions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_product_geolocations_with_http_info(product_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param int page:
:param int per_page:
:return: Geolocs
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['product_id', 'page', 'per_page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_product_geolocations" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'product_id' is set
if ('product_id' not in params) or (params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling `get_product_geolocations`")
collection_formats = {}
resource_path = '/products/{product_id}/geolocations'.replace('{format}', 'json')
path_params = {}
if 'product_id' in params:
path_params['product_id'] = params['product_id']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Geolocs',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_product_geolocations_by_ip(self, product_id, ip_address, **kwargs):
"""
Check product access using geolocation
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_product_geolocations_by_ip(product_id, ip_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param str ip_address: IP address (required)
:param int page:
:param int per_page:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_product_geolocations_by_ip_with_http_info(product_id, ip_address, **kwargs)
else:
(data) = self.get_product_geolocations_by_ip_with_http_info(product_id, ip_address, **kwargs)
return data
def get_product_geolocations_by_ip_with_http_info(self, product_id, ip_address, **kwargs):
"""
Check product access using geolocation
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_product_geolocations_by_ip_with_http_info(product_id, ip_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param str ip_address: IP address (required)
:param int page:
:param int per_page:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['product_id', 'ip_address', 'page', 'per_page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_product_geolocations_by_ip" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'product_id' is set
if ('product_id' not in params) or (params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling `get_product_geolocations_by_ip`")
# verify the required parameter 'ip_address' is set
if ('ip_address' not in params) or (params['ip_address'] is None):
raise ValueError("Missing the required parameter `ip_address` when calling `get_product_geolocations_by_ip`")
collection_formats = {}
resource_path = '/products/{product_id}/geolocations'.replace('{format}', 'json')
path_params = {}
if 'product_id' in params:
path_params['product_id'] = params['product_id']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
header_params = {}
form_params = []
local_var_files = {}
if 'ip_address' in params:
form_params.append(('ip_address', params['ip_address']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
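# Note: set_default_header() stores the header on the shared ApiClient,
# so this Content-Type persists for subsequent requests made through the
# same client, not just this call.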
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_video_geolocation_by_ip(self, video_id, ip_address, **kwargs):
"""
Check access to a video by geolocation
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_geolocation_by_ip(video_id, ip_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param str ip_address: IP address (required)
:param int page:
:param int per_page:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_video_geolocation_by_ip_with_http_info(video_id, ip_address, **kwargs)
else:
(data) = self.get_video_geolocation_by_ip_with_http_info(video_id, ip_address, **kwargs)
return data
def get_video_geolocation_by_ip_with_http_info(self, video_id, ip_address, **kwargs):
"""
Check access to a video by geolocation
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_video_geolocation_by_ip_with_http_info(video_id, ip_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param str ip_address: IP address (required)
:param int page:
:param int per_page:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'ip_address', 'page', 'per_page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_video_geolocation_by_ip" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `get_video_geolocation_by_ip`")
# verify the required parameter 'ip_address' is set
if ('ip_address' not in params) or (params['ip_address'] is None):
raise ValueError("Missing the required parameter `ip_address` when calling `get_video_geolocation_by_ip`")
collection_formats = {}
resource_path = '/videos/{video_id}/geolocations/{ip_address}'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
if 'ip_address' in params:
path_params['ip_address'] = params['ip_address']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_product_geolocation(self, product_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs):
"""
Handle geolocation for products by countries
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.set_product_geolocation(product_id, enabled, behavior_detected_countries, behavior_non_detected_countries, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param int enabled: Enabled (required)
:param str behavior_detected_countries: Behavior for detected countries (required)
:param str behavior_non_detected_countries: Behavior for non-detected countries (required)
:param str countries: IDs of the non-detected countries separated by comma
:param int page:
:param int per_page:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.set_product_geolocation_with_http_info(product_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs)
else:
(data) = self.set_product_geolocation_with_http_info(product_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs)
return data
def set_product_geolocation_with_http_info(self, product_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs):
"""
Handle geolocation for products by countries
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.set_product_geolocation_with_http_info(product_id, enabled, behavior_detected_countries, behavior_non_detected_countries, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param int enabled: Enabled (required)
:param str behavior_detected_countries: Behavior for detected countries (required)
:param str behavior_non_detected_countries: Behavior for non-detected countries (required)
:param str countries: IDs of the non-detected countries separated by comma
:param int page:
:param int per_page:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['product_id', 'enabled', 'behavior_detected_countries', 'behavior_non_detected_countries', 'countries', 'page', 'per_page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_product_geolocation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'product_id' is set
if ('product_id' not in params) or (params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling `set_product_geolocation`")
# verify the required parameter 'enabled' is set
if ('enabled' not in params) or (params['enabled'] is None):
raise ValueError("Missing the required parameter `enabled` when calling `set_product_geolocation`")
# verify the required parameter 'behavior_detected_countries' is set
if ('behavior_detected_countries' not in params) or (params['behavior_detected_countries'] is None):
raise ValueError("Missing the required parameter `behavior_detected_countries` when calling `set_product_geolocation`")
# verify the required parameter 'behavior_non_detected_countries' is set
if ('behavior_non_detected_countries' not in params) or (params['behavior_non_detected_countries'] is None):
raise ValueError("Missing the required parameter `behavior_non_detected_countries` when calling `set_product_geolocation`")
collection_formats = {}
resource_path = '/products/{product_id}/geolocations'.replace('{format}', 'json')
path_params = {}
if 'product_id' in params:
path_params['product_id'] = params['product_id']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
header_params = {}
form_params = []
local_var_files = {}
if 'countries' in params:
form_params.append(('countries', params['countries']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
if 'enabled' in params:
form_params.append(('enabled', params['enabled']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
if 'behavior_detected_countries' in params:
form_params.append(('behavior_detected_countries', params['behavior_detected_countries']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
if 'behavior_non_detected_countries' in params:
form_params.append(('behavior_non_detected_countries', params['behavior_non_detected_countries']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_video_geolocation(self, video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs):
"""
Handle geolocation for videos by countries
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.set_video_geolocation(video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int enabled: Enabled (required)
:param str behavior_detected_countries: Behavior for detected countries (required)
:param str behavior_non_detected_countries: Behavior for non-detected countries (required)
:param str countries: IDs of the non-detected countries separated by comma
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.set_video_geolocation_with_http_info(video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs)
else:
(data) = self.set_video_geolocation_with_http_info(video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs)
return data
def set_video_geolocation_with_http_info(self, video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, **kwargs):
"""
Handle geolocation for videos by countries
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.set_video_geolocation_with_http_info(video_id, enabled, behavior_detected_countries, behavior_non_detected_countries, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int video_id: Video ID to fetch (required)
:param int enabled: Enabled (required)
:param str behavior_detected_countries: Behavior for detected countries (required)
:param str behavior_non_detected_countries: Behavior for non-detected countries (required)
:param str countries: IDs of the non-detected countries separated by comma
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['video_id', 'enabled', 'behavior_detected_countries', 'behavior_non_detected_countries', 'countries']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_video_geolocation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'video_id' is set
if ('video_id' not in params) or (params['video_id'] is None):
raise ValueError("Missing the required parameter `video_id` when calling `set_video_geolocation`")
# verify the required parameter 'enabled' is set
if ('enabled' not in params) or (params['enabled'] is None):
raise ValueError("Missing the required parameter `enabled` when calling `set_video_geolocation`")
# verify the required parameter 'behavior_detected_countries' is set
if ('behavior_detected_countries' not in params) or (params['behavior_detected_countries'] is None):
raise ValueError("Missing the required parameter `behavior_detected_countries` when calling `set_video_geolocation`")
# verify the required parameter 'behavior_non_detected_countries' is set
if ('behavior_non_detected_countries' not in params) or (params['behavior_non_detected_countries'] is None):
raise ValueError("Missing the required parameter `behavior_non_detected_countries` when calling `set_video_geolocation`")
collection_formats = {}
resource_path = '/videos/{video_id}/geolocations'.replace('{format}', 'json')
path_params = {}
if 'video_id' in params:
path_params['video_id'] = params['video_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'countries' in params:
form_params.append(('countries', params['countries']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
if 'enabled' in params:
form_params.append(('enabled', params['enabled']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
if 'behavior_detected_countries' in params:
form_params.append(('behavior_detected_countries', params['behavior_detected_countries']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
if 'behavior_non_detected_countries' in params:
form_params.append(('behavior_non_detected_countries', params['behavior_non_detected_countries']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
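# Usage sketch (illustrative, not generated code): synchronous and
# asynchronous calls against this client. 192.0.2.1 is a documentation
# address; authentication (ApiClientId/ApiClientSecret) is assumed to be
# configured on the ApiClient elsewhere.
def _example_geolocations_usage():
    api = GeolocationsApi()  # falls back to the shared default ApiClient
    location = api.get_ip_location('192.0.2.1')  # blocking; returns IPLocation
    def _on_response(response):
        print(response)
    # With a callback supplied, the call returns the request thread instead.
    thread = api.get_ip_location('192.0.2.1', callback=_on_response)
    return location, thread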
hexsha: fc1ada8c6088cf7f4a6d8704da811a1be3d20cae | size: 88529 | ext: py | lang: Python
repo_path: ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py | repo_name: Arenadata/ambari | head_hexsha: 4628267441121779113d98936dcdf5d9be60553c | licenses: ["Apache-2.0"]
stars: 5 (2017-07-20T11:15:10.000Z .. 2020-04-16T15:42:55.000Z) | issues: 8 (2020-06-18T17:31:19.000Z .. 2022-03-02T08:32:03.000Z) | forks: 12 (2017-05-17T09:48:01.000Z .. 2021-08-05T19:01:25.000Z)
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from ambari_commons import OSCheck
import json
import os
import tempfile
import time
from stacks.utils.RMFTestCase import *
from mock.mock import MagicMock, patch, call
from resource_management.libraries.script.script import Script
from resource_management.core import shell
from resource_management.core.exceptions import Fail
@patch.object(Script, 'format_package_name', new = MagicMock())
class TestNamenode(RMFTestCase):
COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
STACK_VERSION = "2.0.6"
DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"NAMENODE"}
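# The RMFTestCase harness replays the named command of the packaged script
# against a canned JSON configuration and records every Resource the script
# declares; assertResourceCalled() then checks those declarations in order
# and assertNoMoreResources() verifies none were left unasserted.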
def test_configure_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "configure",
config_file = "default.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_default()
self.assertNoMoreResources()
def test_start_default_alt_fs(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
config_file = "altfs_plus_hdfs.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0,"")],
)
self.assert_configure_default()
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf namenode -format -nonInteractive',
path = ['/usr/bin'],
user = 'hdfs',
logoutput = True,
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
action = ['delete'],
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6405.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
tries=115,
try_sleep=10,
user="hdfs",
logoutput=True
)
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'wasb://abc@c6401.ambari.apache.org',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
owner = 'hdfs',
dfs_type = '',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'wasb://abc@c6401.ambari.apache.org',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'wasb://abc@c6401.ambari.apache.org',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
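# Hedged aside (helper not present in the original suite): the
# "ls ... | wc -l | grep -q ^0$" Execute asserted above is the empty-directory
# gate that runs before "namenode -format"; a rough Python equivalent:
def _namenode_dir_is_empty_sketch(self, path):
# True only when the directory exists and contains no entries.
return os.path.isdir(path) and len(os.listdir(path)) == 0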
def test_install_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "install",
config_file = "default_no_install.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
try_install=True
)
self.assert_configure_default()
self.assertNoMoreResources()
def test_start_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
config_file = "default.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0,"")],
)
self.assert_configure_default()
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf namenode -format -nonInteractive',
path = ['/usr/bin'],
user = 'hdfs',
logoutput = True,
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
create_parents = True,
group = 'hadoop'
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
create_parents = True,
group = 'hadoop'
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
action = ['delete'],
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
tries=115,
try_sleep=10,
user="hdfs",
logoutput=True
)
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
owner = 'hdfs',
dfs_type = '',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
owner = 'ambari-qa',
dfs_type = '',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
def test_stop_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "stop",
config_file = "default.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid")
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',action = ['delete'])
self.assertNoMoreResources()
def test_configure_secured(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "configure",
config_file = "secured.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_secured(False)
self.assertNoMoreResources()
def test_start_secured(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
config_file = "secured.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0,"")],
)
self.assert_configure_secured(False)
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf namenode -format -nonInteractive',
path = ['/usr/bin'],
user = 'hdfs',
logoutput = True,
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
action = ['delete'],
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
user='hdfs',
)
self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
tries=115,
try_sleep=10,
user="hdfs",
logoutput=True
)
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
hadoop_bin_dir = '/usr/bin',
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
owner = 'hdfs',
dfs_type = '',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0777
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
hadoop_bin_dir = '/usr/bin',
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0770
)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
hadoop_bin_dir = '/usr/bin',
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
def test_stop_secured(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "stop",
config_file = "secured.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid")
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',action = ['delete'])
self.assertNoMoreResources()
@patch("resource_management.libraries.functions.namenode_ha_utils.get_namenode_states")
def test_start_ha_default(self, get_namenode_states_mock):
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
config_file = "ha_default.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_default()
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
action = ['delete'],
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
tries=115,
try_sleep=10,
user="hdfs",
logoutput=True
)
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
owner = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@patch.object(time, "sleep")
@patch("resource_management.libraries.functions.namenode_ha_utils.get_namenode_states")
def test_start_ha_default_active_with_retry(self, get_namenode_states_mock, sleep_mock):
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
get_namenode_states_mock.side_effect = [([], [], active_namenodes), (active_namenodes, standby_namenodes, unknown_namenodes)]
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
config_file = "ha_default.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
self.assert_configure_default()
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
action = ['delete'],
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
tries=115,
try_sleep=10,
user="hdfs",
logoutput=True
)
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
owner = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
self.assertTrue(get_namenode_states_mock.called)
self.assertEqual(2, get_namenode_states_mock.call_count)
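# Hedged aside (illustrative helper, not in the original file): the retry above
# works because MagicMock.side_effect hands out one return value per call, so
# the first call reports the namenode as unknown and the second reports it
# active; a third call on this sketch would raise StopIteration.
def _side_effect_sketch(self):
m = MagicMock()
m.side_effect = [([], [], ['nn1']), (['nn1'], ['nn2'], [])]
return m(), m()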
@patch("resource_management.libraries.functions.namenode_ha_utils.get_namenode_states")
def test_start_ha_secured(self, get_namenode_states_mock):
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
config_file = "ha_secured.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_secured(True)
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
action = ['delete'],
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
user = 'hdfs',
)
self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
tries=115,
try_sleep=10,
user="hdfs",
logoutput=True
)
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = 'hdfs',
user = 'hdfs',
dfs_type = '',
owner = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = 'hdfs',
user = 'hdfs',
dfs_type = '',
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = 'hdfs',
user = 'hdfs',
dfs_type = '',
action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
# tests namenode start command when NameNode HA is enabled, and
# the HA cluster is started initially, rather than using the UI Wizard
@patch("resource_management.libraries.functions.namenode_ha_utils.get_namenode_states")
def test_start_ha_bootstrap_active_from_blueprint(self, get_namenode_states_mock):
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
config_file="ha_bootstrap_active_node.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_configure_default()
# verify that active namenode was formatted
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf namenode -format -nonInteractive',
path = ['/usr/bin'],
user = 'hdfs',
logoutput = True,
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
action = ['delete'],
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
tries=115,
try_sleep=10,
user="hdfs",
logoutput=True
)
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
owner = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = None,
user = 'hdfs',
dfs_type = '',
action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
# tests namenode start command when NameNode HA is enabled, and
# the HA cluster is started initially, rather than using the UI Wizard
# this test verifies the startup of a "standby" namenode
@patch.object(shell, "call")
@patch("resource_management.libraries.functions.namenode_ha_utils.get_namenode_states")
def test_start_ha_bootstrap_standby_from_blueprint(self, get_namenode_states_mock, call_mocks):
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
call_mocks = MagicMock(return_value=(0,""))
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
config_file="ha_bootstrap_standby_node.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = call_mocks
)
self.assert_configure_default()
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-bootstrapped/',
create_parents = True
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
# TODO: Using shell.call() to bootstrap standby which is patched to return status code '5' (i.e. already bootstrapped)
# Need to update the test case to verify that the standby case is detected, and that the bootstrap
# command is run before the namenode launches
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
action = ['delete'],
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertNoMoreResources()
self.assertTrue(call_mocks.called)
self.assertEqual(1, call_mocks.call_count)
calls = [
call('hdfs namenode -bootstrapStandby -nonInteractive', logoutput=False, user=u'hdfs')]
call_mocks.assert_has_calls(calls, any_order=False)
# tests namenode start command when NameNode HA is enabled, and
# the HA cluster is started initially, rather than using the UI Wizard
# this test verifies the startup of a "standby" namenode
@patch.object(shell, "call")
@patch("resource_management.libraries.functions.namenode_ha_utils.get_namenode_states")
def test_start_ha_bootstrap_standby_from_blueprint_initial_start(self, get_namenode_states_mock, call_mocks):
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
call_mocks = MagicMock()
call_mocks.side_effect = [(1, None), (0, None), (0, None)]
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
config_file="ha_bootstrap_standby_node_initial_start.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = call_mocks
)
self.assert_configure_default()
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-bootstrapped/',
create_parents = True
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
# TODO: Using shell.call() to bootstrap standby which is patched to return status code '5' (i.e. already bootstrapped)
# Need to update the test case to verify that the standby case is detected, and that the bootstrap
# command is run before the namenode launches
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
action = ['delete'],
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertNoMoreResources()
self.assertTrue(call_mocks.called)
self.assertEqual(2, call_mocks.call_count)
calls = [
call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs'),
call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs')]
call_mocks.assert_has_calls(calls, any_order=True)
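# Hedged sketch of the bootstrap retry exercised above (hypothetical helper,
# not part of the original suite): keep invoking bootstrapStandby through a
# shell.call-style runner until it reports a zero exit code.
def _bootstrap_standby_sketch(self, runner, attempts=2):
for _ in range(attempts):
code, _out = runner('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user='hdfs')
if code == 0:
return True
return False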
@patch.object(shell, "call")
@patch("resource_management.libraries.functions.namenode_ha_utils.get_namenode_states")
def test_start_ha_bootstrap_standby_from_blueprint_initial_start_dfs_nameservices(self, get_namenode_states_mock, call_mocks):
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
call_mocks = MagicMock()
call_mocks.side_effect = [(1, None), (0, None), (0, None)]
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
config_file="ha_bootstrap_standby_node_initial_start_dfs_nameservices.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = call_mocks
)
self.assert_configure_default()
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-bootstrapped/',
create_parents = True
)
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
mode = 0755
)
# TODO: Using shell.call() to bootstrap standby which is patched to return status code '5' (i.e. already bootstrapped)
# Need to update the test case to verify that the standby case is detected, and that the bootstrap
# command is run before the namenode launches
self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
)
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
action = ['delete'],
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
)
self.assertNoMoreResources()
self.assertTrue(call_mocks.called)
self.assertEqual(2, call_mocks.call_count)
calls = [
call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs'),
call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs')]
call_mocks.assert_has_calls(calls, any_order=True)
def test_decommission_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "decommission",
config_file = "default.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Execute', '', user = 'hdfs')
self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
user = 'hdfs',
conf_dir = '/etc/hadoop/conf',
bin_dir = '/usr/bin')
self.assertNoMoreResources()
def test_decommission_update_files_only(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "decommission",
config_file = "default_update_exclude_file_only.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertNoMoreResources()
def test_decommission_ha_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "decommission",
config_file = "ha_default.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Execute', '', user = 'hdfs')
self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
user = 'hdfs',
conf_dir = '/etc/hadoop/conf',
bin_dir = '/usr/bin')
self.assertNoMoreResources()
def test_decommission_secured(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "decommission",
config_file = "secured.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
group = 'hadoop',
)
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM;',
user = 'hdfs',
)
self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
bin_dir = '/usr/bin',
conf_dir = '/etc/hadoop/conf',
user = 'hdfs',
)
self.assertNoMoreResources()
def assert_configure_default(self):
self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
create_parents = True,
)
self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
create_parents = True,
)
self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
to = '/usr/lib/hadoop/lib/libsnappy.so',
)
self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
to = '/usr/lib/hadoop/lib64/libsnappy.so',
)
self.assertResourceCalled('Directory', '/etc/security/limits.d',
owner = 'root',
group = 'root',
create_parents = True,
)
self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
content = Template('hdfs.conf.j2'),
owner = 'root',
group = 'root',
mode = 0644,
)
self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
owner = 'hdfs',
group = 'hadoop',
conf_dir = '/etc/hadoop/conf',
configurations = self.getConfig()['configurations']['hdfs-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
)
self.assertResourceCalled('XmlConfig', 'core-site.xml',
owner = 'hdfs',
group = 'hadoop',
conf_dir = '/etc/hadoop/conf',
configurations = self.getConfig()['configurations']['core-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
mode = 0644
)
self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
content = Template('slaves.j2'),
owner = 'hdfs',
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
mode = 0755,
cd_access='a'
)
def assert_configure_secured(self, ha_enabled):
self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
create_parents = True,
)
self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
create_parents = True,
)
self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
to = '/usr/lib/hadoop/lib/libsnappy.so',
)
self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
to = '/usr/lib/hadoop/lib64/libsnappy.so',
)
self.assertResourceCalled('Directory', '/etc/security/limits.d',
owner = 'root',
group = 'root',
create_parents = True,
)
self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
content = Template('hdfs.conf.j2'),
owner = 'root',
group = 'root',
mode = 0644,
)
self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
content = Template('hdfs_dn_jaas.conf.j2'),
owner = 'hdfs',
group = 'hadoop',
)
self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
content = Template('hdfs_nn_jaas.conf.j2'),
owner = 'hdfs',
group = 'hadoop',
)
if ha_enabled:
self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_jn_jaas.conf',
content = Template('hdfs_jn_jaas.conf.j2'),
owner = 'hdfs',
group = 'hadoop',
)
self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
owner = 'hdfs',
group = 'hadoop',
conf_dir = '/etc/hadoop/conf',
configurations = self.getConfig()['configurations']['hdfs-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
)
self.assertResourceCalled('XmlConfig', 'core-site.xml',
owner = 'hdfs',
group = 'hadoop',
conf_dir = '/etc/hadoop/conf',
configurations = self.getConfig()['configurations']['core-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
mode = 0644
)
self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
content = Template('slaves.j2'),
owner = 'root',
)
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
owner = 'hdfs',
group = 'hadoop',
create_parents = True,
mode = 0755,
cd_access='a'
)
@patch("hdfs_rebalance.is_balancer_running")
@patch("resource_management.libraries.script.Script.put_structured_out")
def test_rebalance_hdfs(self, pso, hdfs_rebalance_mock):
hdfs_rebalance_mock.return_value = False
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "rebalancehdfs",
config_file = "rebalancehdfs_default.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf balancer -threshold -1'",
wait_for_finish=False
)
self.assertNoMoreResources()
@patch("hdfs_rebalance.is_balancer_running")
@patch("resource_management.libraries.script.Script.put_structured_out")
@patch("os.system")
def test_rebalance_secured_hdfs(self, pso, system_mock, hdfs_rebalance_mock):
system_mock.return_value = -1
hdfs_rebalance_mock.return_value = False
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "rebalancehdfs",
config_file = "rebalancehdfs_secured.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks=[(1, "no kinit")]
)
tempdir = tempfile.gettempdir()
ccache_path = os.path.join(tempdir, "hdfs_rebalance_cc_676e87466798ee1b4128732da3effe26e7dfc902e2c9ebdfde4331d2")
kinit_cmd = "/usr/bin/kinit -c {0} -kt /etc/security/keytabs/hdfs.headless.keytab hdfs@EXAMPLE.COM".format(ccache_path)
rebalance_cmd = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin KRB5CCNAME={0} ; hdfs --config /etc/hadoop/conf balancer -threshold -1'".format(ccache_path)
self.assertResourceCalled('Execute', kinit_cmd,
user = 'hdfs',
)
self.assertResourceCalled('Execute', rebalance_cmd,
wait_for_finish=False
)
self.assertNoMoreResources()
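# Hedged aside (illustrative helper, not in the original file): the secured
# rebalance command asserted above injects KRB5CCNAME into the balancer
# invocation; the template string is mirrored verbatim from the expectation.
def _rebalance_cmd_sketch(self, ccache):
return "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin KRB5CCNAME={0} ; hdfs --config /etc/hadoop/conf balancer -threshold -1'".format(ccache)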
@patch("os.path.isfile")
def test_ranger_installed_missing_file(self, isfile_mock):
"""
Tests that when Ranger is enabled for HDFS, an exception is thrown
if no install.properties file is found.
"""
isfile_mock.return_value = False
try:
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode", command = "start", config_file = "ranger-namenode-start.json",
stack_version = self.STACK_VERSION, target = RMFTestCase.TARGET_COMMON_SERVICES )
self.fail("Expected a failure since the ranger install.properties was missing")
except Fail:
pass
self.assertTrue(isfile_mock.called)
@patch.object(time, "sleep")
@patch("resource_management.libraries.functions.namenode_ha_utils.get_namenode_states")
def test_upgrade_restart(self, get_namenode_states_mock, sleep_mock):
# Executing with nn_ru_lzo exercises the code path that performs lzo
# installation, which was failing in the RU case. See hdfs.py and the
# lzo_enabled check in it. Simply executing the script is enough to test the fix.
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "restart",
config_file = "nn_ru_lzo.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
# now return unknown and ensure that we cannot proceed with the upgrade since we
# _must_ wait for Safemode to be done
unknown_namenodes = active_namenodes
active_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
try:
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "restart",
config_file = "nn_ru_lzo.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
self.fail("Expected a failure due to the inability to determine Active/Standby NameNode and Safemode detection")
except Fail:
pass
self.assertFalse(0 == len(Script.structuredOut))
self.assertTrue("upgrade_type" in Script.structuredOut)
self.assertTrue("direction" in Script.structuredOut)
self.assertEquals("rolling_upgrade", Script.structuredOut["upgrade_type"])
self.assertEquals("UPGRADE", Script.structuredOut["direction"])
@patch("resource_management.libraries.functions.namenode_ha_utils.get_namenode_states")
def test_upgrade_restart_eu(self, get_namenode_states_mock):
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
mocks_dict = {}
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "restart",
config_file = "nn_eu_standby.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
mocks_dict=mocks_dict)
@patch("hdfs_namenode.is_this_namenode_active")
@patch("resource_management.libraries.functions.setup_ranger_plugin_xml.setup_ranger_plugin")
@patch("utils.get_namenode_states")
@patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
def test_upgrade_restart_eu_with_ranger(self, get_namenode_states_mock, setup_ranger_plugin_mock, is_active_nn_mock):
is_active_nn_mock.return_value = True
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_eu.json"
with open(config_file, "r") as f:
json_content = json.load(f)
version = '2.3.4.0-1111'
json_content['commandParams']['version'] = version
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
mocks_dict = {}
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
command_args=["nonrolling"],
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, None, ''), (0, None)],
mocks_dict=mocks_dict)
self.assertTrue(setup_ranger_plugin_mock.called)
self.assertResourceCalledByIndex(7, 'Execute',
('mv', '/usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh', '/usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh.bak'),
only_if='test -f /usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh',
sudo=True)
def test_pre_upgrade_restart(self):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
with open(config_file, "r") as f:
json_content = json.load(f)
version = '2.2.1.0-3242'
json_content['commandParams']['version'] = version
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "pre_upgrade_restart",
config_dict = json_content,
config_overrides = self.CONFIG_OVERRIDES,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
self.assertResourceCalled('Execute',
('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
self.assertNoMoreResources()
@patch("resource_management.core.shell.call")
def test_pre_upgrade_restart_23(self, call_mock):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
with open(config_file, "r") as f:
json_content = json.load(f)
version = '2.3.0.0-1234'
json_content['commandParams']['version'] = version
mocks_dict = {}
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "pre_upgrade_restart",
config_dict = json_content,
config_overrides = self.CONFIG_OVERRIDES,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
mocks_dict = mocks_dict)
self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
self.assertNoMoreResources()
def test_post_upgrade_restart(self):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
with open(config_file, "r") as f:
json_content = json.load(f)
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "post_upgrade_restart",
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -report -live',
user='hdfs',
tries=60,
try_sleep=10
)
self.assertNoMoreResources()
def test_post_upgrade_ha_restart(self):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/ha_default.json"
with open(config_file, "r") as f:
json_content = json.load(f)
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "post_upgrade_restart",
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -report -live',
user='hdfs',
tries=60,
try_sleep=10
)
self.assertNoMoreResources()
def test_prepare_rolling_upgrade__upgrade(self):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/secured.json"
with open(config_file, "r") as f:
json_content = json.load(f)
json_content['commandParams']['upgrade_direction'] = 'upgrade'
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "prepare_rolling_upgrade",
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, "Safe mode is OFF in c6401.ambari.apache.org")])
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
logoutput = True, user = 'hdfs')
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade prepare',
logoutput = True, user = 'hdfs')
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade query',
logoutput = True, user = 'hdfs')
self.assertNoMoreResources()
def test_prepare_rolling_upgrade__ha_upgrade(self):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/ha_secured.json"
with open(config_file, "r") as f:
json_content = json.load(f)
json_content['commandParams']['upgrade_direction'] = 'upgrade'
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "prepare_rolling_upgrade",
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, "Safe mode is OFF in c6401.ambari.apache.org")])
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
logoutput = True, user = 'hdfs')
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade prepare',
logoutput = True, user = 'hdfs')
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade query',
logoutput = True, user = 'hdfs')
self.assertNoMoreResources()
@patch.object(shell, "call")
def test_prepare_rolling_upgrade__downgrade(self, shell_call_mock):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/secured.json"
with open(config_file, "r") as f:
json_content = json.load(f)
json_content['commandParams']['upgrade_direction'] = 'downgrade'
# Mock safemode_check call
shell_call_mock.return_value = 0, "Safe mode is OFF in c6401.ambari.apache.org"
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "prepare_rolling_upgrade",
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
self.assertResourceCalled('Execute',
'/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
logoutput = True, user = 'hdfs')
self.assertNoMoreResources()
def test_finalize_rolling_upgrade(self):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
with open(config_file, "r") as f:
json_content = json.load(f)
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "finalize_rolling_upgrade",
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade query',
logoutput = True,
user = 'hdfs',
)
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade finalize',
logoutput = True,
user = 'hdfs',
)
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade query',
logoutput = True,
user = 'hdfs',
)
self.assertNoMoreResources()
def test_finalize_ha_rolling_upgrade(self):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/ha_default.json"
with open(config_file, "r") as f:
json_content = json.load(f)
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "finalize_rolling_upgrade",
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade query',
logoutput = True,
user = 'hdfs',
)
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade finalize',
logoutput = True,
user = 'hdfs',
)
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade query',
logoutput = True,
user = 'hdfs',
)
self.assertNoMoreResources()
@patch.object(shell, "call")
def test_pre_upgrade_restart_21_and_lower_params(self, call_mock):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
with open(config_file, "r") as f:
json_content = json.load(f)
json_content['hostLevelParams']['stack_name'] = 'HDP'
json_content['hostLevelParams']['stack_version'] = '2.0'
mocks_dict = {}
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "pre_upgrade_restart",
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
mocks_dict = mocks_dict)
import sys
self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
self.assertEquals("/usr/lib/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
self.assertEquals("/usr/bin", sys.modules["params"].hadoop_bin_dir)
self.assertEquals("/usr/lib/hadoop/sbin", sys.modules["params"].hadoop_bin)
@patch.object(shell, "call")
@patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
def test_pre_upgrade_restart_22_params(self, call_mock):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
with open(config_file, "r") as f:
json_content = json.load(f)
version = '2.2.0.0-1234'
del json_content['commandParams']['version']
json_content['hostLevelParams']['stack_name'] = 'HDP'
json_content['hostLevelParams']['stack_version'] = '2.2'
json_content['commandParams']['version'] = version
mocks_dict = {}
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "pre_upgrade_restart",
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
mocks_dict = mocks_dict)
import sys
self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
self.assertEquals("/usr/hdp/{0}/hadoop/libexec".format(version), sys.modules["params"].hadoop_libexec_dir)
self.assertEquals("/usr/hdp/{0}/hadoop/bin".format(version), sys.modules["params"].hadoop_bin_dir)
self.assertEquals("/usr/hdp/{0}/hadoop/sbin".format(version), sys.modules["params"].hadoop_bin)
@patch.object(shell, "call")
def test_pre_upgrade_restart_23_params(self, call_mock):
import itertools
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
with open(config_file, "r") as f:
json_content = json.load(f)
version = '2.3.0.0-1234'
json_content['commandParams']['version'] = version
json_content['commandParams']['upgrade_direction'] = 'upgrade'
json_content['hostLevelParams']['stack_name'] = 'HDP'
json_content['hostLevelParams']['stack_version'] = '2.3'
mocks_dict = {}
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "pre_upgrade_restart",
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
mocks_dict = mocks_dict)
import sys
self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/bin", sys.modules["params"].hadoop_bin_dir)
self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/sbin", sys.modules["params"].hadoop_bin)
@patch("namenode_upgrade.create_upgrade_marker", MagicMock())
@patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
def test_express_upgrade_skips_safemode_and_directory_creation(self):
"""
Tests that we wait for Safemode to be OFF no matter what except for EU. And, because of that,
EUs don't try to create HDFS resources.
:param self:
:param create_upgrade_marker_mock:
:return:
"""
config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
with open(config_file, "r") as f:
json_content = json.load(f)
version = '2.3.0.0-1234'
json_content['commandParams']['version'] = version
mocks_dict = {}
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
classname = "NameNode",
command = "start",
command_args = ["nonrolling"],
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, None), (0, None, ''), (0, None)],
mocks_dict = mocks_dict)
# jump right to the start of the NN, then verify that we DO NOT call HdfsResource afterwards
self.assertResourceCalledIgnoreEarlier('Execute',
"ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
environment = {'HADOOP_LIBEXEC_DIR':'/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid")
self.assertNoMoreResources()
@patch.object(time, "sleep")
@patch("resource_management.libraries.functions.namenode_ha_utils.get_namenode_states")
def test_namenode_active_detection_works_with_tuples(self, get_namenode_states_mock, sleep_mock):
"""
Checks to ensure that when detecting the NN state, we take into account that both NNs could
be returned with the same state forcing us to iterate over the tuple to find the right one
"""
import params
from hdfs_namenode import is_this_namenode_active
# mock out the NN ID
params.namenode_id = "nn1"
# first test the singular case
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
self.assertTrue(is_this_namenode_active())
# now test the harder tuple
active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070'), ('nn2', 'c6402.ambari.apache.org:50070')]
standby_namenodes = []
unknown_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
self.assertTrue(is_this_namenode_active())
# and the negative for good measure
active_namenodes = []
standby_namenodes = [('nn1', 'c6401.ambari.apache.org:50070'), ('nn2', 'c6402.ambari.apache.org:50070')]
unknown_namenodes = []
get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
self.assertFalse(is_this_namenode_active())
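  # A minimal sketch (an assumption, not the real hdfs_namenode module) of the
  # tuple-aware check exercised above: get_namenode_states() returns lists of
  # (namenode_id, address) pairs, so the check must compare this host's id
  # against the ids inside the active list rather than against the list itself:
  #
  #   def is_this_namenode_active():
  #     active, standby, unknown = get_namenode_states(...)
  #     return any(nn_id == params.namenode_id for nn_id, address in active)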
class Popen_Mock:
  # Stand-in for subprocess.Popen in the balancer tests: the returned mock
  # reports the class-level return code and replays the canned balancer
  # progress lines on stdout, ending with '' to signal EOF.
  return_value = 1
  lines = ['Time Stamp Iteration# Bytes Already Moved Bytes Left To Move Bytes Being Moved\n',
           'Jul 28, 2014 5:01:49 PM 0 0 B 5.74 GB 9.79 GB\n',
           'Jul 28, 2014 5:03:00 PM 1 0 B 5.58 GB 9.79 GB\n',
           '']

  def __call__(self, *args, **kwargs):
    popen = MagicMock()
    popen.returncode = Popen_Mock.return_value
    popen.stdout.readline = MagicMock(side_effect = Popen_Mock.lines)
    return popen
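# A hypothetical usage note (not part of the original suite): balancer tests
# can install an instance of Popen_Mock in place of subprocess.Popen so the
# code under test sees the canned progress lines and exit code, e.g.:
#
#   @patch("subprocess.Popen", new_callable=Popen_Mock)
#   def test_balancer_progress(self, popen_mock):
#     ...run the balancer handler and assert on the parsed output...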
----- trailing dataset columns for the file above -----
avg_line_length: 50.443875 | max_line_length: 249 | alphanum_fraction: 0.595432
size_file_byte: 88,529 | num_lines: 1,754 | num_chars_line_max: 250 | num_chars_line_mean: 50.472634
qsc_* quality-signal columns (schema order, size/line stats broken out above): 9,381; 88,529; 5.42149; 0.056284; 0.076447; 0.018404; 0.021392; 0.91443; 0.90165; 0.895633; 0.888771; 0.883757; 0.87837; 0; 0.015897; 0.285161; 0.787766; 0.021372; 0; 0.781149; 0; 0.036798; 0.311462; 0.151185; 0; 0; 0; 0.00057; 0.156875; 0; null; null; 0.001937; 0.009684; null; null; 0.002582; 0; 0; 0; null; 0; 0; 0; 1; 1; 1; 1; 1; 1; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 1; 0; 0; 0; 0; null; 0; 0; 0; 0; 1; 0; 0; 0; 0; 0; 0; 0; 0; 8

----- next record -----
hexsha: fc2f378f9aa7d2ff451974daff7ac3ac5e8716e2
size: 394
ext: py
lang: Python
max_stars_repo_path: login_universe/__init__.py
max_stars_repo_name: JASchilz/RoverMUD
max_stars_repo_head_hexsha: b99770a7c55cd6951df872793a54bfa260b145f9
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: 4
max_stars_repo_stars_event_min_datetime: 2016-01-01T12:06:26.000Z
max_stars_repo_stars_event_max_datetime: 2020-05-04T02:36:57.000Z
max_issues_repo_path: login_universe/__init__.py
max_issues_repo_name: JASchilz/RoverMUD
max_issues_repo_head_hexsha: b99770a7c55cd6951df872793a54bfa260b145f9
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: login_universe/__init__.py
max_forks_repo_name: JASchilz/RoverMUD
max_forks_repo_head_hexsha: b99770a7c55cd6951df872793a54bfa260b145f9
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
#------------------------------------------------------------------------------
# login_universe/__init__.py
# Copyright 2011 Joseph Schilz
# Licensed under Apache v2
#------------------------------------------------------------------------------
from .universe import init_character
from .universe import backup_data
from .universe import restore_data
from .universe import char_list
----- trailing dataset columns for the file above -----
avg_line_length: 30.307692 | max_line_length: 79 | alphanum_fraction: 0.477157
size_file_byte: 394 | num_lines: 12 | num_chars_line_max: 80 | num_chars_line_mean: 32.833333
qsc_* quality-signal columns (schema order, size/line stats broken out above): 32; 394; 5.59375; 0.625; 0.268156; 0.402235; 0.24581; 0; 0; 0; 0; 0; 0; 0; 0.014085; 0.098985; 0.490141; 0.616751; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 1; 0; true; 0; 1; 0; 1; 0; 0; 0; 0; null; 1; 1; 1; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 1; 0; 0; 0; 0; 0; 0; 0; null; 0; 0; 0; 0; 0; 0; 1; 0; 1; 0; 1; 0; 0; 7

----- next record -----
hexsha: fc52bb4e6f2407e0222a26e8efccb4191d3627ba
size: 73,679
ext: py
lang: Python
max_stars_repo_path: Packages/backrefs/st3/backrefs/uniprops/unidata/generalcategory.py
max_stars_repo_name: aimee5/sublime_packages
max_stars_repo_head_hexsha: 071e3d0a5892e177d7f93365b20ebccb3f60aedd
max_stars_repo_licenses: ["MIT"]
max_stars_count: 2
max_stars_repo_stars_event_min_datetime: 2018-04-24T10:02:26.000Z
max_stars_repo_stars_event_max_datetime: 2019-06-02T13:53:31.000Z
max_issues_repo_path: Packages/backrefs/st3/backrefs/uniprops/unidata/generalcategory.py
max_issues_repo_name: aimee5/sublime_packages
max_issues_repo_head_hexsha: 071e3d0a5892e177d7f93365b20ebccb3f60aedd
max_issues_repo_licenses: ["MIT"]
max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: Packages/backrefs/st3/backrefs/uniprops/unidata/generalcategory.py
max_forks_repo_name: aimee5/sublime_packages
max_forks_repo_head_hexsha: 071e3d0a5892e177d7f93365b20ebccb3f60aedd
max_forks_repo_licenses: ["MIT"]
max_forks_count: 2
max_forks_repo_forks_event_min_datetime: 2019-04-11T04:13:02.000Z
max_forks_repo_forks_event_max_datetime: 2019-06-02T13:53:33.000Z
content:
"""Unicode Properties from Unicode version 6.1.0 (autogen)."""
from __future__ import unicode_literals
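# A hypothetical usage note (not part of the autogenerated table): the
# single-letter keys below are Unicode general-category groups ("c" = Other,
# "l" = Letter, "m" = Mark), keys prefixed with "^" hold precomputed
# complements, and each value is a range string that can be spliced directly
# into a regex character class, e.g.:
#
#   import re
#   lowercase = re.compile(u"[" + unicode_properties["l"]["l"] + u"]")
#   assert lowercase.match(u"a") and not lowercase.match(u"A")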
unicode_properties = {
"c": {
"^": "\u0020-\u007e\u00a0-\u00ac\u00ae-\u05ff\u0605-\u06dc\u06de-\u070e\u0710-\u200a\u2010-\u2029\u202f-\u205f\u2065-\u2069\u2070-\ud7ff\ud801-\udb7e\udb81-\udbfe\udc01-\udffe\ue001-\uf8fe\uf900-\ufefe\uff00-\ufff8\ufffc-\U000110bc\U000110be-\U0001d172\U0001d17b-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000effff\U000f0001-\U000ffffc\U000ffffe-\U000fffff\U00100001-\U0010fffc\U0010fffe-\U0010ffff",
"^c": "\u0020-\u007e\u00a0-\U0010ffff",
"^f": "\u0000-\u00ac\u00ae-\u05ff\u0605-\u06dc\u06de-\u070e\u0710-\u200a\u2010-\u2029\u202f-\u205f\u2065-\u2069\u2070-\ufefe\uff00-\ufff8\ufffc-\U000110bc\U000110be-\U0001d172\U0001d17b-\U000e0000\U000e0002-\U000e001f\U000e0080-\U0010ffff",
"^o": "\u0000-\udfff\ue001-\uf8fe\uf900-\U000effff\U000f0001-\U000ffffc\U000ffffe-\U000fffff\U00100001-\U0010fffc\U0010fffe-\U0010ffff",
"^s": "\u0000-\ud7ff\ud801-\udb7e\udb81-\udbfe\udc01-\udffe\ue000-\U0010ffff",
"c": "\u0000-\u001f\u007f-\u009f",
"f": "\u00ad\u0600-\u0604\u06dd\u070f\u200b-\u200f\u202a-\u202e\u2060-\u2064\u206a-\u206f\ufeff\ufff9-\ufffb\U000110bd\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f",
"o": "\ue000\uf8ff\U000f0000\U000ffffd\U00100000\U0010fffd",
"s": "\ud800\udb7f-\udb80\udbff-\udc00\udfff"
},
"l": {
"^": "\u0000-\u0040\u005b-\u0060\u007b-\u00a9\u00ab-\u00b4\u00b6-\u00b9\u00bb-\u00bf\u00d7\u00f7\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u036f\u0375\u0378-\u0379\u037e-\u0385\u0387\u038b\u038d\u03a2\u03f6\u0482-\u0489\u0528-\u0530\u0557-\u0558\u055a-\u0560\u0588-\u05cf\u05eb-\u05ef\u05f3-\u061f\u064b-\u066d\u0670\u06d4\u06d6-\u06e4\u06e7-\u06ed\u06f0-\u06f9\u06fd-\u06fe\u0700-\u070f\u0711\u0730-\u074c\u07a6-\u07b0\u07b2-\u07c9\u07eb-\u07f3\u07f6-\u07f9\u07fb-\u07ff\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u083f\u0859-\u089f\u08a1\u08ad-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962-\u0970\u0978\u0980-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bc\u09be-\u09cd\u09cf-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0acf\u0ad1-\u0adf\u0ae2-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b70\u0b72-\u0b82\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bcf\u0bd1-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3c\u0c3e-\u0c57\u0c5a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbc\u0cbe-\u0cdd\u0cdf\u0ce2-\u0cf0\u0cf3-\u0d04\u0d0d\u0d11\u0d3b-\u0d3c\u0d3e-\u0d4d\u0d4f-\u0d5f\u0d62-\u0d79\u0d80-\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0e00\u0e31\u0e34-\u0e3f\u0e47-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5\u0ec7-\u0edb\u0ee0-\u0eff\u0f01-\u0f3f\u0f48\u0f6d-\u0f87\u0f8d-\u0fff\u102b-\u103e\u1040-\u104f\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109f\u10c6\u10c8-\u10cc\u10ce-\u10cf\u10fb\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u137f\u1390-\u139f\u13f5-\u1400\u166d-\u166e\u1680\u169b-\u169f\u16eb-\u16ff\u170d\u1712-\u171f\u1732-\u173f\u1752-\u175f\u176d\u1771-\u177f\u17b4-\u17d6\u17d8-\u17db\u17dd-\u181f\u1878-\u187f\u18a9\u18ab-\u18af\u18f6-\u18ff\u191d-\u194f\u196e-\u196f\u1975-\u197f\u19ac-\u19c0\u19c8-\u19ff\u1a17-\u1a1f\u1a55-\u1aa6\u1aa8-\u1b04\u1b34-\u1b44\u1b4c-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bff\u1c24-\u1c4c\u1c50-\u1c59\u1c7e-\u1ce8\u1ced\u1cf2-\u1cf4\u1cf7-\u1cff\u1dc0-\u1dff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2070\u2072-\u207e\u2080-\u208f\u209d-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144\u214a-\u214d\u214f-\u2182\u2185-\u2bff\u2c2f\u2c5f\u2ce5-\u2cea\u2cef-\u2cf1\u2cf4-\u2cff\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d70-\u2d7f\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf-\u2e2e\u2e30-\u3004\u3007-\u3030\u3036-\u303a\u303d-\u3040\u3097-\u309c\u30a0\u30fb\u3100-\u3104\u312e-\u3130\u318f-\u319f\u31bb-\u31ef\u3200-\u33ff\u3401-\u4db4\u4db6-\u4dff\u4e01-\u9fcb\u9fcd-\u9fff\ua48d-\ua4cf\ua4fe-\ua4ff\ua60d-\ua60f\ua620-\ua629\ua62c-\ua63f\ua66f-\ua67e\ua698-\ua69f\ua6e6-\ua716\ua720-\ua721\ua789-\ua78a\ua78f\ua794-\ua79f\ua7ab-\ua7f7\ua802\ua806\ua80b\ua823-\ua83f\ua874-\ua881\ua8b4-\ua8f1\ua8f8-\ua8fa\ua8fc-\ua909\ua926-\ua92f\ua947-\ua95f\ua97d-\ua983\ua9b3-\ua9ce\ua9d0-\ua9ff\uaa29-\uaa3f\uaa43\uaa4c-\uaa5f\uaa77-\uaa79\uaa7b-
\uaa7f\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaac3-\uaada\uaade-\uaadf\uaaeb-\uaaf1\uaaf5-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f-\uabbf\uabe3-\uabff\uac01-\ud7a2\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\uf8ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb1e\ufb29\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbb2-\ufbd2\ufd3e-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfc-\ufe6f\ufe75\ufefd-\uff20\uff3b-\uff40\uff5b-\uff65\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102ff\U0001031f-\U0001032f\U00010341\U0001034a-\U0001037f\U0001039e-\U0001039f\U000103c4-\U000103c7\U000103d0-\U000103ff\U0001049e-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856-\U000108ff\U00010916-\U0001091f\U0001093a-\U0001097f\U000109b8-\U000109bd\U000109c0-\U000109ff\U00010a01-\U00010a0f\U00010a14\U00010a18\U00010a34-\U00010a5f\U00010a7d-\U00010aff\U00010b36-\U00010b3f\U00010b56-\U00010b5f\U00010b73-\U00010bff\U00010c49-\U00011002\U00011038-\U00011082\U000110b0-\U000110cf\U000110e9-\U00011102\U00011127-\U00011182\U000111b3-\U000111c0\U000111c5-\U0001167f\U000116ab-\U00011fff\U0001236f-\U00012fff\U0001342f-\U000167ff\U00016a39-\U00016eff\U00016f45-\U00016f4f\U00016f51-\U00016f92\U00016fa0-\U0001afff\U0001b002-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001d7cc-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001ffff\U00020001-\U0002a6d5\U0002a6d7-\U0002a6ff\U0002a701-\U0002b733\U0002b735-\U0002b73f\U0002b741-\U0002b81c\U0002b81e-\U0002f7ff\U0002fa1e-\U0010ffff",
"^c": "\u0000-\u0040\u005b-\u0060\u007b-\u00b4\u00b6-\u00bf\u00d7\u00f7\u01bb\u01c0-\u01c3\u0294\u02b0-\u036f\u0374-\u0375\u0378-\u037a\u037e-\u0385\u0387\u038b\u038d\u03a2\u03f6\u0482-\u0489\u0528-\u0530\u0557-\u0560\u0588-\u109f\u10c6\u10c8-\u10cc\u10ce-\u1cff\u1d2c-\u1d6a\u1d78\u1d9b-\u1dff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118\u211e-\u2123\u2125\u2127\u2129\u212e\u2135-\u2138\u213a-\u213b\u2140-\u2144\u214a-\u214d\u214f-\u2182\u2185-\u2bff\u2c2f\u2c5f\u2c7c-\u2c7d\u2ce5-\u2cea\u2cef-\u2cf1\u2cf4-\u2cff\u2d26\u2d28-\u2d2c\u2d2e-\ua63f\ua66e-\ua67f\ua698-\ua721\ua770\ua788-\ua78a\ua78f\ua794-\ua79f\ua7ab-\ua7f9\ua7fb-\ufaff\ufb07-\ufb12\ufb18-\uff20\uff3b-\uff40\uff5b-\U000103ff\U00010450-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001d7cc-\U0010ffff",
"^l": "\u0000-\u0060\u007b-\u00b4\u00b6-\u00de\u00f7\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bb-\u01bc\u01c0-\u01c5\u01c7-\u01c8\u01ca-\u01cb\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1-\u01f2\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0294\u02b0-\u0370\u0372\u0374-\u0376\u0378-\u037a\u037e-\u038f\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f6-\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u0482-\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528-\u0560\u0588-\u1cff\u1d2c-\u1d6a\u1d78\u1d9b-\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f16-\u1f1f\u1f28-\u1f2f\u1f38-\u1f3f\u1f46-\u1f4f\u1f58-\u1f5f\u1f68-\u1f6f\u1f7e-\u1f7f\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fb5\u1fb8-\u1fbd\u1fbf-\u1fc1\u1fc5\u1fc8-\u1fcf\u1fd4-\u1fd5\u1fd8-\u1fdf\u1fe8-\u1ff1\u1ff5\u1ff8-\u2109\u210b-\u210d\u2110-\u2112\u2114-\u212e\u2130-\u2133\u2135-\u2138\u213a-\u213b\u213e-\u2145\u214a-\u214d\u214f-\u2183\u2185-\u2c2f\u2c5f-\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7c-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ce5-\u2ceb\u2ced\u2cef-\u2cf2\u2cf4-\u2cff\u2d26\u2d28-\u2d2c\u2d2e-\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua
65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua66e-\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698-\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua770\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua788-\ua78b\ua78d\ua78f-\ua790\ua792\ua794-\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7f9\ua7fb-\ufaff\ufb07-\ufb12\ufb18-\uff40\uff5b-\U00010427\U00010450-\U0001d419\U0001d434-\U0001d44d\U0001d455\U0001d468-\U0001d481\U0001d49c-\U0001d4b5\U0001d4ba\U0001d4bc\U0001d4c4\U0001d4d0-\U0001d4e9\U0001d504-\U0001d51d\U0001d538-\U0001d551\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a6-\U0001d6c1\U0001d6db\U0001d6e2-\U0001d6fb\U0001d715\U0001d71c-\U0001d735\U0001d74f\U0001d756-\U0001d76f\U0001d789\U0001d790-\U0001d7a9\U0001d7c3\U0001d7ca\U0001d7cc-\U0010ffff",
"^m": "\u0000-\u02af\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u0373\u0375-\u0379\u037b-\u0558\u055a-\u063f\u0641-\u06e4\u06e7-\u07f3\u07f6-\u07f9\u07fb-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u0970\u0972-\u0e45\u0e47-\u0ec5\u0ec7-\u10fb\u10fd-\u17d6\u17d8-\u1842\u1844-\u1aa6\u1aa8-\u1c77\u1c7e-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1dc0-\u2070\u2072-\u207e\u2080-\u208f\u209d-\u2c7b\u2c7e-\u2d6e\u2d70-\u2e2e\u2e30-\u3004\u3006-\u3030\u3036-\u303a\u303c-\u309c\u309f-\u30fb\u30ff-\ua014\ua016-\ua4f7\ua4fe-\ua60b\ua60d-\ua67e\ua680-\ua716\ua720-\ua76f\ua771-\ua787\ua789-\ua7f7\ua7fa-\ua9ce\ua9d0-\uaa6f\uaa71-\uaadc\uaade-\uaaf2\uaaf5-\uff6f\uff71-\uff9d\uffa0-\U00016f92\U00016fa0-\U0010ffff",
"^o": "\u0000-\u00a9\u00ab-\u00b9\u00bb-\u01ba\u01bc-\u01bf\u01c4-\u0293\u0295-\u05cf\u05eb-\u05ef\u05f3-\u061f\u0640\u064b-\u066d\u0670\u06d4\u06d6-\u06ed\u06f0-\u06f9\u06fd-\u06fe\u0700-\u070f\u0711\u0730-\u074c\u07a6-\u07b0\u07b2-\u07c9\u07eb-\u07ff\u0816-\u083f\u0859-\u089f\u08a1\u08ad-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962-\u0971\u0978\u0980-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bc\u09be-\u09cd\u09cf-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0acf\u0ad1-\u0adf\u0ae2-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b70\u0b72-\u0b82\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bcf\u0bd1-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3c\u0c3e-\u0c57\u0c5a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbc\u0cbe-\u0cdd\u0cdf\u0ce2-\u0cf0\u0cf3-\u0d04\u0d0d\u0d11\u0d3b-\u0d3c\u0d3e-\u0d4d\u0d4f-\u0d5f\u0d62-\u0d79\u0d80-\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0e00\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0edb\u0ee0-\u0eff\u0f01-\u0f3f\u0f48\u0f6d-\u0f87\u0f8d-\u0fff\u102b-\u103e\u1040-\u104f\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u10cf\u10fb-\u10fc\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u137f\u1390-\u139f\u13f5-\u1400\u166d-\u166e\u1680\u169b-\u169f\u16eb-\u16ff\u170d\u1712-\u171f\u1732-\u173f\u1752-\u175f\u176d\u1771-\u177f\u17b4-\u17db\u17dd-\u181f\u1843\u1878-\u187f\u18a9\u18ab-\u18af\u18f6-\u18ff\u191d-\u194f\u196e-\u196f\u1975-\u197f\u19ac-\u19c0\u19c8-\u19ff\u1a17-\u1a1f\u1a55-\u1b04\u1b34-\u1b44\u1b4c-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bff\u1c24-\u1c4c\u1c50-\u1c59\u1c78-\u1ce8\u1ced\u1cf2-\u1cf4\u1cf7-\u2134\u2139-\u2d2f\u2d68-\u2d7f\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf-\u3005\u3007-\u303b\u303d-\u3040\u3097-\u309e\u30a0\u30fb-\u30fe\u3100-\u3104\u312e-\u3130\u318f-\u319f\u31bb-\u31ef\u3200-\u33ff\u3401-\u4db4\u4db6-\u4dff\u4e01-\u9fcb\u9fcd-\u9fff\ua015\ua48d-\ua4cf\ua4f8-\ua4ff\ua60c-\ua60f\ua620-\ua629\ua62c-\ua66d\ua66f-\ua69f\ua6e6-\ua7fa\ua802\ua806\ua80b\ua823-\ua83f\ua874-\ua881\ua8b4-\ua8f1\ua8f8-\ua8fa\ua8fc-\ua909\ua926-\ua92f\ua947-\ua95f\ua97d-\ua983\ua9b3-\ua9ff\uaa29-\uaa3f\uaa43\uaa4c-\uaa5f\uaa70\uaa77-\uaa79\uaa7b-\uaa7f\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaac3-\uaada\uaadd-\uaadf\uaaeb-\uaaf1\uaaf3-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f-\uabbf\uabe3-\uabff\uac01-\ud7a2\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\uf8ff\ufa6e-\ufa6f\ufada-\ufb1c\ufb1e\ufb29\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbb2-\ufbd2\ufd3e-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfc-\ufe6f\ufe75\ufefd-\uff65\uff70\uff9e-\uff9f\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102ff\U0001031f-\U0001032f\U00010341\U0001034a-\U0001037f\U0001039e-\U0001039f\U000103c4-\U000103c7\U000103d0-\U0001044f\U0001049e-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856-\U000108ff\U00010916-\U0001091f\U0001093
a-\U0001097f\U000109b8-\U000109bd\U000109c0-\U000109ff\U00010a01-\U00010a0f\U00010a14\U00010a18\U00010a34-\U00010a5f\U00010a7d-\U00010aff\U00010b36-\U00010b3f\U00010b56-\U00010b5f\U00010b73-\U00010bff\U00010c49-\U00011002\U00011038-\U00011082\U000110b0-\U000110cf\U000110e9-\U00011102\U00011127-\U00011182\U000111b3-\U000111c0\U000111c5-\U0001167f\U000116ab-\U00011fff\U0001236f-\U00012fff\U0001342f-\U000167ff\U00016a39-\U00016eff\U00016f45-\U00016f4f\U00016f51-\U0001afff\U0001b002-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001ffff\U00020001-\U0002a6d5\U0002a6d7-\U0002a6ff\U0002a701-\U0002b733\U0002b735-\U0002b73f\U0002b741-\U0002b81c\U0002b81e-\U0002f7ff\U0002fa1e-\U0010ffff",
"^t": "\u0000-\u01c4\u01c6-\u01c7\u01c9-\u01ca\u01cc-\u01f1\u01f3-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fbb\u1fbd-\u1fcb\u1fcd-\u1ffb\u1ffd-\U0010ffff",
"^u": "\u0000-\u0040\u005b-\u00bf\u00d7\u00df-\u00ff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01bb\u01bd-\u01c3\u01c5-\u01c6\u01c8-\u01c9\u01cb-\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f2-\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u036f\u0371\u0373-\u0375\u0377-\u0385\u0387\u038b\u038d\u0390\u03a2\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5-\u03f6\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481-\u0489\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527-\u0530\u0557-\u109f\u10c6\u10c8-\u10cc\u10ce-\u1dff\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f17\u1f1e-\u1f27\u1f30-\u1f37\u1f40-\u1f47\u1f4e-\u1f58\u1f5a\u1f5c\u1f5e\u1f60-\u1f67\u1f70-\u1fb7\u1fbc-\u1fc7\u1fcc-\u1fd7\u1fdc-\u1fe7\u1fed-\u1ff7\u1ffc-\u2101\u2103-\u2106\u2108-\u210a\u210e-\u210f\u2113-\u2114\u2116-\u2118\u211e-\u2123\u2125\u2127\u2129\u212e-\u212f\u2134-\u213d\u2140-\u2144\u2146-\u2182\u2184-\u2bff\u2c2f-\u2c5f\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7d\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2cea\u2cec\u2cee-\u2cf1\u2cf3-\ua63f\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d-\ua67f\ua681\ua683\ua685
\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697-\ua721\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787-\ua78a\ua78c\ua78e-\ua78f\ua791\ua793-\ua79f\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7ab-\uff20\uff3b-\U000103ff\U00010428-\U0001d3ff\U0001d41a-\U0001d433\U0001d44e-\U0001d467\U0001d482-\U0001d49b\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4b6-\U0001d4cf\U0001d4ea-\U0001d503\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d-\U0001d537\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a7\U0001d6c1-\U0001d6e1\U0001d6fb-\U0001d71b\U0001d735-\U0001d755\U0001d76f-\U0001d78f\U0001d7a9-\U0001d7c9\U0001d7cb-\U0010ffff",
"c": "\u0041-\u005a\u0061-\u007a\u00b5\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u01ba\u01bc-\u01bf\u01c4-\u0293\u0295-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u0527\u0531-\u0556\u0561-\u0587\u10a0-\u10c5\u10c7\u10cd\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2102\u2107\u210a-\u2113\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2134\u2139\u213c-\u213f\u2145-\u2149\u214e\u2183-\u2184\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2c7b\u2c7e-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua640-\ua66d\ua680-\ua697\ua722-\ua76f\ua771-\ua787\ua78b-\ua78e\ua790-\ua793\ua7a0-\ua7aa\ua7fa\ufb00-\ufb06\ufb13-\ufb17\uff21-\uff3a\uff41-\uff5a\U00010400-\U0001044f\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb",
"l": "\u0061-\u007a\u00b5\u00df-\u00f6\u00f8-\u00ff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0561-\u0587\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua6
97\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7fa\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\U00010428-\U0001044f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb",
"m": "\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\uaa70\uaadd\uaaf3-\uaaf4\uff70\uff9e-\uff9f\U00016f93-\U00016f9f",
"o": "\u00aa\u00ba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05f0-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u08a0\u08a2-\u08ac\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0977\u0979-\u097f\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d\u0c58-\u0c59\u0c60-\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10d0-\u10fa\u10fd-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191c\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19c1-\u19c7\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400\u4db5\u4e00\u9fcc\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa80-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031e\U0001033
0-\U00010340\U00010342-\U00010349\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a33\U00010a60-\U00010a7c\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010c00-\U00010c48\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011183-\U000111b2\U000111c1-\U000111c4\U00011680-\U000116aa\U00012000-\U0001236e\U00013000-\U0001342e\U00016800-\U00016a38\U00016f00-\U00016f44\U00016f50\U0001b000-\U0001b001\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000\U0002a6d6\U0002a700\U0002b734\U0002b740\U0002b81d\U0002f800-\U0002fa1d",
"t": "\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc",
"u": "\u0041-\u005a\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a
\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa\uff21-\uff3a\U00010400-\U00010427\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca"
},
"m": {
"^": "\u0000-\u02ff\u0370-\u0482\u048a-\u0590\u05be\u05c0\u05c3\u05c6\u05c8-\u060f\u061b-\u064a\u0660-\u066f\u0671-\u06d5\u06dd-\u06de\u06e5-\u06e6\u06e9\u06ee-\u0710\u0712-\u072f\u074b-\u07a5\u07b1-\u07ea\u07f4-\u0815\u081a\u0824\u0828\u082e-\u0858\u085c-\u08e3\u08ff\u0904-\u0939\u093d\u0950\u0958-\u0961\u0964-\u0980\u0984-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09e1\u09e4-\u0a00\u0a04-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a6f\u0a72-\u0a74\u0a76-\u0a80\u0a84-\u0abb\u0abd\u0ac6\u0aca\u0ace-\u0ae1\u0ae4-\u0b00\u0b04-\u0b3b\u0b3d\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b61\u0b64-\u0b81\u0b83-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0c00\u0c04-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c61\u0c64-\u0c81\u0c84-\u0cbb\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0ce1\u0ce4-\u0d01\u0d04-\u0d3d\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d61\u0d64-\u0d81\u0d84-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0df1\u0df4-\u0e30\u0e32-\u0e33\u0e3b-\u0e46\u0e4f-\u0eb0\u0eb2-\u0eb3\u0eba\u0ebd-\u0ec7\u0ece-\u0f17\u0f1a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f40-\u0f70\u0f85\u0f88-\u0f8c\u0f98\u0fbd-\u0fc5\u0fc7-\u102a\u103f-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1090-\u1099\u109e-\u135c\u1360-\u1711\u1715-\u1731\u1735-\u1751\u1754-\u1771\u1774-\u17b3\u17d4-\u17dc\u17de-\u180a\u180e-\u18a8\u18aa-\u191f\u192c-\u192f\u193c-\u19af\u19c1-\u19c7\u19ca-\u1a16\u1a1c-\u1a54\u1a5f\u1a7d-\u1a7e\u1a80-\u1aff\u1b05-\u1b33\u1b45-\u1b6a\u1b74-\u1b7f\u1b83-\u1ba0\u1bae-\u1be5\u1bf4-\u1c23\u1c38-\u1ccf\u1cd3\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1dbf\u1de7-\u1dfb\u1e00-\u20cf\u20f1-\u2cee\u2cf2-\u2d7e\u2d80-\u2ddf\u2e00-\u3029\u3030-\u3098\u309b-\ua66e\ua673\ua67e-\ua69e\ua6a0-\ua6ef\ua6f2-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua828-\ua87f\ua882-\ua8b3\ua8c5-\ua8df\ua8f2-\ua925\ua92e-\ua946\ua954-\ua97f\ua984-\ua9b2\ua9c1-\uaa28\uaa37-\uaa42\uaa44-\uaa4b\uaa4e-\uaa7a\uaa7c-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2-\uaaea\uaaf0-\uaaf4\uaaf7-\uabe2\uabeb\uabee-\ufb1d\ufb1f-\ufdff\ufe10-\ufe1f\ufe27-\U000101fc\U000101fe-\U00010a00\U00010a04\U00010a07-\U00010a0b\U00010a10-\U00010a37\U00010a3b-\U00010a3e\U00010a40-\U00010fff\U00011003-\U00011037\U00011047-\U0001107f\U00011083-\U000110af\U000110bb-\U000110ff\U00011103-\U00011126\U00011135-\U0001117f\U00011183-\U000111b2\U000111c1-\U000116aa\U000116b8-\U00016f50\U00016f7f-\U00016f8e\U00016f93-\U0001d164\U0001d16a-\U0001d16c\U0001d173-\U0001d17a\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d241\U0001d245-\U000e00ff\U000e01f0-\U0010ffff",
"^c": "\u0000-\u0902\u0904-\u093a\u093c-\u093d\u0941-\u0948\u094d\u0950-\u0981\u0984-\u09bd\u09c1-\u09c6\u09c9-\u09ca\u09cd-\u09d6\u09d8-\u0a02\u0a04-\u0a3d\u0a41-\u0a82\u0a84-\u0abd\u0ac1-\u0ac8\u0aca\u0acd-\u0b01\u0b04-\u0b3d\u0b3f\u0b41-\u0b46\u0b49-\u0b4a\u0b4d-\u0b56\u0b58-\u0bbd\u0bc0\u0bc3-\u0bc5\u0bc9\u0bcd-\u0bd6\u0bd8-\u0c00\u0c04-\u0c40\u0c45-\u0c81\u0c84-\u0cbd\u0cbf\u0cc5-\u0cc6\u0cc9\u0ccc-\u0cd4\u0cd7-\u0d01\u0d04-\u0d3d\u0d41-\u0d45\u0d49\u0d4d-\u0d56\u0d58-\u0d81\u0d84-\u0dce\u0dd2-\u0dd7\u0de0-\u0df1\u0df4-\u0f3d\u0f40-\u0f7e\u0f80-\u102a\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u1055\u1058-\u1061\u1065-\u1066\u106e-\u1082\u1085-\u1086\u108d-\u108e\u1090-\u1099\u109d-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u1922\u1927-\u1928\u192c-\u192f\u1932\u1939-\u19af\u19c1-\u19c7\u19ca-\u1a18\u1a1c-\u1a54\u1a56\u1a58-\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1b03\u1b05-\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b45-\u1b81\u1b83-\u1ba0\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab\u1bae-\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1bf4-\u1c23\u1c2c-\u1c33\u1c36-\u1ce0\u1ce2-\u1cf1\u1cf4-\u302d\u3030-\ua822\ua825-\ua826\ua828-\ua87f\ua882-\ua8b3\ua8c4-\ua951\ua954-\ua982\ua984-\ua9b3\ua9b6-\ua9b9\ua9bc\ua9c1-\uaa2e\uaa31-\uaa32\uaa35-\uaa4c\uaa4e-\uaa7a\uaa7c-\uaaea\uaaec-\uaaed\uaaf0-\uaaf4\uaaf6-\uabe2\uabe5\uabe8\uabeb\uabed-\U00010fff\U00011001\U00011003-\U00011081\U00011083-\U000110af\U000110b3-\U000110b6\U000110b9-\U0001112b\U0001112d-\U00011181\U00011183-\U000111b2\U000111b6-\U000111be\U000111c1-\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7-\U00016f50\U00016f7f-\U0001d164\U0001d167-\U0001d16c\U0001d173-\U0010ffff",
"^e": "\u0000-\u0487\u048a-\u20dc\u20e1\u20e5-\ua66f\ua673-\U0010ffff",
"^n": "\u0000-\u02ff\u0370-\u0482\u0488-\u0590\u05be\u05c0\u05c3\u05c6\u05c8-\u060f\u061b-\u064a\u0660-\u066f\u0671-\u06d5\u06dd-\u06de\u06e5-\u06e6\u06e9\u06ee-\u0710\u0712-\u072f\u074b-\u07a5\u07b1-\u07ea\u07f4-\u0815\u081a\u0824\u0828\u082e-\u0858\u085c-\u08e3\u08ff\u0903-\u0939\u093b\u093d-\u0940\u0949-\u094c\u094e-\u0950\u0958-\u0961\u0964-\u0980\u0982-\u09bb\u09bd-\u09c0\u09c5-\u09cc\u09ce-\u09e1\u09e4-\u0a00\u0a03-\u0a3b\u0a3d-\u0a40\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a6f\u0a72-\u0a74\u0a76-\u0a80\u0a83-\u0abb\u0abd-\u0ac0\u0ac6\u0ac9-\u0acc\u0ace-\u0ae1\u0ae4-\u0b00\u0b02-\u0b3b\u0b3d-\u0b3e\u0b40\u0b45-\u0b4c\u0b4e-\u0b55\u0b57-\u0b61\u0b64-\u0b81\u0b83-\u0bbf\u0bc1-\u0bcc\u0bce-\u0c3d\u0c41-\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c61\u0c64-\u0cbb\u0cbd-\u0cbe\u0cc0-\u0cc5\u0cc7-\u0ccb\u0cce-\u0ce1\u0ce4-\u0d40\u0d45-\u0d4c\u0d4e-\u0d61\u0d64-\u0dc9\u0dcb-\u0dd1\u0dd5\u0dd7-\u0e30\u0e32-\u0e33\u0e3b-\u0e46\u0e4f-\u0eb0\u0eb2-\u0eb3\u0eba\u0ebd-\u0ec7\u0ece-\u0f17\u0f1a-\u0f34\u0f36\u0f38\u0f3a-\u0f70\u0f7f\u0f85\u0f88-\u0f8c\u0f98\u0fbd-\u0fc5\u0fc7-\u102c\u1031\u1038\u103b-\u103c\u103f-\u1057\u105a-\u105d\u1061-\u1070\u1075-\u1081\u1083-\u1084\u1087-\u108c\u108e-\u109c\u109e-\u135c\u1360-\u1711\u1715-\u1731\u1735-\u1751\u1754-\u1771\u1774-\u17b3\u17b6\u17be-\u17c5\u17c7-\u17c8\u17d4-\u17dc\u17de-\u180a\u180e-\u18a8\u18aa-\u191f\u1923-\u1926\u1929-\u1931\u1933-\u1938\u193c-\u1a16\u1a19-\u1a55\u1a57\u1a5f\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1a7d-\u1a7e\u1a80-\u1aff\u1b04-\u1b33\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b6a\u1b74-\u1b7f\u1b82-\u1ba1\u1ba6-\u1ba7\u1baa\u1bac-\u1be5\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1c2b\u1c34-\u1c35\u1c38-\u1ccf\u1cd3\u1ce1\u1ce9-\u1cec\u1cee-\u1cf3\u1cf5-\u1dbf\u1de7-\u1dfb\u1e00-\u20cf\u20dd-\u20e0\u20e2-\u20e4\u20f1-\u2cee\u2cf2-\u2d7e\u2d80-\u2ddf\u2e00-\u3029\u302e-\u3098\u309b-\ua66e\ua670-\ua673\ua67e-\ua69e\ua6a0-\ua6ef\ua6f2-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua824\ua827-\ua8c3\ua8c5-\ua8df\ua8f2-\ua925\ua92e-\ua946\ua952-\ua97f\ua983-\ua9b2\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\uaa28\uaa2f-\uaa30\uaa33-\uaa34\uaa37-\uaa42\uaa44-\uaa4b\uaa4d-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2-\uaaeb\uaaee-\uaaf5\uaaf7-\uabe4\uabe6-\uabe7\uabe9-\uabec\uabee-\ufb1d\ufb1f-\ufdff\ufe10-\ufe1f\ufe27-\U000101fc\U000101fe-\U00010a00\U00010a04\U00010a07-\U00010a0b\U00010a10-\U00010a37\U00010a3b-\U00010a3e\U00010a40-\U00011000\U00011002-\U00011037\U00011047-\U0001107f\U00011082-\U000110b2\U000110b7-\U000110b8\U000110bb-\U000110ff\U00011103-\U00011126\U0001112c\U00011135-\U0001117f\U00011182-\U000111b5\U000111bf-\U000116aa\U000116ac\U000116ae-\U000116af\U000116b6\U000116b8-\U00016f8e\U00016f93-\U0001d166\U0001d16a-\U0001d17a\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d241\U0001d245-\U000e00ff\U000e01f0-\U0010ffff",
"c": "\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u19b0-\u19c0\u19c8-\u19c9\u1a19-\u1a1b\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1bac-\u1bad\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U000116ac\U000116ae-\U000116af\U000116b6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172",
"e": "\u0488-\u0489\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672",
"n": "\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08e4-\u08fe\u0900-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1dc0-\u1de6\u1dfc-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\U000101fd\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00011001\U00011038-\U00011046\U00011080-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011180-\U00011181\U000111b6-\U000111be\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U00016f8f-\U00016f92\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U000e0100-\U000e01ef"
},
"n": {
"^": "\u0000-\u002f\u003a-\u00b1\u00b4-\u00b8\u00ba-\u00bb\u00bf-\u065f\u066a-\u06ef\u06fa-\u07bf\u07ca-\u0965\u0970-\u09e5\u09f0-\u09f3\u09fa-\u0a65\u0a70-\u0ae5\u0af0-\u0b65\u0b70-\u0b71\u0b78-\u0be5\u0bf3-\u0c65\u0c70-\u0c77\u0c7f-\u0ce5\u0cf0-\u0d65\u0d76-\u0e4f\u0e5a-\u0ecf\u0eda-\u0f1f\u0f34-\u103f\u104a-\u108f\u109a-\u1368\u137d-\u16ed\u16f1-\u17df\u17ea-\u17ef\u17fa-\u180f\u181a-\u1945\u1950-\u19cf\u19db-\u1a7f\u1a8a-\u1a8f\u1a9a-\u1b4f\u1b5a-\u1baf\u1bba-\u1c3f\u1c4a-\u1c4f\u1c5a-\u206f\u2071-\u2073\u207a-\u207f\u208a-\u214f\u2183-\u2184\u218a-\u245f\u249c-\u24e9\u2500-\u2775\u2794-\u2cfc\u2cfe-\u3006\u3008-\u3020\u302a-\u3037\u303b-\u3191\u3196-\u321f\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\ua61f\ua62a-\ua6e5\ua6f0-\ua82f\ua836-\ua8cf\ua8da-\ua8ff\ua90a-\ua9cf\ua9da-\uaa4f\uaa5a-\uabef\uabfa-\uff0f\uff1a-\U00010106\U00010134-\U0001013f\U00010179-\U00010189\U0001018b-\U0001031f\U00010324-\U00010340\U00010342-\U00010349\U0001034b-\U000103d0\U000103d6-\U0001049f\U000104aa-\U00010857\U00010860-\U00010915\U0001091c-\U00010a3f\U00010a48-\U00010a7c\U00010a7f-\U00010b57\U00010b60-\U00010b77\U00010b80-\U00010e5f\U00010e7f-\U00011051\U00011070-\U000110ef\U000110fa-\U00011135\U00011140-\U000111cf\U000111da-\U000116bf\U000116ca-\U000123ff\U00012463-\U0001d35f\U0001d372-\U0001d7cd\U0001d800-\U0001f0ff\U0001f10b-\U0010ffff",
"^d": "\u0000-\u002f\u003a-\u065f\u066a-\u06ef\u06fa-\u07bf\u07ca-\u0965\u0970-\u09e5\u09f0-\u0a65\u0a70-\u0ae5\u0af0-\u0b65\u0b70-\u0be5\u0bf0-\u0c65\u0c70-\u0ce5\u0cf0-\u0d65\u0d70-\u0e4f\u0e5a-\u0ecf\u0eda-\u0f1f\u0f2a-\u103f\u104a-\u108f\u109a-\u17df\u17ea-\u180f\u181a-\u1945\u1950-\u19cf\u19da-\u1a7f\u1a8a-\u1a8f\u1a9a-\u1b4f\u1b5a-\u1baf\u1bba-\u1c3f\u1c4a-\u1c4f\u1c5a-\ua61f\ua62a-\ua8cf\ua8da-\ua8ff\ua90a-\ua9cf\ua9da-\uaa4f\uaa5a-\uabef\uabfa-\uff0f\uff1a-\U0001049f\U000104aa-\U00011065\U00011070-\U000110ef\U000110fa-\U00011135\U00011140-\U000111cf\U000111da-\U000116bf\U000116ca-\U0001d7cd\U0001d800-\U0010ffff",
"^l": "\u0000-\u16ed\u16f1-\u215f\u2183-\u2184\u2189-\u3006\u3008-\u3020\u302a-\u3037\u303b-\ua6e5\ua6f0-\U0001013f\U00010175-\U00010340\U00010342-\U00010349\U0001034b-\U000103d0\U000103d6-\U000123ff\U00012463-\U0010ffff",
"^o": "\u0000-\u00b1\u00b4-\u00b8\u00ba-\u00bb\u00bf-\u09f3\u09fa-\u0b71\u0b78-\u0bef\u0bf3-\u0c77\u0c7f-\u0d6f\u0d76-\u0f29\u0f34-\u1368\u137d-\u17ef\u17fa-\u19d9\u19db-\u206f\u2071-\u2073\u207a-\u207f\u208a-\u214f\u2160-\u2188\u218a-\u245f\u249c-\u24e9\u2500-\u2775\u2794-\u2cfc\u2cfe-\u3191\u3196-\u321f\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\ua82f\ua836-\U00010106\U00010134-\U00010174\U00010179-\U00010189\U0001018b-\U0001031f\U00010324-\U00010857\U00010860-\U00010915\U0001091c-\U00010a3f\U00010a48-\U00010a7c\U00010a7f-\U00010b57\U00010b60-\U00010b77\U00010b80-\U00010e5f\U00010e7f-\U00011051\U00011066-\U0001d35f\U0001d372-\U0001f0ff\U0001f10b-\U0010ffff",
"d": "\u0030-\u0039\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19\U000104a0-\U000104a9\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000116c0-\U000116c9\U0001d7ce-\U0001d7ff",
"l": "\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U00012462",
"o": "\u00b2-\u00b3\u00b9\u00bc-\u00be\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d70-\u0d75\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835\U00010107-\U00010133\U00010175-\U00010178\U0001018a\U00010320-\U00010323\U00010858-\U0001085f\U00010916-\U0001091b\U00010a40-\U00010a47\U00010a7d-\U00010a7e\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010e60-\U00010e7e\U00011052-\U00011065\U0001d360-\U0001d371\U0001f100-\U0001f10a"
},
"p": {
"^": "\u0000-\u0020\u0024\u002b\u0030-\u0039\u003c-\u003e\u0041-\u005a\u005e\u0060-\u007a\u007c\u007e-\u00a0\u00a2-\u00a6\u00a8-\u00aa\u00ac-\u00b5\u00b8-\u00ba\u00bc-\u00be\u00c0-\u037d\u037f-\u0386\u0388-\u0559\u0560-\u0588\u058b-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7-\u05f2\u05f5-\u0608\u060b\u060e-\u061a\u061c-\u061d\u0620-\u0669\u066e-\u06d3\u06d5-\u06ff\u070e-\u07f6\u07fa-\u082f\u083f-\u085d\u085f-\u0963\u0966-\u096f\u0971-\u0aef\u0af1-\u0df3\u0df5-\u0e4e\u0e50-\u0e59\u0e5c-\u0f03\u0f13\u0f15-\u0f39\u0f3e-\u0f84\u0f86-\u0fcf\u0fd5-\u0fd8\u0fdb-\u1049\u1050-\u10fa\u10fc-\u135f\u1369-\u13ff\u1401-\u166c\u166f-\u169a\u169d-\u16ea\u16ee-\u1734\u1737-\u17d3\u17d7\u17db-\u17ff\u180b-\u1943\u1946-\u1a1d\u1a20-\u1a9f\u1aa7\u1aae-\u1b59\u1b61-\u1bfb\u1c00-\u1c3a\u1c40-\u1c7d\u1c80-\u1cbf\u1cc8-\u1cd2\u1cd4-\u200f\u2028-\u202f\u2044\u2052\u205f-\u207c\u207f-\u208c\u208f-\u2328\u232b-\u2767\u2776-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2cf8\u2cfd\u2d00-\u2d6f\u2d71-\u2dff\u2e2f\u2e3c-\u3000\u3004-\u3007\u3012-\u3013\u3020-\u302f\u3031-\u303c\u303e-\u309f\u30a1-\u30fa\u30fc-\ua4fd\ua500-\ua60c\ua610-\ua672\ua674-\ua67d\ua67f-\ua6f1\ua6f8-\ua873\ua878-\ua8cd\ua8d0-\ua8f7\ua8fb-\ua92d\ua930-\ua95e\ua960-\ua9c0\ua9ce-\ua9dd\ua9e0-\uaa5b\uaa60-\uaadd\uaae0-\uaaef\uaaf2-\uabea\uabec-\ufd3d\ufd40-\ufe0f\ufe1a-\ufe2f\ufe53\ufe62\ufe64-\ufe67\ufe69\ufe6c-\uff00\uff04\uff0b\uff10-\uff19\uff1c-\uff1e\uff21-\uff3a\uff3e\uff40-\uff5a\uff5c\uff5e\uff66-\U000100ff\U00010103-\U0001039e\U000103a0-\U000103cf\U000103d1-\U00010856\U00010858-\U0001091e\U00010920-\U0001093e\U00010940-\U00010a4f\U00010a59-\U00010a7e\U00010a80-\U00010b38\U00010b40-\U00011046\U0001104e-\U000110ba\U000110bd\U000110c2-\U0001113f\U00011144-\U000111c4\U000111c9-\U0001246f\U00012474-\U0010ffff",
"^c": "\u0000-\u005e\u0060-\u203e\u2041-\u2053\u2055-\ufe32\ufe35-\ufe4c\ufe50-\uff3e\uff40-\U0010ffff",
"^d": "\u0000-\u002c\u002e-\u0589\u058b-\u05bd\u05bf-\u13ff\u1401-\u1805\u1807-\u200f\u2016-\u2e16\u2e18-\u2e19\u2e1b-\u2e39\u2e3c-\u301b\u301d-\u302f\u3031-\u309f\u30a1-\ufe30\ufe33-\ufe57\ufe59-\ufe62\ufe64-\uff0c\uff0e-\U0010ffff",
"^e": "\u0000-\u0028\u002a-\u005c\u005c\u005e-\u007c\u007e-\u0f3a\u0f3c\u0f3e-\u169b\u169d-\u2045\u2047-\u207d\u207f-\u208d\u208f-\u2329\u232b-\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u2776-\u27c5\u27c7-\u27e6\u27e8\u27ea\u27ec\u27ee\u27f0-\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u2999-\u29d8\u29da\u29dc-\u29fc\u29fe-\u2e22\u2e24\u2e26\u2e28\u2e2a-\u3008\u300a\u300c\u300e\u3010\u3012-\u3014\u3016\u3018\u301a\u301c-\u301d\u3020-\ufd3e\ufd40-\ufe17\ufe19-\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe45-\ufe47\ufe49-\ufe59\ufe5b\ufe5d\ufe5f-\uff08\uff0a-\uff3c\uff3e-\uff5c\uff5e-\uff5f\uff61-\uff62\uff64-\U0010ffff",
"^f": "\u0000-\u00ba\u00bc-\u2018\u201a-\u201c\u201e-\u2039\u203b-\u2e02\u2e04\u2e06-\u2e09\u2e0b-\u2e0c\u2e0e-\u2e1c\u2e1e-\u2e20\u2e22-\U0010ffff",
"^i": "\u0000-\u00aa\u00ac-\u2017\u2019-\u201a\u201d-\u201e\u2020-\u2038\u203a-\u2e01\u2e03\u2e05-\u2e08\u2e0a-\u2e0b\u2e0d-\u2e1b\u2e1d-\u2e1f\u2e21-\U0010ffff",
"^o": "\u0000-\u0020\u0024\u0028-\u0029\u002b\u002d\u0030-\u0039\u003c-\u003e\u0041-\u005b\u005c\u005d-\u00a0\u00a2-\u00a6\u00a8-\u00b5\u00b8-\u00be\u00c0-\u037d\u037f-\u0386\u0388-\u0559\u0560-\u0588\u058a-\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7-\u05f2\u05f5-\u0608\u060b\u060e-\u061a\u061c-\u061d\u0620-\u0669\u066e-\u06d3\u06d5-\u06ff\u070e-\u07f6\u07fa-\u082f\u083f-\u085d\u085f-\u0963\u0966-\u096f\u0971-\u0aef\u0af1-\u0df3\u0df5-\u0e4e\u0e50-\u0e59\u0e5c-\u0f03\u0f13\u0f15-\u0f84\u0f86-\u0fcf\u0fd5-\u0fd8\u0fdb-\u1049\u1050-\u10fa\u10fc-\u135f\u1369-\u166c\u166f-\u16ea\u16ee-\u1734\u1737-\u17d3\u17d7\u17db-\u17ff\u1806\u180b-\u1943\u1946-\u1a1d\u1a20-\u1a9f\u1aa7\u1aae-\u1b59\u1b61-\u1bfb\u1c00-\u1c3a\u1c40-\u1c7d\u1c80-\u1cbf\u1cc8-\u1cd2\u1cd4-\u2015\u2018-\u201f\u2028-\u202f\u2039-\u203a\u203f-\u2040\u2044-\u2046\u2052\u2054\u205f-\u2cf8\u2cfd\u2d00-\u2d6f\u2d71-\u2dff\u2e02-\u2e05\u2e09-\u2e0a\u2e0c-\u2e0d\u2e17\u2e1a\u2e1c-\u2e1d\u2e20-\u2e29\u2e2f\u2e3a-\u3000\u3004-\u303c\u303e-\u30fa\u30fc-\ua4fd\ua500-\ua60c\ua610-\ua672\ua674-\ua67d\ua67f-\ua6f1\ua6f8-\ua873\ua878-\ua8cd\ua8d0-\ua8f7\ua8fb-\ua92d\ua930-\ua95e\ua960-\ua9c0\ua9ce-\ua9dd\ua9e0-\uaa5b\uaa60-\uaadd\uaae0-\uaaef\uaaf2-\uabea\uabec-\ufe0f\ufe17-\ufe18\ufe1a-\ufe2f\ufe31-\ufe44\ufe47-\ufe48\ufe4d-\ufe4f\ufe53\ufe58-\ufe5e\ufe62-\ufe67\ufe69\ufe6c-\uff00\uff04\uff08-\uff09\uff0b\uff0d\uff10-\uff19\uff1c-\uff1e\uff21-\uff3b\uff3d-\uff60\uff62-\uff63\uff66-\U000100ff\U00010103-\U0001039e\U000103a0-\U000103cf\U000103d1-\U00010856\U00010858-\U0001091e\U00010920-\U0001093e\U00010940-\U00010a4f\U00010a59-\U00010a7e\U00010a80-\U00010b38\U00010b40-\U00011046\U0001104e-\U000110ba\U000110bd\U000110c2-\U0001113f\U00011144-\U000111c4\U000111c9-\U0001246f\U00012474-\U0010ffff",
"^s": "\u0000-\u0027\u0029-\u005a\u005c\u005c-\u007a\u007c-\u0f39\u0f3b\u0f3d-\u169a\u169c-\u2019\u201b-\u201d\u201f-\u2044\u2046-\u207c\u207e-\u208c\u208e-\u2328\u232a-\u2767\u2769\u276b\u276d\u276f\u2771\u2773\u2775-\u27c4\u27c6-\u27e5\u27e7\u27e9\u27eb\u27ed\u27ef-\u2982\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998-\u29d7\u29d9\u29db-\u29fb\u29fd-\u2e21\u2e23\u2e25\u2e27\u2e29-\u3007\u3009\u300b\u300d\u300f\u3011-\u3013\u3015\u3017\u3019\u301b-\u301c\u301e-\ufd3d\ufd3f-\ufe16\ufe18-\ufe34\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44-\ufe46\ufe48-\ufe58\ufe5a\ufe5c\ufe5e-\uff07\uff09-\uff3a\uff3c-\uff5a\uff5c-\uff5e\uff60-\uff61\uff63-\U0010ffff",
"c": "\u005f\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f",
"d": "\u002d\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d",
"e": "\u0029\u005c\u005d\u007d\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3f\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63",
"f": "\u00bb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21",
"i": "\u00ab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20",
"o": "\u0021-\u005c\u0023\u0025-\u0027\u002a\u002c\u002e-\u002f\u003a-\u003b\u003f-\u0040\u005c\u005c\u00a1\u00a7\u00b6-\u00b7\u00bf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u0af0\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65\U00010100-\U00010102\U0001039f\U000103d0\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010b39-\U00010b3f\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U000111c5-\U000111c8\U00012470-\U00012473",
"s": "\u0028\u005b\u007b\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3e\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62"
},
"s": {
"^": "\u0000-\u005c\u0023\u0025-\u002a\u002c-\u003b\u003f-\u005c\u005d\u005f\u0061-\u007b\u007d\u007f-\u00a1\u00a7\u00aa-\u00ab\u00ad\u00b2-\u00b3\u00b5-\u00b7\u00b9-\u00d6\u00d8-\u00f6\u00f8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0383\u0386-\u03f5\u03f7-\u0481\u0483-\u058e\u0590-\u0605\u0609-\u060a\u060c-\u060d\u0610-\u06dd\u06df-\u06e8\u06ea-\u06fc\u06ff-\u07f5\u07f7-\u09f1\u09f4-\u09f9\u09fc-\u0af0\u0af2-\u0b6f\u0b71-\u0bf2\u0bfb-\u0c7e\u0c80-\u0d78\u0d7a-\u0e3e\u0e40-\u0f00\u0f04-\u0f12\u0f14\u0f18-\u0f19\u0f20-\u0f33\u0f35\u0f37\u0f39-\u0fbd\u0fc6\u0fcd\u0fd0-\u0fd4\u0fd9-\u109d\u10a0-\u138f\u139a-\u17da\u17dc-\u193f\u1941-\u19dd\u1a00-\u1b60\u1b6b-\u1b73\u1b7d-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdc\u1fe0-\u1fec\u1ff0-\u1ffc\u1fff-\u2043\u2045-\u2051\u2053-\u2079\u207d-\u2089\u208d-\u209f\u20ba-\u20ff\u2102\u2107\u210a-\u2113\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2150-\u218f\u2329-\u232a\u23f4-\u23ff\u2427-\u243f\u244b-\u249b\u24ea-\u24ff\u2700\u2768-\u2793\u27c5-\u27c6\u27e6-\u27ef\u2983-\u2998\u29d8-\u29db\u29fc-\u29fd\u2b4d-\u2b4f\u2b5a-\u2ce4\u2ceb-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u3003\u3005-\u3011\u3014-\u301f\u3021-\u3035\u3038-\u303d\u3040-\u309a\u309d-\u318f\u3192-\u3195\u31a0-\u31bf\u31e4-\u31ff\u321f-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\u32ff\u3400-\u4dbf\u4e00-\ua48f\ua4c7-\ua6ff\ua717-\ua71f\ua722-\ua788\ua78b-\ua827\ua82c-\ua835\ua83a-\uaa76\uaa7a-\ufb28\ufb2a-\ufbb1\ufbc2-\ufdfb\ufdfe-\ufe61\ufe63\ufe67-\ufe68\ufe6a-\uff03\uff05-\uff0a\uff0c-\uff1b\uff1f-\uff3d\uff3f\uff41-\uff5b\uff5d\uff5f-\uffdf\uffe7\uffef-\ufffb\ufffe-\U00010136\U00010140-\U00010178\U0001018a-\U0001018f\U0001019c-\U000101cf\U000101fd-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d165-\U0001d169\U0001d16d-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d1de-\U0001d1ff\U0001d242-\U0001d244\U0001d246-\U0001d2ff\U0001d357-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0bf-\U0001f0c0\U0001f0d0\U0001f0e0-\U0001f10f\U0001f12f\U0001f16c-\U0001f16f\U0001f19b-\U0001f1e5\U0001f203-\U0001f20f\U0001f23b-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f2ff\U0001f321-\U0001f32f\U0001f336\U0001f37d-\U0001f37f\U0001f394-\U0001f39f\U0001f3c5\U0001f3cb-\U0001f3df\U0001f3f1-\U0001f3ff\U0001f43f\U0001f441\U0001f4f8\U0001f4fd-\U0001f4ff\U0001f53e-\U0001f53f\U0001f544-\U0001f54f\U0001f568-\U0001f5fa\U0001f641-\U0001f644\U0001f650-\U0001f67f\U0001f6c6-\U0001f6ff\U0001f774-\U0010ffff",
"^c": "\u0000-\u005c\u0023\u0025-\u00a1\u00a6-\u058e\u0590-\u060a\u060c-\u09f1\u09f4-\u09fa\u09fc-\u0af0\u0af2-\u0bf8\u0bfa-\u0e3e\u0e40-\u17da\u17dc-\u209f\u20ba-\ua837\ua839-\ufdfb\ufdfd-\ufe68\ufe6a-\uff03\uff05-\uffdf\uffe2-\uffe4\uffe7-\U0010ffff",
"^k": "\u0000-\u005c\u005d\u005f\u0061-\u00a7\u00a9-\u00ae\u00b0-\u00b3\u00b5-\u00b7\u00b9-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0383\u0386-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdc\u1fe0-\u1fec\u1ff0-\u1ffc\u1fff-\u309a\u309d-\ua6ff\ua717-\ua71f\ua722-\ua788\ua78b-\ufbb1\ufbc2-\uff3d\uff3f\uff41-\uffe2\uffe4-\U0010ffff",
"^m": "\u0000-\u002a\u002c-\u003b\u003f-\u007b\u007d\u007f-\u00ab\u00ad-\u00b0\u00b2-\u00d6\u00d8-\u00f6\u00f8-\u03f5\u03f7-\u0605\u0609-\u2043\u2045-\u2051\u2053-\u2079\u207d-\u2089\u208d-\u2117\u2119-\u213f\u2145-\u214a\u214c-\u218f\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u27bf\u27c5-\u27c6\u27e6-\u27ef\u2800-\u28ff\u2983-\u2998\u29d8-\u29db\u29fc-\u29fd\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\ufb28\ufb2a-\ufe61\ufe63\ufe67-\uff0a\uff0c-\uff1b\uff1f-\uff5b\uff5d\uff5f-\uffe1\uffe3-\uffe8\uffed-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001eeef\U0001eef2-\U0010ffff",
"^o": "\u0000-\u00a5\u00a7-\u00a8\u00aa-\u00ad\u00af\u00b1-\u0481\u0483-\u060d\u0610-\u06dd\u06df-\u06e8\u06ea-\u06fc\u06ff-\u07f5\u07f7-\u09f9\u09fb-\u0b6f\u0b71-\u0bf2\u0bf9\u0bfb-\u0c7e\u0c80-\u0d78\u0d7a-\u0f00\u0f04-\u0f12\u0f14\u0f18-\u0f19\u0f20-\u0f33\u0f35\u0f37\u0f39-\u0fbd\u0fc6\u0fcd\u0fd0-\u0fd4\u0fd9-\u109d\u10a0-\u138f\u139a-\u193f\u1941-\u19dd\u1a00-\u1b60\u1b6b-\u1b73\u1b7d-\u20ff\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u2149\u214b\u214e\u2150-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2308-\u230b\u2320-\u2321\u2329-\u232a\u237c\u239b-\u23b3\u23dc-\u23e1\u23f4-\u23ff\u2427-\u243f\u244b-\u249b\u24ea-\u24ff\u25b7\u25c1\u25f8-\u25ff\u266f\u2700\u2768-\u2793\u27c0-\u27ff\u2900-\u2aff\u2b30-\u2b44\u2b47-\u2b4f\u2b5a-\u2ce4\u2ceb-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u3003\u3005-\u3011\u3014-\u301f\u3021-\u3035\u3038-\u303d\u3040-\u318f\u3192-\u3195\u31a0-\u31bf\u31e4-\u31ff\u321f-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\u32ff\u3400-\u4dbf\u4e00-\ua48f\ua4c7-\ua827\ua82c-\ua835\ua838\ua83a-\uaa76\uaa7a-\ufdfc\ufdfe-\uffe3\uffe5-\uffe7\uffe9-\uffec\uffef-\ufffb\ufffe-\U00010136\U00010140-\U00010178\U0001018a-\U0001018f\U0001019c-\U000101cf\U000101fd-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d165-\U0001d169\U0001d16d-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d1de-\U0001d1ff\U0001d242-\U0001d244\U0001d246-\U0001d2ff\U0001d357-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0bf-\U0001f0c0\U0001f0d0\U0001f0e0-\U0001f10f\U0001f12f\U0001f16c-\U0001f16f\U0001f19b-\U0001f1e5\U0001f203-\U0001f20f\U0001f23b-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f2ff\U0001f321-\U0001f32f\U0001f336\U0001f37d-\U0001f37f\U0001f394-\U0001f39f\U0001f3c5\U0001f3cb-\U0001f3df\U0001f3f1-\U0001f3ff\U0001f43f\U0001f441\U0001f4f8\U0001f4fd-\U0001f4ff\U0001f53e-\U0001f53f\U0001f544-\U0001f54f\U0001f568-\U0001f5fa\U0001f641-\U0001f644\U0001f650-\U0001f67f\U0001f6c6-\U0001f6ff\U0001f774-\U0010ffff",
"c": "\u0024\u00a2-\u00a5\u058f\u060b\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20b9\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6",
"k": "\u005e\u0060\u00a8\u00af\u00b4\u00b8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\ufbb2-\ufbc1\uff3e\uff40\uffe3",
"m": "\u002b\u003c-\u003e\u007c\u007e\u00ac\u00b1\u00d7\u00f7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2308-\u230b\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1",
"o": "\u00a6\u00a9\u00ae\u00b0\u0482\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u23f3\u2400-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u26ff\u2701-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b50-\u2b59\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd\U00010137-\U0001013f\U00010179-\U00010189\U00010190-\U0001019b\U000101d0-\U000101fc\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1dd\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0be\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0df\U0001f110-\U0001f12e\U0001f130-\U0001f16b\U0001f170-\U0001f19a\U0001f1e6-\U0001f202\U0001f210-\U0001f23a\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f300-\U0001f320\U0001f330-\U0001f335\U0001f337-\U0001f37c\U0001f380-\U0001f393\U0001f3a0-\U0001f3c4\U0001f3c6-\U0001f3ca\U0001f3e0-\U0001f3f0\U0001f400-\U0001f43e\U0001f440\U0001f442-\U0001f4f7\U0001f4f9-\U0001f4fc\U0001f500-\U0001f53d\U0001f540-\U0001f543\U0001f550-\U0001f567\U0001f5fb-\U0001f640\U0001f645-\U0001f64f\U0001f680-\U0001f6c5\U0001f700-\U0001f773"
},
"z": {
"^": "\u0000-\u001f\u0021-\u009f\u00a1-\u167f\u1681-\u180d\u180f-\u1fff\u200b-\u2027\u202a-\u202e\u2030-\u205e\u2060-\u2fff\u3001-\U0010ffff",
"^l": "\u0000-\u2027\u2029-\U0010ffff",
"^p": "\u0000-\u2028\u202a-\U0010ffff",
"^s": "\u0000-\u001f\u0021-\u009f\u00a1-\u167f\u1681-\u180d\u180f-\u1fff\u200b-\u202e\u2030-\u205e\u2060-\u2fff\u3001-\U0010ffff",
"l": "\u2028",
"p": "\u2029",
"s": "\u0020\u00a0\u1680\u180e\u2000-\u200a\u202f\u205f\u3000"
}
}
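An editorial aside on the table above: the top-level keys appear to track Unicode general categories (for example "n" = Number with "d"/"l"/"o" subkeys for Nd/Nl/No, "z" = Separator with "l"/"p"/"s" for Zl/Zp/Zs), "^"-prefixed keys hold the complement ranges, and some entries look pre-escaped for regex use (\# and \\). A minimal sketch, assuming Python's re module and copying the "z" -> "s" value verbatim from above:

import re
import unicodedata

# The ranges are already in character-class syntax, so an entry can be
# dropped straight between brackets and compiled.
ZS_RANGES = "\u0020\u00a0\u1680\u180e\u2000-\u200a\u202f\u205f\u3000"
zs = re.compile("[" + ZS_RANGES + "]")

assert zs.match("\u00a0")                      # NO-BREAK SPACE is a Zs separator
assert unicodedata.category("\u00a0") == "Zs"  # cross-check against unicodedata
assert not zs.match("A")

(Note: \u180e is listed under Zs here, which matches older Unicode versions; current Unicode files it under Cf.)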
| 846.885057
| 5,927
| 0.794365
| 10,469
| 73,679
| 5.590028
| 0.451524
| 0.001709
| 0.001282
| 0.001367
| 0.58566
| 0.547555
| 0.520779
| 0.506493
| 0.479563
| 0.465705
| 0
| 0.457101
| 0.010356
| 73,679
| 86
| 5,928
| 856.732558
| 0.345493
| 0.00076
| 0
| 0
| 1
| 0.630952
| 0.98302
| 0.981445
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011905
| 0
| 0.011905
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fc692383bc6fb0ab34207811309a22c67e008fbe
| 5,412
|
py
|
Python
|
tests/datasets/test_synthetic_data.py
|
murthyn/composer
|
2a04cf387dd8558556500f7ef2bc6d3d131043d5
|
[
"Apache-2.0"
] | null | null | null |
tests/datasets/test_synthetic_data.py
|
murthyn/composer
|
2a04cf387dd8558556500f7ef2bc6d3d131043d5
|
[
"Apache-2.0"
] | null | null | null |
tests/datasets/test_synthetic_data.py
|
murthyn/composer
|
2a04cf387dd8558556500f7ef2bc6d3d131043d5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 MosaicML. All Rights Reserved.
from typing import Optional
import pytest
import torch
from composer.datasets.synthetic import (SyntheticBatchPairDataset, SyntheticDataLabelType, SyntheticDataType,
SyntheticPILDataset)
@pytest.mark.parametrize('data_type', [
SyntheticDataType.GAUSSIAN,
SyntheticDataType.SEPARABLE,
])
@pytest.mark.parametrize('label_type', [
SyntheticDataLabelType.CLASSIFICATION_ONE_HOT,
SyntheticDataLabelType.CLASSIFICATION_INT,
])
def test_synthetic_data_creation(data_type: SyntheticDataType, label_type: SyntheticDataLabelType):
if data_type == SyntheticDataType.SEPARABLE:
if label_type != SyntheticDataLabelType.CLASSIFICATION_INT:
pytest.skip("Separable data requires classification int labels")
num_classes = 2
label_shape = None
else:
num_classes = 10
label_shape = (1, 10, 12)
dataset_size = 1000
data_shape = (3, 32, 32)
num_samples_to_create = 10
dataset = SyntheticBatchPairDataset(total_dataset_size=dataset_size,
data_shape=data_shape,
num_unique_samples_to_create=num_samples_to_create,
data_type=data_type,
label_type=label_type,
num_classes=num_classes,
label_shape=label_shape)
assert len(dataset) == dataset_size
# verify datapoints are correct
x, y = dataset[0]
assert x.size() == data_shape
if label_type == SyntheticDataLabelType.CLASSIFICATION_INT:
assert isinstance(y.item(), int)
elif label_type == SyntheticDataLabelType.CLASSIFICATION_ONE_HOT:
assert y.size() == (num_classes,)
assert torch.min(y) == 0
assert torch.max(y) == 1
# check that points were allocated in memory after the first call to __getitem__
assert dataset.input_data is not None
assert dataset.input_target is not None
# check that the correct number of points were allocated in memory
assert dataset.input_data.size()[0] == num_samples_to_create
assert dataset.input_target.size()[0] == num_samples_to_create
# verify that you can fetch points outside the num_samples_to_create range
# (still within the total dataset size range)
x, y = dataset[num_samples_to_create + 1]
assert x is not None
assert y is not None
@pytest.mark.parametrize('label_type', [
SyntheticDataLabelType.CLASSIFICATION_ONE_HOT,
SyntheticDataLabelType.CLASSIFICATION_INT,
])
@pytest.mark.parametrize('num_classes', [None, 0])
def test_synthetic_classification_param_validation(label_type: SyntheticDataLabelType, num_classes: Optional[int]):
with pytest.raises(ValueError):
SyntheticBatchPairDataset(total_dataset_size=10,
data_shape=(2, 2),
label_type=label_type,
num_classes=num_classes)
@pytest.mark.parametrize('data_type', [
SyntheticDataType.GAUSSIAN,
SyntheticDataType.SEPARABLE,
])
@pytest.mark.parametrize('label_type', [
SyntheticDataLabelType.CLASSIFICATION_ONE_HOT,
SyntheticDataLabelType.CLASSIFICATION_INT,
])
def test_synthetic_image_data_creation(data_type: SyntheticDataType, label_type: SyntheticDataLabelType):
if data_type == SyntheticDataType.SEPARABLE:
if label_type != SyntheticDataLabelType.CLASSIFICATION_INT:
pytest.skip("Seperable data requires classification int labels")
num_classes = 2
label_shape = None
else:
num_classes = 10
label_shape = (1, 10, 12)
dataset_size = 1000
data_shape = (32, 32)
num_samples_to_create = 100
dataset = SyntheticPILDataset(total_dataset_size=dataset_size,
data_shape=data_shape,
num_unique_samples_to_create=num_samples_to_create,
data_type=data_type,
label_type=label_type,
num_classes=num_classes,
label_shape=label_shape)
assert len(dataset) == dataset_size
# verify datapoints are correct
x, y = dataset[0]
assert x.size == data_shape
if label_type == SyntheticDataLabelType.CLASSIFICATION_INT:
assert isinstance(y.item(), int)
elif label_type == SyntheticDataLabelType.CLASSIFICATION_ONE_HOT:
assert y.size() == (num_classes,)
assert torch.min(y) == 0
assert torch.max(y) == 1
# check that points were allocated in memory after the first call to __getitem__
assert dataset._dataset.input_data is not None
assert dataset._dataset.input_target is not None
# check that the correct number of points were allocated in memory
assert dataset._dataset.input_data.shape[0] == num_samples_to_create
assert dataset._dataset.input_target.shape[0] == num_samples_to_create
# verify that you can fetch points outside the num_samples_to_create range
# (still within the total dataset size range)
x, y = dataset[num_samples_to_create + 1]
assert x is not None
assert y is not None
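The allocation checks in both tests above (input_data is None until the first __getitem__, after which exactly num_unique_samples_to_create rows exist, yet indices up to the full dataset size remain valid) describe a lazily allocated, cyclically reused backing store. A minimal sketch of that pattern, with a hypothetical class name and default shape, not composer's actual implementation:

import torch
from torch.utils.data import Dataset

class LazyUniqueSampleDataset(Dataset):
    """Hypothetical: allocate num_unique samples on first access, then
    serve them cyclically across the full dataset length."""

    def __init__(self, total_size: int, num_unique: int, shape=(3, 32, 32)):
        self.total_size = total_size
        self.num_unique = num_unique
        self.shape = shape
        self.input_data = None  # stays None until __getitem__, as the tests assert

    def __len__(self) -> int:
        return self.total_size

    def __getitem__(self, idx: int) -> torch.Tensor:
        if self.input_data is None:  # allocate lazily, exactly once
            self.input_data = torch.randn(self.num_unique, *self.shape)
        return self.input_data[idx % self.num_unique]  # reuse the unique samples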
| 40.088889
| 115
| 0.663525
| 610
| 5,412
| 5.619672
| 0.172131
| 0.047258
| 0.06126
| 0.063011
| 0.847433
| 0.837515
| 0.819428
| 0.800758
| 0.771587
| 0.771587
| 0
| 0.01491
| 0.268847
| 5,412
| 134
| 116
| 40.38806
| 0.851403
| 0.11881
| 0
| 0.701923
| 0
| 0
| 0.033018
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 1
| 0.028846
| false
| 0
| 0.038462
| 0
| 0.086538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d1eef88d6880b387b8c8cfd904dc245890314fa
| 84,496
|
py
|
Python
|
deform/tests/test_widget.py
|
sixfeetup/deform
|
df0e4f8ad84d08ae4112a5a71a4518d3fe0a0d3d
|
[
"CC-BY-3.0"
] | null | null | null |
deform/tests/test_widget.py
|
sixfeetup/deform
|
df0e4f8ad84d08ae4112a5a71a4518d3fe0a0d3d
|
[
"CC-BY-3.0"
] | null | null | null |
deform/tests/test_widget.py
|
sixfeetup/deform
|
df0e4f8ad84d08ae4112a5a71a4518d3fe0a0d3d
|
[
"CC-BY-3.0"
] | null | null | null |
import unittest
from deform.compat import text_type
import colander
def invalid_exc(func, *arg, **kw):
try:
func(*arg, **kw)
except colander.Invalid as e:
return e
else:
raise AssertionError('Invalid not raised') # pragma: no cover
class TestWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import Widget
return Widget(**kw)
def test_ctor(self):
widget = self._makeOne(a=1, b=2)
self.assertEqual(widget.a, 1)
self.assertEqual(widget.b, 2)
def test_serialize(self):
widget = self._makeOne()
self.assertRaises(NotImplementedError, widget.serialize, None, None)
def test_deserialize(self):
widget = self._makeOne()
self.assertRaises(NotImplementedError, widget.deserialize, None, None)
def test_handle_error(self):
inner_widget = self._makeOne()
outer_widget = self._makeOne()
inner_field = DummyField()
inner_field.widget = inner_widget
outer_field = DummyField()
outer_field.widget = outer_widget
outer_field.children = [inner_field]
inner_error = DummyInvalid()
outer_error = DummyInvalid(inner_error)
outer_widget.handle_error(outer_field, outer_error)
self.assertEqual(inner_field.error, inner_error)
self.assertEqual(outer_field.error, outer_error)
def test_handle_error_already_has_error(self):
widget = self._makeOne()
widget.error = 'abc'
field = DummyField()
error = DummyInvalid()
widget.handle_error(field, error)
self.assertEqual(widget.error, 'abc')
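# Editorial note: the Dummy* doubles used throughout this module (DummyField,
# DummyRenderer, DummySchema, DummyInvalid) are defined near the bottom of the
# real file, beyond this excerpt. Inferred from usage alone, they are
# record-everything stubs, roughly like (a sketch, not the module's actual
# definitions):
#
#     class DummyRenderer:
#         def __call__(self, template, **kw):
#             self.template = template
#             self.kw = kw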
class TestTextInputWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import TextInputWidget
return TextInputWidget(**kw)
def test_serialize_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_None(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_not_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_serialize_not_null_readonly(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_deserialize_strip(self):
widget = self._makeOne()
field = DummyField()
pstruct = ' abc '
result = widget.deserialize(field, pstruct)
self.assertEqual(result, 'abc')
def test_deserialize_no_strip(self):
widget = self._makeOne(strip=False)
field = DummyField()
pstruct = ' abc '
result = widget.deserialize(field, pstruct)
self.assertEqual(result, ' abc ')
def test_deserialize_null(self):
widget = self._makeOne(strip=False)
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_emptystring(self):
widget = self._makeOne()
field = DummyField()
pstruct = ''
result = widget.deserialize(field, pstruct)
self.assertEqual(result, colander.null)
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
pstruct = {}
self.assertRaises(colander.Invalid,
widget.deserialize, field, pstruct)
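# Editorial sketch (hypothetical, not deform's actual code): the
# serialize/deserialize contract that TestTextInputWidget pins down, reduced
# to a minimal widget. The template names and field.renderer/field.schema
# attributes are assumptions based on how the tests use their doubles.
class _SketchTextInputWidget:
    template = 'textinput'
    readonly_template = 'readonly/textinput'
    def __init__(self, strip=True):
        self.strip = strip
    def serialize(self, field, cstruct, readonly=False):
        if cstruct in (colander.null, None):
            cstruct = ''  # null and None both render as the empty string
        template = self.readonly_template if readonly else self.template
        return field.renderer(template, field=field, cstruct=cstruct)
    def deserialize(self, field, pstruct):
        if pstruct is colander.null:
            return colander.null
        if not isinstance(pstruct, str):
            raise colander.Invalid(field.schema, 'Pstruct is not a string')
        if self.strip:
            pstruct = pstruct.strip()
        return pstruct or colander.null  # empty string maps back to null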
class TestMoneyInputWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import MoneyInputWidget
return MoneyInputWidget(**kw)
def test_serialize_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
self.assertEqual(renderer.kw['mask_options'], '{}')
def test_serialize_None(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
self.assertEqual(renderer.kw['mask_options'], '{}')
def test_serialize_not_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
self.assertEqual(renderer.kw['mask_options'], '{}')
def test_serialize_not_null_with_options(self):
widget = self._makeOne(options={'allowZero':True})
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
self.assertEqual(renderer.kw['mask_options'], '{"allowZero": true}')
def test_serialize_not_null_readonly(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
self.assertEqual(renderer.kw['mask_options'], '{}')
def test_deserialize_strip(self):
widget = self._makeOne()
field = DummyField()
pstruct = ' abc '
result = widget.deserialize(field, pstruct)
self.assertEqual(result, 'abc')
def test_deserialize_null(self):
widget = self._makeOne(strip=False)
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_emptystring(self):
widget = self._makeOne()
field = DummyField()
pstruct = ''
result = widget.deserialize(field, pstruct)
self.assertEqual(result, colander.null)
def test_deserialize_with_default_thousands_separator(self):
widget = self._makeOne()
field = DummyField()
pstruct = '1,000,000.00'
result = widget.deserialize(field, pstruct)
self.assertEqual(result, '1000000.00')
def test_deserialize_with_nondefault_thousands_separator(self):
widget = self._makeOne()
widget.options = {'thousands':'!'}
field = DummyField()
pstruct = '1!000!000.00'
result = widget.deserialize(field, pstruct)
self.assertEqual(result, '1000000.00')
def test_deserialize_bad_type(self):
widget = self._makeOne(strip=False)
field = DummyField()
self.assertRaises(colander.Invalid, widget.deserialize, field, {})
class TestAutocompleteInputWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import AutocompleteInputWidget
return AutocompleteInputWidget(**kw)
def test_serialize_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_removed_delay(self):
widget = self._makeOne()
widget.delay = 300
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
self.assertRaises(ValueError, widget.serialize, field, None)
def test_serialize_None(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_url(self):
import json
widget = self._makeOne()
url = 'http://example.com'
widget.values = url
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
self.assertEqual(json.loads(renderer.kw['options']),
{"limit": 8,
"minLength": 1,
"remote": "http://example.com?term=%QUERY"})
def test_serialize_iterable(self):
import json
widget = self._makeOne()
vals = [1,2,3,4]
widget.values = vals
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
self.assertEqual(json.loads(renderer.kw['options']),
{"local": [1,2,3,4],
"minLength": 1,
"limit": 8})
def test_serialize_not_null_readonly(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_deserialize_strip(self):
widget = self._makeOne()
field = DummyField()
pstruct = ' abc '
result = widget.deserialize(field, pstruct)
self.assertEqual(result, 'abc')
def test_deserialize_no_strip(self):
widget = self._makeOne(strip=False)
field = DummyField()
pstruct = ' abc '
result = widget.deserialize(field, pstruct)
self.assertEqual(result, ' abc ')
def test_deserialize_null(self):
widget = self._makeOne(strip=False)
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_emptystring(self):
widget = self._makeOne()
field = DummyField()
pstruct = ''
result = widget.deserialize(field, pstruct)
self.assertEqual(result, colander.null)
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid, widget.deserialize, field, {})
class TestDateInputWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import DateInputWidget
return DateInputWidget(**kw)
def test_serialize_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_None(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_not_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_serialize_not_null_readonly(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_deserialize_null(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_emptystring(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, '')
self.assertEqual(result, colander.null)
def test_deserialize_date_submit(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, {'date': 'foo', 'date_submit': 'bar'})
self.assertEqual(result, 'bar')
def test_deserialize_date(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, {'date': 'foo', 'date_submit': ''})
self.assertEqual(result, 'foo')
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, 'garbage')
def test_deserialize_missing_fields(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, {})
def test_deserialize_bad_field(self):
widget = self._makeOne()
field = DummyField()
pstruct = {'date': {}, 'date_submit': {}}
self.assertRaises(colander.Invalid,
widget.deserialize, field, pstruct)
def test_options_changed_and_default(self):
widget2 = self._makeOne()
widget = self._makeOne(options={'format': 'foo'})
self.assertEqual(widget.options['format'], 'foo')
self.assertEqual(widget2.options, None)
class TestTimeInputWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import TimeInputWidget
return TimeInputWidget(**kw)
def test_serialize_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_None(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_not_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_serialize_not_null_readonly(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_deserialize_null(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_emptystring(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, '')
self.assertEqual(result, colander.null)
def test_deserialize_success(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, {'time': '14:15:16'})
self.assertEqual(result, '14:15:16')
def test_deserialize_time_submit(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, {'time': '14:15:16', 'time_submit': '14:15:17'})
self.assertEqual(result, '14:15:17')
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, 'garbage')
def test_deserialize_missing_fields(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, {})
def test_deserialize_bad_field(self):
widget = self._makeOne()
field = DummyField()
pstruct = {'time': {}, 'time_submit': '14:15:17'}
self.assertRaises(colander.Invalid,
widget.deserialize, field, pstruct)
def test_options_changed_and_default(self):
widget2 = self._makeOne()
widget = self._makeOne(options={'format': 'foo'})
self.assertEqual(widget.options['format'], 'foo')
self.assertEqual(widget2.options['format'], 'HH:i')
class TestDateTimeInputWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import DateTimeInputWidget
return DateTimeInputWidget(**kw)
def test_date_options_changed_and_default(self):
widget2 = self._makeOne()
widget = self._makeOne(date_options={'format': 'foo'})
self.assertEqual(widget.date_options['format'], 'foo')
self.assertEqual(widget2.date_options, None)
def test_serialize_with_timezone(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(DummySchema(), renderer=renderer)
cstruct = '2011-12-13T14:15:16+01:00'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['date'], '2011-12-13')
self.assertEqual(renderer.kw['time'], '14:15:16')
def test_serialize_with_timezone_and_microseconds(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(DummySchema(), renderer=renderer)
cstruct = '2011-12-13T14:15:16.10932+01:00'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertNotEqual(renderer.kw['cstruct'], cstruct)
self.assertEqual(renderer.kw['date'], '2011-12-13')
self.assertEqual(renderer.kw['time'], '14:15:16')
self.assertEqual(renderer.kw['cstruct'], '2011-12-13T14:15:16.10932')
def test_serialize_without_timezone(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(DummySchema(), renderer=renderer)
cstruct = '2011-12-13T14:15:16'
widget.serialize(field, cstruct)
self.assertEqual(renderer.kw['date'], '2011-12-13')
self.assertEqual(renderer.kw['time'], '14:15:16')
def test_serialize_no_separator(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(DummySchema(), renderer=renderer)
cstruct = ''
widget.serialize(field, cstruct)
self.assertEqual(renderer.kw['date'], '')
self.assertEqual(renderer.kw['time'], '')
def test_serialize_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(DummySchema(), renderer=renderer)
cstruct = colander.null
widget.serialize(field, cstruct)
self.assertEqual(renderer.kw['date'], '')
self.assertEqual(renderer.kw['time'], '')
def test_deserialize_null(self):
widget = self._makeOne()
field = DummyField()
pstruct = colander.null
result = widget.deserialize(field, pstruct)
self.assertEqual(result, colander.null)
def test_deserialize_nochanges(self):
widget = self._makeOne()
field = DummyField()
pstruct = {
'date':'2011-12-13',
'date_submit':'',
'time':'14:15:16',
'time_submit':''
}
result = widget.deserialize(field, pstruct)
self.assertEqual(result, '2011-12-13T14:15:16')
def test_deserialize_date_changed(self):
widget = self._makeOne()
field = DummyField()
pstruct = {
'date':'2011-12-13',
'date_submit':'2011-12-12',
'time':'14:15:16',
'time_submit':''
}
result = widget.deserialize(field, pstruct)
self.assertEqual(result, '2011-12-12T14:15:16')
def test_deserialize_time_changed(self):
widget = self._makeOne()
field = DummyField()
pstruct = {
'date':'2011-12-13',
'date_submit':'',
'time':'14:15:16',
'time_submit':'14:15:15'
}
result = widget.deserialize(field, pstruct)
self.assertEqual(result, '2011-12-13T14:15:15')
def test_deserialize_date_and_time_changed(self):
widget = self._makeOne()
field = DummyField()
pstruct = {
'date':'2011-12-13',
'date_submit':'2011-12-12',
'time':'14:15:16',
'time_submit':'14:15:15'
}
result = widget.deserialize(field, pstruct)
self.assertEqual(result, '2011-12-12T14:15:15')
def test_deserialize_no_date(self):
widget = self._makeOne()
field = DummyField()
pstruct = {
'date':'',
'date_submit':'',
'time':'14:15:16',
'time_submit':'14:15:15'
}
self.assertRaises(colander.Invalid, widget.deserialize, field, pstruct)
def test_deserialize_no_time(self):
widget = self._makeOne()
field = DummyField()
pstruct = {
'date':'2011-12-13',
'date_submit':'2011-12-12',
'time':'',
'time_submit':''
}
self.assertRaises(colander.Invalid, widget.deserialize, field, pstruct)
def test_deserialize_no_time_no_date(self):
widget = self._makeOne()
field = DummyField()
pstruct = {
'date':'',
'date_submit':'',
'time':'',
'time_submit':''
}
result = widget.deserialize(field, pstruct)
self.assertEqual(result, colander.null)
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, 'garbage')
def test_deserialize_missing_fields(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, {})
def test_deserialize_bad_field(self):
widget = self._makeOne()
field = DummyField()
pstruct = {
'date':'2011-12-13',
'date_submit':'2011-12-12',
'time':'14:15:16',
'time_submit': {},
}
self.assertRaises(colander.Invalid,
widget.deserialize, field, pstruct)
class TestHiddenWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import HiddenWidget
return HiddenWidget(**kw)
def test_serialize_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_None(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_not_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_deserialize(self):
widget = self._makeOne()
field = DummyField()
pstruct = 'abc'
result = widget.deserialize(field, pstruct)
self.assertEqual(result, 'abc')
def test_deserialize_null(self):
widget = self._makeOne(strip=False)
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_emptystring(self):
widget = self._makeOne(strip=False)
field = DummyField()
result = widget.deserialize(field, '')
self.assertEqual(result, colander.null)
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, ['a', 'b'])
class TestPasswordWidget(TestTextInputWidget):
def _makeOne(self, **kw):
from deform.widget import PasswordWidget
return PasswordWidget(**kw)
class TestTextAreaWidget(TestTextInputWidget):
def _makeOne(self, **kw):
from deform.widget import TextAreaWidget
return TextAreaWidget(**kw)
class TestRichTextWidget(TestTextInputWidget):
def _makeOne(self, **kw):
from deform.widget import RichTextWidget
return RichTextWidget(**kw)
def test_options(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
options = {
'theme_advanced_buttons1': 'bold,italic,bullist,numlist',
'verify_html': True,
'element_format': 'html'
}
widget = self._makeOne(options=options)
cstruct = 'abc'
widget.serialize(field, cstruct)
# Default options should be provided
result = renderer.kw['tinymce_options']
self.assertTrue('"height": 240' in result)
self.assertTrue('"width": 0' in result)
# Custom options should be set
self.assertTrue('"theme_advanced_buttons1": "bold,italic,bullist,numlist"' in result)
self.assertTrue('"verify_html": true' in result)
self.assertTrue('"element_format": "html"' in result)
class TestCheckboxWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import CheckboxWidget
return CheckboxWidget(**kw)
def test_serialize_not_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_serialize_not_null_readonly(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
cstruct = 'abc'
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_deserialize_null(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, 'false')
def test_deserialize_true_val(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, 'true')
self.assertEqual(result, 'true')
def test_deserialize_false_val(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, 'false')
self.assertEqual(result, 'false')
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid, widget.deserialize, field, {})
class TestRadioChoiceWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import RadioChoiceWidget
return RadioChoiceWidget(**kw)
def test_serialize_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_null_alternate_null_value(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.null_value = 'fred'
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], 'fred')
def test_serialize_not_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_serialize_not_null_readonly(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
cstruct = 'abc'
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_deserialize_null(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_other(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, 'true')
self.assertEqual(result, 'true')
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid, widget.deserialize, field, {})
class TestSelectWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import SelectWidget
return SelectWidget(**kw)
def test_serialize_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_None(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_null_alternate_null_value(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.null_value = 'fred'
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], 'fred')
def test_serialize_not_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
cstruct = 'abc'
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_serialize_not_null_readonly(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
cstruct = 'abc'
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_serialize_integer_values(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne(values=((1, 'one'),))
widget.serialize(field, None)
self.assertEqual(renderer.kw['values'], [('1', 'one')])
def test_deserialize_null(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_null_value(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, '')
self.assertEqual(result, colander.null)
def test_deserialize_other(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, 'true')
self.assertEqual(result, 'true')
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid, widget.deserialize, field, {})
def test_deserialize_multiple(self):
widget = self._makeOne(multiple=True)
field = DummyField()
result = widget.deserialize(field, ['foo', 'bar'])
self.assertEqual(result, ['foo', 'bar'])
def test_deserialize_multiple_bad_type(self):
widget = self._makeOne(multiple=True)
field = DummyField()
self.assertRaises(colander.Invalid, widget.deserialize, field, {})
def test_deserialize_multiple_bad_item(self):
widget = self._makeOne(multiple=True)
field = DummyField()
pstruct = ['foo', {}]
self.assertRaises(colander.Invalid, widget.deserialize, field, pstruct)
class TestCheckboxChoiceWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import CheckboxChoiceWidget
return CheckboxChoiceWidget(**kw)
def test_serialize_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], ())
def test_serialize_None(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], ())
def test_serialize_not_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
cstruct = ('abc',)
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_serialize_not_null_readonly(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
cstruct = ('abc',)
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_serialize_integer_values(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne(values=((1, 'one'),))
widget.serialize(field, None)
self.assertEqual(renderer.kw['values'], [('1', 'one')])
def test_deserialize_null(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_single_string(self):
# If only one checkbox was checked, the browser posts a bare string
# instead of a list: DAMN HTML forms!
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, 'abc')
self.assertEqual(result, ('abc',))
def test_deserialize_other(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, ['abc'])
self.assertEqual(result, ('abc',))
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid, widget.deserialize, field, {})
def test_deserialize_bad_field(self):
widget = self._makeOne()
field = DummyField()
pstruct = ['abd', []]
self.assertRaises(colander.Invalid,
widget.deserialize, field, pstruct)
class TestCheckedInputWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import CheckedInputWidget
return CheckedInputWidget(**kw)
def test_serialize_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
self.assertEqual(renderer.kw['confirm'], '')
def test_serialize_None(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
self.assertEqual(renderer.kw['confirm'], '')
def test_serialize_true(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, True)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], True)
self.assertEqual(renderer.kw['confirm'], True)
def test_serialize_false(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, False)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], False)
self.assertEqual(renderer.kw['confirm'], False)
def test_serialize_true_readonly(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, True, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], True)
self.assertEqual(renderer.kw['confirm'], True)
def test_deserialize_null(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_empty(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, {'name':'',
'name-confirm':''})
self.assertEqual(result, colander.null)
self.assertEqual(field.error, None)
def test_deserialize_nonmatching(self):
widget = self._makeOne()
field = DummyField()
e = invalid_exc(widget.deserialize, field,
{'name':'password', 'name-confirm':'not'})
self.assertEqual(e.value, 'password')
self.assertEqual(e.msg, 'Fields did not match')
def test_deserialize_confirm_hint_on_field(self):
widget = self._makeOne()
field = DummyField()
e = invalid_exc(widget.deserialize, field,
{'name':'password', 'name-confirm':'not'})
self.assertEqual(e.value, 'password')
self.assertEqual(getattr(field, 'name-confirm', ''), 'not')
def test_deserialize_matching(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, {'name':'password',
'name-confirm':'password'})
self.assertEqual(result, 'password')
self.assertEqual(field.error, None)
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, 'garbage')
self.assertEqual(field.error, None)
def test_deserialize_missing_fields(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, {})
def test_deserialize_bad_field(self):
widget = self._makeOne()
field = DummyField()
pstruct = {'name': 'x', 'name-confirm': ['x']}
self.assertRaises(colander.Invalid,
widget.deserialize, field, pstruct)
class TestCheckedPasswordWidget(TestCheckedInputWidget):
def _makeOne(self, **kw):
from deform.widget import CheckedPasswordWidget
return CheckedPasswordWidget(**kw)
def test_deserialize_nonmatching(self):
widget = self._makeOne()
field = DummyField()
e = invalid_exc(widget.deserialize, field,
{'name':'password', 'name-confirm':'not'})
self.assertEqual(e.value, 'password')
self.assertEqual(e.msg, 'Password did not match confirm')
class TestFileUploadWidget(unittest.TestCase):
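    # FileUploadWidget keeps uploads in a temporary store keyed by uid so a
    # file can survive round trips when validation fails elsewhere in the form.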
def _makeOne(self, tmpstore, **kw):
from deform.widget import FileUploadWidget
return FileUploadWidget(tmpstore, **kw)
def test_serialize_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], {})
def test_serialize_None(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], {})
def test_serialize_uid_not_in_tmpstore(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
cstruct = {'uid':'uid'}
widget.serialize(field, cstruct)
self.assertEqual(tmpstore['uid'], cstruct)
def test_serialize_uid_in_tmpstore(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
tmpstore = DummyTmpStore()
existing = {'uid':'santa'}
tmpstore['uid'] = existing
widget = self._makeOne(tmpstore)
cstruct = {'uid':'notsanta'}
widget.serialize(field, cstruct)
self.assertEqual(tmpstore['uid'], existing)
def test_serialize_uid_in_tmpstore_readonly(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
tmpstore = DummyTmpStore()
existing = {'uid':'santa'}
tmpstore['uid'] = existing
widget = self._makeOne(tmpstore)
cstruct = {'uid':'notsanta'}
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(tmpstore['uid'], existing)
def test_deserialize_null(self):
schema = DummySchema()
field = DummyField(schema)
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_no_file_selected_no_previous_file(self):
schema = DummySchema()
field = DummyField(schema)
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
result = widget.deserialize(field, {})
self.assertEqual(result, colander.null)
def test_deserialize_no_file_selected_with_previous_file(self):
schema = DummySchema()
field = DummyField(schema)
tmpstore = DummyTmpStore()
tmpstore['uid'] = 'abc'
widget = self._makeOne(tmpstore)
result = widget.deserialize(field, {'uid':'uid'})
self.assertEqual(result, 'abc')
def test_deserialize_no_file_selected_with_previous_file_missing(self):
schema = DummySchema()
field = DummyField(schema)
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
result = widget.deserialize(field, {'uid':'uid'})
self.assertEqual(result, colander.null)
def test_deserialize_file_selected_no_previous_file(self):
schema = DummySchema()
field = DummyField(schema)
upload = DummyUpload()
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
result = widget.deserialize(field, {'upload':upload})
uid = list(tmpstore.keys())[0]
self.assertEqual(result['uid'], uid)
self.assertEqual(result['fp'], 'fp')
self.assertEqual(result['filename'], 'filename')
self.assertEqual(result['mimetype'], 'mimetype')
self.assertEqual(result['size'], 'size')
self.assertEqual(result['preview_url'], 'http://localhost/filename')
self.assertEqual(tmpstore[uid], result)
def test_deserialize_file_selected_with_previous_file(self):
schema = DummySchema()
field = DummyField(schema)
upload = DummyUpload()
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
result = widget.deserialize(field, {'upload':upload, 'uid':'uid'})
self.assertEqual(result['uid'], 'uid')
self.assertEqual(result['fp'], 'fp')
self.assertEqual(result['filename'], 'filename')
self.assertEqual(result['mimetype'], 'mimetype')
self.assertEqual(result['size'], 'size')
self.assertEqual(result['preview_url'], 'http://localhost/filename')
self.assertEqual(tmpstore['uid'], result)
def test_deserialize_file_selected_with_previous_file_IE_whole_path(self):
schema = DummySchema()
field = DummyField(schema)
upload = DummyUpload()
upload.filename = r'c:\foo\bar\baz.pt'
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
result = widget.deserialize(field, {'upload':upload, 'uid':'uid'})
self.assertEqual(result['uid'], 'uid')
self.assertEqual(result['fp'], 'fp')
self.assertEqual(result['filename'], 'baz.pt')
self.assertEqual(result['mimetype'], 'mimetype')
self.assertEqual(result['size'], 'size')
self.assertEqual(result['preview_url'], 'http://localhost/baz.pt')
self.assertEqual(tmpstore['uid'], result)
def test_deserialize_bad_type(self):
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, 'garbage')
def test_deserialize_bad_field(self):
tmpstore = DummyTmpStore()
widget = self._makeOne(tmpstore)
field = DummyField()
pstruct = {'upload': 'garbage'}
self.assertRaises(colander.Invalid,
widget.deserialize, field, pstruct)
class TestDatePartsWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import DatePartsWidget
return DatePartsWidget(**kw)
def test_serialize_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['year'], '')
self.assertEqual(renderer.kw['month'], '')
self.assertEqual(renderer.kw['day'], '')
def test_serialize_not_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, '2010-12-1')
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['year'], '2010')
self.assertEqual(renderer.kw['month'], '12')
self.assertEqual(renderer.kw['day'], '1')
def test_serialize_not_null_readonly(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, '2010-12-1', readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['year'], '2010')
self.assertEqual(renderer.kw['month'], '12')
self.assertEqual(renderer.kw['day'], '1')
def test_deserialize_not_null(self):
schema = DummySchema()
field = DummyField(schema, None)
widget = self._makeOne()
result = widget.deserialize(field, {'year':'1', 'month':'2', 'day':'3'})
self.assertEqual(result, '1-2-3')
def test_deserialize_assume_y2k_2digit(self):
schema = DummySchema()
field = DummyField(schema, None)
widget = self._makeOne()
result = widget.deserialize(field,
{'year':'01', 'month':'2', 'day':'3'})
self.assertEqual(result, '2001-2-3')
def test_deserialize_dont_assume_y2k_2digit(self):
schema = DummySchema()
field = DummyField(schema, None)
widget = self._makeOne()
widget.assume_y2k = False
result = widget.deserialize(field,
{'year':'01', 'month':'2', 'day':'3'})
self.assertEqual(result, '01-2-3')
def test_deserialize_null(self):
schema = DummySchema()
field = DummyField(schema, None)
widget = self._makeOne()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_emptyfields(self):
schema = DummySchema()
field = DummyField(schema, None)
widget = self._makeOne()
result = widget.deserialize(field,
{'year':'\t', 'month':'', 'day':''})
self.assertEqual(result, colander.null)
def test_deserialize_incomplete(self):
schema = DummySchema()
field = DummyField(schema, None)
widget = self._makeOne()
e = invalid_exc(widget.deserialize,
field, {'year':'1', 'month':'', 'day':''})
self.assertEqual(e.msg, 'Incomplete date')
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, 'garbage')
def test_deserialize_missing_fields(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, {})
def test_deserialize_bad_field(self):
widget = self._makeOne()
field = DummyField()
pstruct = {'year': '1970', 'month': '1', 'day': []}
self.assertRaises(colander.Invalid,
widget.deserialize, field, pstruct)
class TestMappingWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import MappingWidget
return MappingWidget(**kw)
def test_serialize_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], {})
def test_serialize_None(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], {})
def test_serialize_not_null(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
cstruct = {'a':1}
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_serialize_not_null_readonly(self):
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
cstruct = {'a':1}
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], cstruct)
def test_deserialize_null(self):
widget = self._makeOne()
field = DummyField()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, {})
def test_deserialize_non_null(self):
widget = self._makeOne()
field = DummyField()
inner_field = DummyField()
inner_field.name = 'a'
inner_widget = DummyWidget()
inner_widget.name = 'a'
inner_field.widget = inner_widget
field.children = [inner_field]
pstruct = {'a':1}
result = widget.deserialize(field, pstruct)
self.assertEqual(result, {'a':1})
def test_deserialize_error(self):
widget = self._makeOne()
field = DummyField()
inner_field = DummyField()
inner_field.name = 'a'
inner_widget = DummyWidget(
exc=colander.Invalid(inner_field, 'wrong', value='a'))
inner_widget.name = 'a'
inner_field.widget = inner_widget
field.children = [inner_field]
pstruct = {'a':1}
e = invalid_exc(widget.deserialize, field, pstruct)
self.assertEqual(e.value, {'a':'a'})
self.assertEqual(e.children[0].value, 'a')
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid,
widget.deserialize, field, ['a', 1])
class TestSequenceWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import SequenceWidget
return SequenceWidget(**kw)
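    # SequenceWidget.prototype returns URL-quoted markup for one subitem,
    # which deform's JavaScript unquotes client-side when the user adds an
    # item; the tests below exercise both text and unicode renderer output.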
def test_prototype_unicode(self):
from deform.compat import url_unquote
renderer = DummyRenderer(text_type('abc'))
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
protofield = DummyField(None, renderer)
field.children=[protofield]
result = widget.prototype(field)
self.assertEqual(type(result), str)
self.assertEqual(url_unquote(result), 'abc')
self.assertEqual(protofield.cloned, True)
def test_prototype_field_has_no_name(self):
from deform.compat import url_unquote
renderer = DummyRenderer(text_type('abc'))
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
protofield = DummyField(None, renderer)
protofield.name = ''
field.children=[protofield]
self.assertRaises(ValueError, widget.prototype, field)
def test_prototype_str(self):
from deform.compat import url_unquote
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
protofield = DummyField(None, renderer)
field.children=[protofield]
result = widget.prototype(field)
self.assertEqual(type(result), str)
self.assertEqual(url_unquote(result), 'abc')
self.assertEqual(protofield.cloned, True)
def test_serialize_null(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer)
inner = DummyField()
field.children=[inner]
widget = self._makeOne()
result = widget.serialize(field, colander.null)
self.assertEqual(result, 'abc')
self.assertEqual(len(renderer.kw['subfields']), 0)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], [])
self.assertEqual(renderer.template, widget.template)
def test_serialize_None(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer)
inner = DummyField()
field.children=[inner]
widget = self._makeOne()
result = widget.serialize(field, None)
self.assertEqual(result, 'abc')
self.assertEqual(len(renderer.kw['subfields']), 0)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], [])
self.assertEqual(renderer.template, widget.template)
def test_serialize_null_min_len_larger_than_cstruct(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer)
inner = DummyField()
field.children=[inner]
widget = self._makeOne()
widget.min_len = 2
result = widget.serialize(field, ['abc'])
self.assertEqual(result, 'abc')
self.assertEqual(len(renderer.kw['subfields']), 2)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], ['abc', colander.null])
self.assertEqual(renderer.template, widget.template)
def test_serialize_null_min_one(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer)
inner = DummyField()
field.children=[inner]
widget = self._makeOne()
widget.min_len = 1
result = widget.serialize(field, colander.null)
self.assertEqual(result, 'abc')
self.assertEqual(len(renderer.kw['subfields']), 1)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], [colander.null])
self.assertEqual(renderer.template, widget.template)
def test_serialize_add_subitem_value(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer)
inner = DummyField()
field.children=[inner]
widget = self._makeOne()
widget.add_subitem_text_template = 'Yo ${subitem_description}'
widget.serialize(field, colander.null)
self.assertEqual(renderer.kw['add_subitem_text'].interpolate(),
'Yo description')
def test_serialize_add_subitem_translates_title(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer, {'title': 'titel'})
inner = DummyField()
field.children=[inner]
widget = self._makeOne()
widget.add_subitem_text_template = 'Yo ${subitem_title}'
widget.serialize(field, colander.null)
self.assertEqual(renderer.kw['add_subitem_text'].interpolate(),
'Yo titel')
def test_serialize_add_subitem_translates_title_with_default_domain(self):
# By default, we get a TranslationString whose domain is 'deform'
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer, {'title': 'titel'})
inner = DummyField()
field.children=[inner]
widget = self._makeOne()
widget.add_subitem_text_template = 'Yo ${subitem_title}'
widget.serialize(field, colander.null)
self.assertEqual(renderer.kw['add_subitem_text'].domain, 'deform')
def test_serialize_add_subitem_translates_title_with_another_domain(self):
from translationstring import TranslationStringFactory
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer, {'title': 'titel'})
inner = DummyField()
field.children=[inner]
widget = self._makeOne()
# Here we provide our own TranslationString with a custom domain
custom_domain = 'not_deform'
_ = TranslationStringFactory(custom_domain)
widget.add_subitem_text_template = _('Yo ${subitem_title}')
widget.serialize(field, colander.null)
self.assertEqual(renderer.kw['add_subitem_text'].domain,
custom_domain)
def test_serialize_add_subitem_translates_description(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer, {'description': 'omschrijving'})
inner = DummyField()
field.children=[inner]
widget = self._makeOne()
widget.add_subitem_text_template = 'Yo ${subitem_description}'
widget.serialize(field, colander.null)
self.assertEqual(renderer.kw['add_subitem_text'].interpolate(),
'Yo omschrijving')
def test_serialize_subitem_value(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer)
inner = DummyField()
field.children=[inner]
widget = self._makeOne()
widget.serialize(field, colander.null)
self.assertEqual(renderer.kw['item_field'], inner)
def test_serialize_not_null(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer)
inner = DummyField()
field.children = [inner]
widget = self._makeOne()
result = widget.serialize(field, ['123'])
self.assertEqual(result, 'abc')
self.assertEqual(len(renderer.kw['subfields']), 1)
self.assertEqual(renderer.kw['subfields'][0], ('123', inner))
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], ['123'])
self.assertEqual(renderer.template, widget.template)
def test_serialize_not_null_readonly(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer)
inner = DummyField()
field.children = [inner]
widget = self._makeOne()
result = widget.serialize(field, ['123'], readonly=True)
self.assertEqual(result, 'abc')
self.assertEqual(len(renderer.kw['subfields']), 1)
self.assertEqual(renderer.kw['subfields'][0], ('123', inner))
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], ['123'])
self.assertEqual(renderer.template, widget.readonly_template)
def test_serialize_with_sequence_widgets(self):
renderer = DummyRenderer('abc')
schema = DummySchema()
field = DummyField(schema, renderer)
widget = self._makeOne()
inner = DummyField()
field.children = [inner]
sequence_field = DummyField()
field.sequence_fields = [sequence_field]
result = widget.serialize(field, ['123'])
self.assertEqual(result, 'abc')
subfields = renderer.kw['subfields']
self.assertEqual(len(subfields), 1)
self.assertEqual(subfields[0], ('123', sequence_field))
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], ['123'])
self.assertEqual(renderer.template, widget.template)
def test_deserialize_null(self):
field = DummyField()
inner_field = DummyField()
field.children = [inner_field]
widget = self._makeOne()
result = widget.deserialize(field, colander.null)
self.assertEqual(result, [])
self.assertEqual(field.sequence_fields, [])
def test_deserialize_not_null(self):
field = DummyField()
inner_field = DummyField()
inner_field.widget = DummyWidget()
field.children = [inner_field]
widget = self._makeOne()
result = widget.deserialize(field, ['123'])
self.assertEqual(result, ['123'])
self.assertEqual(len(field.sequence_fields), 1)
self.assertEqual(field.sequence_fields[0], inner_field)
def test_deserialize_error(self):
field = DummyField()
inner_field = DummyField()
inner_field.widget = DummyWidget(
exc=colander.Invalid(inner_field, 'wrong', 'a'))
field.children = [inner_field]
widget = self._makeOne()
e = invalid_exc(widget.deserialize, field, ['123'])
self.assertEqual(e.value, ['a'])
self.assertEqual(e.children[0].value, 'a')
def test_handle_error(self):
field = DummyField()
widget = self._makeOne()
inner_widget = DummyWidget()
inner_invalid = DummyInvalid()
inner_invalid.pos = 0
error = DummyInvalid(inner_invalid)
inner_field = DummyField()
inner_field.widget = inner_widget
field.sequence_fields = [inner_field]
widget.handle_error(field, error)
self.assertEqual(field.error, error)
self.assertEqual(inner_widget.error, inner_invalid)
def test_handle_error_already_has_error(self):
widget = self._makeOne()
widget.error = 'abc'
field = DummyField()
error = DummyInvalid()
widget.handle_error(field, error)
self.assertEqual(widget.error, 'abc')
def test_deserialize_bad_type(self):
field = DummyField()
inner_field = DummyField()
inner_field.widget = DummyWidget()
field.children = [inner_field]
widget = self._makeOne()
self.assertRaises(colander.Invalid,
widget.deserialize, field, {'x': '123'})
class TestFormWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import FormWidget
return FormWidget(**kw)
def test_template(self):
form = self._makeOne()
self.assertEqual(form.template, 'form')
class TestTextAreaCSVWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import TextAreaCSVWidget
return TextAreaCSVWidget(**kw)
def test_serialize_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_with_unparseable(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
field.unparseable = 'aloooo'
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], 'aloooo')
def test_serialize_not_None(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = [('a', '1')]
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], 'a,1\r\n')
def test_serialize_not_None_readonly(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = [('a', '1')]
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], 'a,1\r\n')
def test_deserialize(self):
widget = self._makeOne(strip=False)
field = DummyField()
pstruct = 'a,1\r\n'
result = widget.deserialize(field, pstruct)
self.assertEqual(result, [['a', '1']])
def test_deserialize_bad_csv(self):
widget = self._makeOne(strip=False)
field = DummyField()
pstruct = 'a,1\raa\r\r\n\n'
self.assertRaises(colander.Invalid, widget.deserialize, field, pstruct)
self.assertEqual(field.unparseable, pstruct)
def test_deserialize_null(self):
widget = self._makeOne(strip=False)
schema = DummySchema()
schema.required = False
field = DummyField(schema=schema)
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_emptystring(self):
widget = self._makeOne(strip=False)
schema = DummySchema()
schema.required = False
field = DummyField(schema=schema)
result = widget.deserialize(field, '')
self.assertEqual(result, colander.null)
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid, widget.deserialize, field, [])
def test_handle_error_outermost_has_msg(self):
widget = self._makeOne()
error = DummyInvalid()
error.msg = 'msg'
field = DummyField()
widget.handle_error(field, error)
self.assertEqual(field.error, error)
def test_handle_error_children_have_msgs(self):
widget = self._makeOne()
error = DummyInvalid()
inner_error1 = DummyInvalid()
inner_error1.msg = 'a'
inner_error1.pos = 0
inner_error2 = DummyInvalid()
inner_error2.msg = 'b'
inner_error2.pos = 1
error.children = [ inner_error1, inner_error2 ]
error.msg = None
field = DummyField()
field.schema = None
widget.handle_error(field, error)
self.assertEqual(field.error.msg, 'line 1: Invalid\nline 2: Invalid')
class TestTextInputCSVWidget(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import TextInputCSVWidget
return TextInputCSVWidget(**kw)
def test_serialize_null(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
widget.serialize(field, colander.null)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], '')
def test_serialize_with_unparseable(self):
widget = self._makeOne()
renderer = DummyRenderer()
field = DummyField(None, renderer=renderer)
field.unparseable = 'aloooo'
widget.serialize(field, None)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], 'aloooo')
def test_serialize_not_None(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = ('a', '1')
widget.serialize(field, cstruct)
self.assertEqual(renderer.template, widget.template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], 'a,1')
def test_serialize_not_None_readonly(self):
widget = self._makeOne()
renderer = DummyRenderer()
schema = DummySchema()
field = DummyField(schema, renderer=renderer)
cstruct = ('a', '1')
widget.serialize(field, cstruct, readonly=True)
self.assertEqual(renderer.template, widget.readonly_template)
self.assertEqual(renderer.kw['field'], field)
self.assertEqual(renderer.kw['cstruct'], 'a,1')
def test_deserialize(self):
widget = self._makeOne(strip=False)
field = DummyField()
pstruct = 'a,1\r\n'
result = widget.deserialize(field, pstruct)
self.assertEqual(result, ['a', '1'])
def test_deserialize_bad_csv(self):
widget = self._makeOne(strip=False)
field = DummyField()
pstruct = 'a,1\raa\r\r\n\n'
self.assertRaises(colander.Invalid, widget.deserialize, field, pstruct)
self.assertEqual(field.unparseable, pstruct)
def test_deserialize_null(self):
widget = self._makeOne(strip=False)
schema = DummySchema()
schema.required = False
field = DummyField(schema=schema)
result = widget.deserialize(field, colander.null)
self.assertEqual(result, colander.null)
def test_deserialize_emptystring(self):
widget = self._makeOne(strip=False)
schema = DummySchema()
schema.required = False
field = DummyField(schema=schema)
result = widget.deserialize(field, '')
self.assertEqual(result, colander.null)
def test_deserialize_bad_type(self):
widget = self._makeOne()
field = DummyField()
self.assertRaises(colander.Invalid, widget.deserialize, field, [])
def test_handle_error_outermost_has_msg(self):
widget = self._makeOne()
error = DummyInvalid()
error.msg = 'msg'
field = DummyField()
widget.handle_error(field, error)
self.assertEqual(field.error, error)
def test_handle_error_children_have_msgs(self):
widget = self._makeOne()
error = DummyInvalid()
inner_error1 = DummyInvalid()
inner_error1.msg = 'a'
inner_error2 = DummyInvalid()
inner_error2.msg = 'b'
error.children = [ inner_error1, inner_error2 ]
error.msg = None
field = DummyField()
field.schema = None
widget.handle_error(field, error)
self.assertEqual(field.error.msg, 'Invalid\nInvalid')
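# ResourceRegistry maps the (library, version) requirements declared by
# widgets to the JS/CSS assets that must be included on the page.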
class TestResourceRegistry(unittest.TestCase):
def _makeOne(self, **kw):
from deform.widget import ResourceRegistry
return ResourceRegistry(**kw)
def test_use_defaults(self):
from deform.widget import default_resources
reg = self._makeOne()
self.assertEqual(reg.registry, default_resources)
def test_dont_use_defaults(self):
from deform.widget import default_resources
reg = self._makeOne(use_defaults=False)
self.assertNotEqual(reg.registry, default_resources)
def test_set_js_resources(self):
reg = self._makeOne()
reg.set_js_resources('abc', '123', 1, 2)
self.assertEqual(reg.registry['abc']['123']['js'], (1,2))
def test_set_css_resources(self):
reg = self._makeOne()
reg.set_css_resources('abc', '123', 1, 2)
self.assertEqual(reg.registry['abc']['123']['css'], (1,2))
def test___call___no_requirement(self):
reg = self._makeOne()
self.assertRaises(ValueError, reg.__call__, ( ('abc', 'def'), ))
def test___call___no_version(self):
reg = self._makeOne()
reg.registry = {'abc':{'123':{'js':(1,2)}}}
self.assertRaises(ValueError, reg.__call__, ( ('abc', 'def'), ))
def test___call___(self):
reg = self._makeOne()
reg.registry = {'abc':{'123':{'js':(1,2)}}}
result = reg([('abc', '123')])
self.assertEqual(result, {'js':[1,2], 'css':[]})
def test___call___leaf_isnt_iterable(self):
reg = self._makeOne()
reg.registry = {'abc':{'123':{'js':'123', 'css':'2'}}}
result = reg([('abc', '123')])
self.assertEqual(result, {'js':['123'], 'css':['2']})
class TestNormalizeChoices(unittest.TestCase):
def _call(self, values):
from deform.widget import _normalize_choices
return _normalize_choices(values)
def test_empty(self):
self.assertEqual(self._call(()), [])
def test_string(self):
self.assertEqual(self._call((('value', 'description'),)),
[('value', 'description')])
def test_text_type(self):
self.assertEqual(self._call(((text_type('value'), 'description'),)),
[('value', 'description')])
def test_integer(self):
self.assertEqual(self._call(((1, 'description'),)),
[('1', 'description')])
def test_optgroup_and_tuple(self):
from deform.widget import OptGroup
optgroup = OptGroup('label', (2, 'two'))
normalized = self._call(((1, 'description'), optgroup))
self.assertEqual(len(normalized), 2)
self.assertEqual(normalized[0], ('1', 'description'))
self.assertTrue(isinstance(normalized[1], OptGroup))
self.assertEqual(normalized[1].label, 'label')
self.assertEqual(normalized[1].options, (('2', 'two'), ))
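# Test doubles used throughout this module: each records just enough state
# (rendered template name, keyword arguments, raised errors) for the
# assertions above, standing in for deform's real renderer, widget, schema,
# field, tmpstore, and upload objects.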
class DummyRenderer(object):
def __init__(self, result=''):
self.result = result
def __call__(self, template, **kw):
self.template = template
self.kw = kw
return self.result
class DummyWidget(object):
name = 'name'
def __init__(self, exc=None):
self.exc = exc
def deserialize(self, field, pstruct):
if self.exc:
raise self.exc
return pstruct
def handle_error(self, field, error):
self.error = error
class DummySchema(object):
pass
class DummyInvalid(object):
pos = 0
def __init__(self, *children):
self.children = children
def __str__(self):
return 'Invalid'
class DummyField(object):
default = None
error = None
children = ()
title = 'title'
description = 'description'
name = 'name'
cloned = False
oid = 'deformField1'
required = True
cstruct = colander.null
def __init__(self, schema=None, renderer=None, translations=None):
self.schema = schema
self.renderer = renderer
self.translations = translations
def clone(self):
self.cloned = True
return self
def deserialize(self, pstruct):
return self.widget.deserialize(self, pstruct)
def translate(self, term):
if self.translations is None:
return term
return self.translations.get(term, term)
def render_template(self, template, **kw):
return self.renderer(template, **kw)
class DummyTmpStore(dict):
def preview_url(self, uid):
return 'http://localhost/%s' % self[uid]['filename']
class DummyUpload(object):
file = 'fp'
filename = 'filename'
type = 'mimetype'
length = 'size'
def __nonzero__(self): # pragma: no cover
# Python 2 truth hook: cgi.FieldStorage instances for file uploads are falsey
return False
# --- next dataset record (quality-signal columns for the preceding file omitted) ---
# hexsha 075f06a471c0b34b5d964a4bdb738bcd2b11fe90: ports/esp32/font.py
# from kekemuyu/linewatch @ 2cbba739a3773dafc8ebbe46cb1f1ce3b467c4bb
# (Python, 21,218 bytes, MIT license, 22 stars)
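# A minimal usage sketch, not part of the original file: assuming each glyph
# below is a flat list of MSB-first row bytes for a 1-bpp bitmap, it could be
# blitted onto a MicroPython framebuf-style display like this.  `fb` is
# assumed to expose a framebuf-compatible pixel(x, y, c) method; `width` is
# 32 for the icons and 16 for the narrower glyphs.
# Example: draw_glyph(display, Font.icon[0], 0, 0) would render the clock icon.
def draw_glyph(fb, data, x, y, width=32, color=1):
    bytes_per_row = width // 8
    rows = len(data) // bytes_per_row
    for row in range(rows):
        for col in range(width):
            # test the MSB-first bit for this pixel; set only lit pixels
            if data[row * bytes_per_row + col // 8] & (0x80 >> (col % 8)):
                fb.pixel(x + col, y + row, color)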
class Font:
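    # Glyph data: each entry appears to be a flat list of MSB-first row bytes
    # for a 1-bpp bitmap.  `icon` entries are 32x32 (4 bytes per row, 128
    # bytes); `hanzi` mixes 32-px-wide CJK glyphs with 16-px-wide ASCII ones
    # (W/I/F); `nums_small` digits are 16 px wide.  Keys are glyph indices.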
icon={
0: #clock
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1F,0xF8,0x00,0x00,0x7F,0xFE,0x00,
0x01,0xF8,0x1F,0x80,0x03,0xE0,0x07,0xC0,0x07,0x80,0x01,0xE0,0x0F,0x00,0x00,0xF0,
0x0E,0x01,0x80,0x70,0x1C,0x01,0x80,0x38,0x1C,0x01,0x80,0x38,0x38,0x01,0x80,0x1C,
0x38,0x01,0x80,0x1C,0x30,0x01,0x80,0x0C,0x30,0x01,0x80,0x0C,0x30,0x01,0xFE,0x0C,
0x30,0x01,0xFE,0x0C,0x30,0x00,0x00,0x0C,0x30,0x00,0x00,0x0C,0x38,0x00,0x00,0x1C,
0x38,0x00,0x00,0x1C,0x1C,0x00,0x00,0x38,0x1C,0x00,0x00,0x38,0x0E,0x00,0x00,0x70,
0x0F,0x00,0x00,0xF0,0x07,0x80,0x01,0xE0,0x03,0xE0,0x07,0xC0,0x01,0xF8,0x1F,0x80,
0x00,0x7F,0xFE,0x00,0x00,0x1F,0xF8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],
1: #alarm
[0x00,0x00,0x00,0x00,0x00,0x03,0xC0,0x00,0x07,0xC3,0xC3,0xE0,0x1F,0xE7,0xE7,0xF8,
0x3F,0xFF,0xFF,0xFC,0x3F,0xFF,0xFF,0xFC,0x7F,0xFE,0x7F,0xFE,0x7F,0xF0,0x0F,0xFE,
0x7F,0xC1,0x83,0xFE,0x7F,0x81,0x81,0xFE,0x3F,0x01,0x80,0xFC,0x3E,0x01,0x80,0x7C,
0x1E,0x01,0x80,0x78,0x1C,0x01,0x80,0x38,0x1C,0x01,0x80,0x38,0x1C,0x03,0xC0,0x38,
0x1C,0x03,0xC0,0x38,0x1C,0x03,0xE0,0x38,0x1C,0x06,0xF0,0x38,0x1C,0x0C,0x78,0x38,
0x1E,0x18,0x38,0x78,0x1E,0x30,0x10,0x78,0x0F,0x20,0x00,0xF0,0x0F,0x80,0x01,0xF0,
0x07,0xC0,0x03,0xE0,0x03,0xE0,0x07,0xC0,0x01,0xFC,0x3F,0x80,0x03,0xFF,0xFF,0xC0,
0x07,0xFF,0xFF,0xE0,0x0F,0x0F,0xF0,0xF0,0x06,0x00,0x00,0x60,0x00,0x00,0x00,0x00],
2: #set
[0x00,0x03,0xC0,0x00,0x00,0x03,0xC0,0x00,0x00,0x07,0xC0,0x00,0x02,0x07,0xE0,0x40,
0x07,0x07,0xE0,0xE0,0x0F,0x9F,0xF9,0xF0,0x1F,0xFF,0xFF,0xF0,0x0F,0xFF,0xFF,0xF0,
0x07,0xF8,0x1F,0xE0,0x03,0xE0,0x07,0xC0,0x03,0xC7,0xE3,0xC0,0x07,0x8F,0xF1,0xE0,
0x07,0x9F,0xF9,0xE0,0x3F,0x3F,0xFC,0xFC,0xFF,0x3F,0xFC,0xFF,0xFF,0x3F,0xFC,0xFF,
0xFF,0x3F,0xFC,0xFF,0xFF,0x3F,0xFC,0xFF,0x1F,0x3F,0xFC,0xF8,0x07,0x9F,0xF9,0xE0,
0x07,0x8F,0xF1,0xE0,0x03,0xC7,0xE3,0xC0,0x03,0xE0,0x07,0xC0,0x07,0xF8,0x1F,0xE0,
0x0F,0xFF,0xFF,0xF0,0x1F,0xFF,0xFF,0xF8,0x0F,0x9F,0xF9,0xF0,0x07,0x07,0xE0,0xE0,
0x00,0x07,0xE0,0x40,0x00,0x07,0xC0,0x00,0x00,0x03,0xC0,0x00,0x00,0x03,0xC0,0x00],
3: #weather
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x0F,0xC0,0x00,0x00,0x30,0x30,0x00,0x00,0x40,0x08,0x00,
0x00,0x80,0x04,0x00,0x07,0x80,0x02,0x00,0x08,0x70,0x02,0x00,0x10,0x00,0x03,0xE0,
0x20,0x00,0x02,0x10,0x20,0x00,0x00,0x08,0x20,0x00,0x00,0x04,0x20,0x00,0x00,0x04,
0x20,0x00,0x00,0x04,0x20,0x00,0x00,0x04,0x20,0x00,0x00,0x04,0x10,0x00,0x00,0x08,
0x08,0x00,0x00,0x10,0x07,0xFF,0xFF,0xE0,0x00,0x00,0x00,0x00],
4: #appstore
[0x00,0x00,0x00,0x00,0x3F,0x00,0x00,0x00,0x7F,0x80,0x00,0x00,0x7F,0xE0,0x00,0x00,
0xFF,0xF8,0x00,0x00,0xFF,0xFE,0x00,0x00,0xF3,0xFF,0x80,0x00,0xF1,0xFF,0xE0,0x00,
0xF0,0xF7,0xF8,0x00,0xF0,0x79,0xFE,0x00,0xF0,0x3C,0x3F,0x00,0xF0,0x3E,0x1F,0xC0,
0xF0,0x1F,0x9F,0xF8,0xF0,0x0F,0xFF,0xFE,0xF0,0x07,0xFC,0x7E,0xF0,0x03,0xF8,0x3E,
0xF0,0x03,0xF8,0x3E,0xF0,0x07,0xFC,0x7E,0xF0,0x0F,0xFF,0xFE,0xF0,0x1F,0x9F,0xF8,
0xF0,0x3E,0x1F,0xC0,0xF0,0x7C,0x3F,0x00,0xF0,0xF9,0xFE,0x00,0xF1,0xF7,0xF8,0x00,
0xF3,0xFF,0xE0,0x00,0xF3,0xFF,0x80,0x00,0xFF,0xFE,0x00,0x00,0xFF,0xF8,0x00,0x00,
0x7F,0xE0,0x00,0x00,0x7F,0x80,0x00,0x00,0x3F,0x00,0x00,0x00,0x00,0x00,0x00,0x00],
5: #compass
[0x00,0x00,0x00,0x00,0x00,0x07,0xE0,0x00,0x00,0x3F,0xFC,0x00,0x00,0xFF,0xFF,0x00,
0x01,0xFF,0xFF,0x80,0x07,0xF0,0x0F,0xE0,0x0F,0xC0,0x03,0xF0,0x0F,0x00,0x00,0xF0,
0x1E,0x00,0x00,0x78,0x3C,0x00,0x00,0x3C,0x3C,0x00,0x0C,0x3C,0x38,0x00,0x3C,0x1C,
0x78,0x00,0xF8,0x1E,0x78,0x03,0xF8,0x1E,0x70,0x07,0xF0,0x0E,0x70,0x04,0xF0,0x0E,
0x70,0x08,0x60,0x0E,0x70,0x08,0x60,0x0E,0x78,0x10,0xC0,0x1E,0x78,0x13,0x00,0x1E,
0x38,0x3C,0x00,0x1C,0x3C,0x30,0x00,0x3C,0x3C,0x00,0x00,0x3C,0x1E,0x00,0x00,0x78,
0x0F,0x00,0x00,0xF0,0x0F,0xC0,0x03,0xF0,0x07,0xF0,0x0F,0xE0,0x01,0xFF,0xFF,0x80,
0x00,0xFF,0xFF,0x00,0x00,0x3F,0xFC,0x00,0x00,0x07,0xE0,0x00,0x00,0x00,0x00,0x00]
}
hanzi={
0: # 时 ("time"; 时钟 = "clock")
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x03,0xC0,
0x00,0x00,0x03,0x80,0x30,0x30,0x03,0x80,0x3F,0xF8,0x03,0x80,0x38,0x78,0x03,0x80,
0x38,0x70,0x03,0x98,0x38,0x70,0x03,0xBC,0x38,0x7F,0xFF,0xFE,0x38,0x70,0x03,0x86,
0x38,0x70,0x03,0x80,0x38,0x70,0x03,0x80,0x38,0x77,0x03,0x80,0x3F,0xF3,0x83,0x80,
0x38,0x71,0xC3,0x80,0x38,0x71,0xE3,0x80,0x38,0x70,0xE3,0x80,0x38,0x70,0xE3,0x80,
0x38,0x70,0x03,0x80,0x38,0x70,0x03,0x80,0x38,0x70,0x03,0x80,0x3F,0xF0,0x03,0x80,
0x38,0x70,0x03,0x80,0x38,0x70,0x03,0x80,0x38,0x60,0x03,0x80,0x30,0x00,0x07,0x80,
0x00,0x00,0x7F,0x80,0x00,0x00,0x0F,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
1: # 钟 ("clock/bell")
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x0C,0x00,0x07,0xC0,0x0F,0x00,
0x07,0x80,0x0E,0x00,0x07,0x00,0x0E,0x00,0x07,0x1C,0x0E,0x00,0x0F,0xFC,0x0E,0x00,
0x0E,0x06,0x0E,0x00,0x0E,0x03,0x8E,0x3C,0x1C,0x03,0xFF,0xFC,0x1C,0x33,0x8E,0x38,
0x3F,0xFB,0x8E,0x38,0x33,0x8F,0x8E,0x38,0x73,0x83,0x8E,0x38,0x63,0x83,0x8E,0x38,
0x43,0x83,0x8E,0x38,0x03,0x9F,0x8E,0x38,0x7F,0xFF,0xFF,0xF8,0x03,0x87,0x8E,0x38,
0x03,0x83,0x0E,0x00,0x03,0x80,0x0E,0x00,0x03,0x86,0x0E,0x00,0x03,0x9C,0x0E,0x00,
0x03,0xB8,0x0E,0x00,0x03,0xF0,0x0E,0x00,0x03,0xE0,0x0E,0x00,0x07,0xC0,0x0E,0x00,
0x03,0x80,0x0E,0x00,0x00,0x00,0x0E,0x00,0x00,0x00,0x0C,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
3: # 闹 ("noisy"; 闹钟 = "alarm clock")
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x80,0x00,0x00,0x01,0xC0,0x00,0x30,
0x01,0xEF,0xFF,0xF8,0x00,0xE0,0x00,0x78,0x18,0xE0,0x00,0x70,0x1E,0xC7,0x00,0x70,
0x1E,0x03,0xC0,0x70,0x1E,0x03,0xC0,0x70,0x1E,0x01,0x87,0x70,0x1F,0xFF,0xFF,0xF0,
0x1E,0x03,0xC1,0xF0,0x1E,0x03,0xC0,0x70,0x1E,0x03,0xC0,0x70,0x1E,0x63,0xCE,0x70,
0x1E,0x7F,0xFF,0x70,0x1E,0x73,0xCE,0x70,0x1E,0x73,0xCE,0x70,0x1E,0x73,0xCE,0x70,
0x1E,0x73,0xCE,0x70,0x1E,0x73,0xCE,0x70,0x1E,0x73,0xCE,0x70,0x1E,0x73,0xFE,0x70,
0x1E,0x63,0xDC,0x70,0x1E,0x03,0xC0,0x70,0x1E,0x03,0xC0,0x70,0x1E,0x03,0xC0,0x70,
0x1E,0x03,0x07,0xF0,0x1E,0x00,0x00,0xF0,0x1C,0x00,0x00,0xC0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
4: # 天 ("sky/day")
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0xC0,
0x00,0x00,0x03,0xE0,0x0F,0xFF,0xFF,0xF0,0x00,0x03,0x80,0x00,0x00,0x03,0x80,0x00,
0x00,0x03,0x80,0x00,0x00,0x03,0x80,0x00,0x00,0x07,0x80,0x00,0x00,0x07,0x80,0x00,
0x00,0x07,0x80,0x70,0x00,0x07,0x80,0xF8,0x7F,0xFF,0xFF,0xFC,0x00,0x07,0xC0,0x0C,
0x00,0x07,0xE0,0x00,0x00,0x07,0x60,0x00,0x00,0x0F,0x70,0x00,0x00,0x0E,0x30,0x00,
0x00,0x1E,0x38,0x00,0x00,0x1C,0x38,0x00,0x00,0x3C,0x1C,0x00,0x00,0x38,0x0E,0x00,
0x00,0x70,0x0F,0x00,0x00,0xE0,0x07,0x80,0x01,0xC0,0x03,0xE0,0x03,0x80,0x01,0xF8,
0x07,0x00,0x00,0xFE,0x1C,0x00,0x00,0x78,0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
5: # 气 ("air"; 天气 = "weather")
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xE0,0x00,0x00,0x00,0xF0,0x00,0x00,
0x01,0xF0,0x00,0x00,0x01,0xC0,0x00,0xE0,0x03,0xC0,0x01,0xF0,0x03,0xFF,0xFF,0xF8,
0x07,0x80,0x00,0x00,0x07,0x00,0x03,0x80,0x0E,0x00,0x07,0xC0,0x0E,0xFF,0xFE,0xC0,
0x1C,0x00,0x00,0x00,0x38,0x00,0x06,0x00,0x37,0xFF,0xFF,0x80,0x63,0x00,0x07,0x80,
0x00,0x00,0x07,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x07,0x00,
0x00,0x00,0x07,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x07,0x00,
0x00,0x00,0x07,0x8C,0x00,0x00,0x03,0x8C,0x00,0x00,0x03,0xDC,0x00,0x00,0x01,0xFC,
0x00,0x00,0x01,0xFC,0x00,0x00,0x00,0x7C,0x00,0x00,0x00,0x1C,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00 ],
6: # 设 ("set up")
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x03,0x83,0x80,
0x07,0x83,0xFF,0xC0,0x03,0xC3,0xC3,0x80,0x03,0xC3,0xC3,0x80,0x03,0x83,0xC3,0x80,
0x00,0x03,0x83,0x80,0x00,0x03,0x83,0x80,0x00,0x03,0x83,0x80,0x01,0x87,0x03,0xFE,
0x7F,0xCF,0x03,0xFE,0x33,0xDE,0x00,0x00,0x03,0xB8,0x00,0xC0,0x03,0xFF,0xFF,0xF0,
0x03,0xE1,0x81,0xF0,0x03,0x81,0x81,0xC0,0x03,0x81,0xC3,0xC0,0x03,0x80,0xC3,0x80,
0x03,0x80,0xE7,0x80,0x03,0x8C,0x67,0x00,0x03,0xB8,0x7F,0x00,0x03,0xF0,0x3E,0x00,
0x03,0xE0,0x3C,0x00,0x03,0xE0,0x7E,0x00,0x07,0xC0,0xFF,0x80,0x01,0x81,0xE3,0xF0,
0x00,0x07,0x81,0xFE,0x00,0x3E,0x00,0x78,0x01,0xF0,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
7: # 置 ("place"; 设置 = "settings")
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x60,0x0F,0xFF,0xFF,0xF0,
0x0F,0x1C,0x38,0xF0,0x0F,0x1C,0x38,0xF0,0x0F,0x1C,0x38,0xF0,0x0F,0x1C,0x38,0xF0,
0x0F,0xFF,0xFF,0xF0,0x0F,0x03,0x00,0xF0,0x00,0x03,0xC0,0x60,0x00,0x03,0x80,0xF0,
0x1F,0xFF,0xFF,0xF8,0x00,0x03,0x80,0x18,0x01,0x87,0x03,0x80,0x01,0xFF,0xFF,0xC0,
0x01,0xC0,0x07,0x80,0x01,0xC0,0x07,0x80,0x01,0xFF,0xFF,0x80,0x01,0xC0,0x07,0x80,
0x01,0xC0,0x07,0x80,0x01,0xC0,0x07,0x80,0x01,0xFF,0xFF,0x80,0x01,0xC0,0x07,0x80,
0x01,0xC0,0x07,0x80,0x01,0xFF,0xFF,0x80,0x01,0xC0,0x07,0x80,0x01,0xC0,0x07,0x98,
0x01,0xC0,0x07,0xBC,0xFF,0xFF,0xFF,0xFE,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
8: # 配 ("match"; 配置 = "configuration")
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,
0x00,0x07,0x80,0x38,0x7F,0xFE,0xFF,0xFC,0x03,0xF8,0x00,0x38,0x03,0xF8,0x00,0x38,
0x33,0xFB,0x00,0x38,0x3F,0xFF,0x80,0x38,0x3F,0xFF,0x00,0x38,0x3F,0xFF,0x00,0x38,
0x3F,0xFF,0x00,0x38,0x3F,0xFF,0x70,0x38,0x3F,0xFF,0x7F,0xF8,0x3F,0x7F,0x78,0x38,
0x3F,0x7F,0x78,0x00,0x3E,0x3F,0x78,0x00,0x3E,0x07,0x78,0x00,0x3C,0x07,0x78,0x00,
0x3C,0x07,0x78,0x00,0x3F,0xFF,0x78,0x00,0x3C,0x07,0x78,0x0C,0x3C,0x07,0x78,0x0C,
0x3C,0x07,0x78,0x0C,0x3C,0x07,0x78,0x1C,0x3C,0x07,0x78,0x1C,0x3F,0xFF,0x78,0x1E,
0x3C,0x07,0x3F,0xFE,0x3C,0x07,0x00,0x00,0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
9: # W
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFF,0xFF,0x73,0xC6,
0x71,0xC6,0x71,0xCE,0x71,0xCE,0x79,0xCC,0x7B,0xCC,0x3B,0xEC,0x3B,0xFC,0x3B,0xFC,
0x3B,0xF8,0x3F,0xF8,0x3F,0xF8,0x1E,0xF8,0x1E,0xF8,0x1E,0x78,0x1E,0x70,0x1E,0x70,
0x1C,0x70,0x0C,0x70,0x0C,0x70,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00],
10: # I
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3F,0xFC,0x03,0xC0,
0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,
0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,
0x03,0xC0,0x03,0xC0,0x3F,0xFC,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00],
11: # F
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFF,0xFE,0x3C,0x1E,
0x3C,0x0E,0x3C,0x07,0x3C,0x03,0x3C,0x00,0x3C,0x38,0x3C,0x38,0x3C,0x38,0x3F,0xF8,
0x3C,0x38,0x3C,0x38,0x3C,0x38,0x3C,0x00,0x3C,0x00,0x3C,0x00,0x3C,0x00,0x3C,0x00,
0x3C,0x00,0x3C,0x00,0xFF,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00],
12: # 应 ("respond"; 应用 = "application")
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0xC0,0x00,0x00,0x00,0xE0,0x00,
0x00,0x00,0xF0,0x00,0x00,0x00,0xF0,0x38,0x0E,0x00,0x60,0x7C,0x0F,0xFF,0xFF,0xFE,
0x0E,0x00,0x00,0x00,0x0E,0x00,0x00,0x00,0x0E,0x00,0x00,0x00,0x0E,0x03,0x80,0x70,
0x0E,0xC1,0x80,0x78,0x0E,0x61,0xC0,0xF0,0x0E,0x70,0xE0,0xF0,0x0E,0x38,0xE0,0xE0,
0x0E,0x38,0xF0,0xE0,0x0E,0x3C,0x79,0xC0,0x0E,0x1C,0x79,0xC0,0x0E,0x1E,0x79,0xC0,
0x0E,0x1E,0x7B,0x80,0x0E,0x1E,0x33,0x80,0x0E,0x0E,0x03,0x00,0x1C,0x00,0x07,0x00,
0x1C,0x00,0x06,0x00,0x1C,0x00,0x0E,0x18,0x38,0x00,0x0E,0x3C,0x38,0x00,0x0C,0x7E,
0x37,0xFF,0xFF,0xC6,0x60,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
13:#用
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x70,
0x07,0xFF,0xFF,0xF8,0x07,0x01,0xC0,0x70,0x07,0x01,0xC0,0x70,0x07,0x01,0xC0,0x70,
0x07,0x01,0xC0,0x70,0x07,0x01,0xC0,0x70,0x07,0x01,0xC0,0x70,0x07,0xFF,0xFF,0xF0,
0x07,0x01,0xC0,0x70,0x07,0x01,0xC0,0x70,0x07,0x01,0xC0,0x70,0x07,0x01,0xC0,0x70,
0x07,0x01,0xC0,0x70,0x07,0x01,0xC0,0x70,0x07,0x01,0xC0,0x70,0x07,0xFF,0xFF,0xF0,
0x0F,0x01,0xC0,0x70,0x0F,0x01,0xC0,0x70,0x0E,0x01,0xC0,0x70,0x0E,0x01,0xC0,0x70,
0x0E,0x01,0xC0,0x70,0x1C,0x01,0xC0,0x70,0x1C,0x01,0xC0,0x70,0x38,0x01,0xC0,0xF0,
0x38,0x01,0xCF,0xF0,0x70,0x01,0x81,0xE0,0x60,0x00,0x00,0xC0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
14:#指
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x83,0x00,0x00,0x03,0xE3,0xE0,0x00,
0x03,0xC3,0xC0,0xE0,0x03,0xC3,0xC1,0xF0,0x03,0xC3,0xC7,0xF0,0x03,0xC3,0xDF,0x00,
0x03,0xDF,0xF8,0x18,0x7F,0xFF,0xE0,0x18,0x03,0xC7,0xC0,0x18,0x03,0xC3,0xC0,0x18,
0x03,0xC3,0xC0,0x3C,0x03,0xCF,0xFF,0xFC,0x03,0xDC,0xFF,0xF8,0x03,0xF0,0x00,0x00,
0x03,0xC0,0x00,0x60,0x0F,0xC3,0xC0,0x78,0x3F,0xC3,0xFF,0xF8,0x7F,0xC3,0xC0,0x70,
0x33,0xC3,0xC0,0x70,0x03,0xC3,0xC0,0x70,0x03,0xC3,0xC0,0x70,0x03,0xC3,0xFF,0xF0,
0x03,0xC3,0xC0,0x70,0x03,0xC3,0xC0,0x70,0x03,0xC3,0xC0,0x70,0x03,0xC3,0xFF,0xF0,
0x3F,0xC3,0xC0,0x70,0x07,0x83,0xC0,0x70,0x03,0x03,0x80,0x60,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
15:#南
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x80,0x00,0x00,0x03,0xC0,0x00,
0x00,0x03,0xC0,0x00,0x00,0x03,0x80,0x38,0x00,0x03,0x80,0x7C,0x7F,0xFF,0xFF,0xFE,
0x00,0x03,0x80,0x00,0x00,0x03,0x80,0x00,0x0C,0x03,0x80,0x60,0x0F,0xFF,0xFF,0xF8,
0x0E,0x00,0x30,0x78,0x0E,0x30,0x38,0x70,0x0E,0x1C,0x3C,0x70,0x0E,0x1E,0x70,0x70,
0x0E,0x0E,0x70,0x70,0x0E,0x0C,0xEE,0x70,0x0E,0xFF,0xFF,0x70,0x0E,0x03,0x81,0x70,
0x0E,0x03,0x80,0x70,0x0E,0x03,0x87,0x70,0x0F,0xFF,0xFF,0xF0,0x0E,0x03,0x80,0xF0,
0x0E,0x03,0x80,0x70,0x0E,0x03,0x80,0x70,0x0E,0x03,0x80,0x70,0x0E,0x03,0x9C,0xF0,
0x0E,0x03,0x8F,0xF0,0x0E,0x03,0x81,0xE0,0x0C,0x00,0x01,0xC0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00],
16:#针
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x0C,0x00,0x03,0xC0,0x0F,0x00,
0x07,0x80,0x0E,0x00,0x07,0x80,0x0E,0x00,0x07,0x1C,0x0E,0x00,0x0F,0x3E,0x0E,0x00,
0x0F,0xF6,0x0E,0x00,0x0E,0x00,0x0E,0x00,0x1C,0x00,0x0E,0x00,0x1C,0x38,0x0E,0x1C,
0x3F,0xFC,0x0E,0x3E,0x3B,0x8F,0xFF,0xFE,0x73,0x80,0x0E,0x00,0x63,0x80,0x0E,0x00,
0x43,0x80,0x0E,0x00,0x03,0x9C,0x0E,0x00,0x7F,0xFE,0x0E,0x00,0x03,0x83,0x0E,0x00,
0x03,0x80,0x0E,0x00,0x03,0x80,0x0E,0x00,0x03,0x86,0x0E,0x00,0x03,0x9E,0x0E,0x00,
0x03,0xB8,0x0E,0x00,0x03,0xF0,0x0E,0x00,0x03,0xE0,0x0E,0x00,0x07,0xC0,0x0E,0x00,
0x03,0x80,0x0E,0x00,0x01,0x00,0x0E,0x00,0x00,0x00,0x0C,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00]
}
nums_small={
0:
[0x00,0x00,0x00,0x00,0x00,0x00,0x1E,0x00,0x3F,0x00,0x73,0x80,0x63,0x80,0xE1,0xC0,
0xE1,0xC0,0xE1,0xC0,0xE1,0xC0,0xE1,0xC0,0x63,0x80,0x73,0x80,0x3F,0x00,0x1E,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,],
1:
[0x00,0x00,0x00,0x00,0x00,0x00,0x0C,0x00,0x3C,0x00,0x0C,0x00,0x0C,0x00,0x0C,0x00,
0x0C,0x00,0x0C,0x00,0x0C,0x00,0x0C,0x00,0x0C,0x00,0x0C,0x00,0x0E,0x00,0x3F,0x80,
0x00,0x00,0x00,0x00,0x00,0x00,],
2:
[0x00,0x00,0x00,0x00,0x00,0x00,0x3F,0x00,0x73,0x80,0x61,0x80,0x71,0x80,0x01,0x80,
0x03,0x80,0x07,0x00,0x0E,0x00,0x1C,0x00,0x38,0x00,0x71,0x80,0xE1,0x80,0xFF,0x80,
0x00,0x00,0x00,0x00,0x00,0x00,],
3:
[0x00,0x00,0x00,0x00,0x00,0x00,0x3F,0x00,0x63,0x80,0x63,0x80,0x73,0x80,0x07,0x00,
0x1E,0x00,0x03,0x00,0x03,0x80,0x01,0x80,0x71,0x80,0x73,0x80,0x73,0x80,0x3F,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,],
4:
[0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x0F,0x00,0x0F,0x00,0x1F,0x00,0x3F,0x00,
0x77,0x00,0xE7,0x00,0xE7,0x00,0xFF,0xC0,0x07,0x00,0x07,0x00,0x07,0x00,0x1F,0x80,
0x00,0x00,0x00,0x00,0x00,0x00,],
5:
[0x00,0x00,0x00,0x00,0x00,0x00,0x7F,0x80,0x60,0x00,0x60,0x00,0x60,0x00,0x60,0x00,
0x7F,0x00,0x73,0x80,0x01,0x80,0x01,0x80,0x71,0x80,0x61,0x80,0x73,0x80,0x3F,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,],
6:
[0x00,0x00,0x00,0x00,0x00,0x00,0x1F,0x00,0x3B,0x80,0x73,0x80,0x60,0x00,0xE0,0x00,
0xFF,0x00,0xF3,0x80,0xE1,0xC0,0xE1,0xC0,0xE1,0xC0,0x71,0x80,0x7B,0x80,0x3F,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,],
7:
[0x00,0x00,0x00,0x00,0x00,0x00,0x7F,0x80,0x63,0x80,0x63,0x00,0x07,0x00,0x06,0x00,
0x0E,0x00,0x0C,0x00,0x0C,0x00,0x1C,0x00,0x1C,0x00,0x1C,0x00,0x1C,0x00,0x1C,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,],
8:
[0x00,0x00,0x00,0x00,0x00,0x00,0x3F,0x00,0x73,0x80,0x61,0x80,0x61,0x80,0x71,0x80,
0x3B,0x00,0x3F,0x00,0x77,0x80,0xE3,0x80,0xE1,0x80,0xE1,0x80,0x73,0x80,0x3F,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,],
9:
[0x00,0x00,0x00,0x00,0x00,0x00,0x3E,0x00,0x73,0x80,0xE3,0x80,0xE1,0x80,0xE1,0xC0,
0xE3,0xC0,0x77,0xC0,0x3F,0x80,0x03,0x80,0x03,0x80,0x73,0x80,0x77,0x00,0x3E,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,]
}
nums={
0:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0xE0,0x0E,0x70,
0x1C,0x38,0x3C,0x3C,0x38,0x1C,0x78,0x1E,0x78,0x1E,0x78,0x1E,0x78,0x1E,0x78,0x1E,
0x78,0x1E,0x78,0x1E,0x78,0x1E,0x78,0x1E,0x78,0x1E,0x78,0x1C,0x38,0x1C,0x3C,0x3C,
0x1C,0x38,0x0E,0x70,0x07,0xE0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,],
1:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xC0,0x03,0xC0,
0x1F,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,
0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,0x03,0xC0,
0x03,0xC0,0x03,0xC0,0x1F,0xFC,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,],
2:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0xF0,0x1C,0x78,
0x38,0x3C,0x78,0x3C,0x78,0x1C,0x7C,0x1C,0x38,0x1C,0x00,0x3C,0x00,0x3C,0x00,0x78,
0x00,0x70,0x00,0xE0,0x01,0xC0,0x03,0x80,0x07,0x00,0x0E,0x0E,0x1C,0x0E,0x38,0x0C,
0x70,0x1C,0x7F,0xFC,0x7F,0xFC,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,],
3:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0F,0xE0,0x1C,0xF0,
0x38,0x78,0x38,0x3C,0x38,0x3C,0x38,0x3C,0x00,0x3C,0x00,0x78,0x00,0xF0,0x07,0xE0,
0x00,0x70,0x00,0x38,0x00,0x1C,0x00,0x1E,0x00,0x1E,0x38,0x1E,0x78,0x1E,0x78,0x1C,
0x38,0x3C,0x3C,0x78,0x0F,0xE0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,],
4:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x70,0x00,0xF0,
0x00,0xF0,0x01,0xF0,0x01,0xF0,0x03,0xF0,0x07,0x70,0x06,0x70,0x0E,0x70,0x1C,0x70,
0x18,0x70,0x38,0x70,0x30,0x70,0x60,0x70,0xFF,0xFF,0x00,0x70,0x00,0x70,0x00,0x70,
0x00,0x70,0x00,0x70,0x00,0xF8,0x07,0xFE,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,],
5:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1F,0xFC,0x1F,0xFC,
0x38,0x00,0x38,0x00,0x38,0x00,0x38,0x00,0x38,0x00,0x3F,0xF0,0x3E,0x78,0x38,0x3C,
0x38,0x1C,0x00,0x1E,0x00,0x1E,0x00,0x1E,0x00,0x1E,0x38,0x1E,0x78,0x1C,0x78,0x3C,
0x38,0x3C,0x1C,0x78,0x0F,0xE0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,],
6:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0xF0,0x0F,0x38,
0x1C,0x3C,0x3C,0x3C,0x38,0x00,0x38,0x00,0x78,0x00,0x78,0x00,0x7B,0xF0,0x7E,0x78,
0x7C,0x3C,0x78,0x1E,0x78,0x1E,0x78,0x1E,0x78,0x1E,0x78,0x1E,0x78,0x1E,0x38,0x1C,
0x3C,0x3C,0x1E,0x78,0x07,0xE0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,],
7:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3F,0xFE,0x3F,0xFC,
0x38,0x1C,0x30,0x18,0x70,0x38,0x70,0x30,0x00,0x70,0x00,0x60,0x00,0xE0,0x00,0xC0,
0x01,0xC0,0x01,0xC0,0x03,0xC0,0x03,0x80,0x03,0x80,0x07,0x80,0x07,0x80,0x07,0x80,
0x07,0x80,0x07,0x80,0x07,0x80,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,],
8:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0F,0xE0,0x1C,0x78,
0x38,0x1C,0x78,0x1C,0x70,0x1E,0x78,0x1E,0x78,0x1C,0x3C,0x1C,0x1F,0x38,0x0F,0xE0,
0x0F,0xF0,0x3C,0xF8,0x38,0x3C,0x70,0x1C,0x70,0x1E,0x70,0x1E,0x70,0x1E,0x70,0x1E,
0x38,0x1C,0x1C,0x38,0x0F,0xE0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,],
9:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0F,0xE0,0x1C,0x70,
0x38,0x38,0x78,0x1C,0x78,0x1C,0x70,0x1E,0x70,0x1E,0x70,0x1E,0x78,0x1E,0x78,0x3E,
0x78,0x3E,0x3C,0x7E,0x0F,0xDE,0x00,0x1E,0x00,0x1C,0x00,0x3C,0x00,0x3C,0x3C,0x38,
0x3C,0x70,0x3C,0xE0,0x1F,0xC0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,],
}
maohao=[
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0xC0,0x07,0xC0,0x07,0xC0,
0x03,0xC0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0xC0,
0x07,0xC0,0x07,0xC0,0x03,0xC0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,]
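A minimal sketch, not part of the original file, showing how these bitmaps can be previewed: each glyph is assumed to be stored row-major at 1 bit per pixel, MSB first, so a 16-pixel-wide glyph such as maohao above uses 2 bytes per row.

def render_glyph(data, width=16):
    # Walk the byte list one row at a time and print '#' for set bits.
    bytes_per_row = width // 8
    for start in range(0, len(data), bytes_per_row):
        bits = ''.join(f'{b:08b}' for b in data[start:start + bytes_per_row])
        print(bits.replace('0', ' ').replace('1', '#'))

render_glyph(maohao)  # previews the colon glyph defined above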
| 61.501449 | 87 | 0.713828 | 3,824 | 21,218 | 3.960513 | 0.034257 | 0.459558 | 0.532453 | 0.592671 | 0.61961 | 0.550017 | 0.506174 | 0.468802 | 0.441004 | 0.396236 | 0 | 0.513025 | 0.100811 | 21,218 | 344 | 88 | 61.680233 | 0.28078 | 0.002404 | 0 | 0.253968 | 0 | 0 | 0 | 0 | 0 | 1 | 0.710102 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0.019048 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4ae1244b5cffa5810379c812ec1b2aea302bb68d | 134 | py | Python | src/test_pids.py | kelleyrw/data_science_common | 8bbc85cbb7e5dbc5c08dc6bdcf2fe915d9856d0c | ["Apache-2.0"] | null | null | null | src/test_pids.py | kelleyrw/data_science_common | 8bbc85cbb7e5dbc5c08dc6bdcf2fe915d9856d0c | ["Apache-2.0"] | null | null | null | src/test_pids.py | kelleyrw/data_science_common | 8bbc85cbb7e5dbc5c08dc6bdcf2fe915d9856d0c | ["Apache-2.0"] | null | null | null |
import pids


def test_from_int():
    assert pids.pid.from_int(15) == "F"


def test_to_int():
    assert pids.pid.to_int("F") == 15
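A hedged sketch of a pids.py module that would satisfy these two tests, assuming from_int/to_int simply convert between an integer and its hexadecimal digit (15 <-> "F"); the real module in the repository may be more involved.

class pid:
    _digits = "0123456789ABCDEF"

    @staticmethod
    def from_int(n):
        # e.g. 15 -> "F"
        return pid._digits[n]

    @staticmethod
    def to_int(s):
        # e.g. "F" -> 15
        return pid._digits.index(s.upper())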
| 13.4 | 39 | 0.641791 | 24 | 134 | 3.333333 | 0.458333 | 0.175 | 0.325 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.037037 | 0.19403 | 134 | 9 | 40 | 14.888889 | 0.703704 | 0 | 0 | 0 | 0 | 0 | 0.014925 | 0 | 0 | 0 | 0 | 0 | 0.4 | 1 | 0.4 | true | 0 | 0.2 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
ab08d013df5efc5c6a8b1bce34d3e65b1153455b | 58,389 | py | Python | infoblox_netmri/api/broker/v3_7_0/device_password_log_broker.py | NastyaArslanova/infoblox-netmri | 399d904399ba7958262c6f107fa3b0efdd55019b | ["Apache-2.0"] | null | null | null | infoblox_netmri/api/broker/v3_7_0/device_password_log_broker.py | NastyaArslanova/infoblox-netmri | 399d904399ba7958262c6f107fa3b0efdd55019b | ["Apache-2.0"] | null | null | null | infoblox_netmri/api/broker/v3_7_0/device_password_log_broker.py | NastyaArslanova/infoblox-netmri | 399d904399ba7958262c6f107fa3b0efdd55019b | ["Apache-2.0"] | null | null | null |
from ..broker import Broker
class DevicePasswordLogBroker(Broker):
    controller = "device_password_logs"

    def index(self, **kwargs):
        """Lists the available device password logs. Any of the inputs listed may be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which device password log table information was collected.
:type DeviceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which device password log table information was collected.
:type DeviceID: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogID: The internal NetMRI identifier for the device password log.
:type DevicePwLogID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogID: The internal NetMRI identifier for the device password log.
:type DevicePwLogID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the device password logs as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device password log methods. The listed methods will be called on each device password log returned and included in the output. Available methods are: device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DevicePwLogID
:param sort: The data field(s) to use for sorting the output. Default is DevicePwLogID. Valid values are DevicePwLogID, DataSourceID, DeviceID, DevicePwLogTimestamp, DevicePwLogProtocol, DevicePwLogSNMPAuthProto, DevicePwLogSNMPPrivProto, DevicePwLogStatus, DevicePwLogUsernameSecure, DevicePwLogPasswordSecure, DevicePwLogEnablePasswordSecure, DevicePwLogSNMPAuthPWSecure, DevicePwLogSNMPPrivPWSecure, SecureVersion.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DevicePasswordLog. Valid values are DevicePwLogID, DataSourceID, DeviceID, DevicePwLogTimestamp, DevicePwLogProtocol, DevicePwLogSNMPAuthProto, DevicePwLogSNMPPrivProto, DevicePwLogStatus, DevicePwLogUsernameSecure, DevicePwLogPasswordSecure, DevicePwLogEnablePasswordSecure, DevicePwLogSNMPAuthPWSecure, DevicePwLogSNMPPrivPWSecure, SecureVersion. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_password_logs: An array of the DevicePasswordLog objects that match the specified input criteria.
:rtype device_password_logs: Array of DevicePasswordLog
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
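
    # A hedged usage sketch, not part of the original source: assuming an
    # authenticated infoblox-netmri client object hands out this broker,
    # narrowing the listing to one device and a page of 100 records might
    # look like (parameter values are placeholders):
    #
    #     broker = client.get_broker('DevicePasswordLog')
    #     logs = broker.index(DeviceID=[1234], limit=100,
    #                         sort=['DevicePwLogTimestamp'])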
    def show(self, **kwargs):
        """Shows the details for the specified device password log.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DevicePwLogID: The internal NetMRI identifier for the device password log.
:type DevicePwLogID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device password log methods. The listed methods will be called on each device password log returned and included in the output. Available methods are: device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: device.
:type include: Array of String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_password_log: The device password log identified by the specified DevicePwLogID.
:rtype device_password_log: DevicePasswordLog
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
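
    # Sketch, an assumption rather than original code: fetching a single
    # record by its required identifier (42 is a placeholder ID):
    #
    #     log = broker.show(DevicePwLogID=42)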
    def search(self, **kwargs):
        """Lists the available device password logs matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which device password log table information was collected.
:type DeviceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which device password log table information was collected.
:type DeviceID: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogEnablePasswordSecure: The enable password of the device password log.
:type DevicePwLogEnablePasswordSecure: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogEnablePasswordSecure: The enable password of the device password log.
:type DevicePwLogEnablePasswordSecure: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogID: The internal NetMRI identifier for the device password log.
:type DevicePwLogID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogID: The internal NetMRI identifier for the device password log.
:type DevicePwLogID: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogPasswordSecure: The password of the device password log.
:type DevicePwLogPasswordSecure: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogPasswordSecure: The password of the device password log.
:type DevicePwLogPasswordSecure: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogProtocol: The protocol of the device password log.
:type DevicePwLogProtocol: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogProtocol: The protocol of the device password log.
:type DevicePwLogProtocol: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogSNMPAuthPWSecure: The SNMP authentication password of the device password log.
:type DevicePwLogSNMPAuthPWSecure: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogSNMPAuthPWSecure: The SNMP authentication password of the device password log.
:type DevicePwLogSNMPAuthPWSecure: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogSNMPAuthProto: The SNMP authentication protocol of the device password log.
:type DevicePwLogSNMPAuthProto: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogSNMPAuthProto: The SNMP authentication protocol of the device password log.
:type DevicePwLogSNMPAuthProto: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogSNMPPrivPWSecure: The SNMP private password of the device password log.
:type DevicePwLogSNMPPrivPWSecure: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogSNMPPrivPWSecure: The SNMP private password of the device password log.
:type DevicePwLogSNMPPrivPWSecure: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogSNMPPrivProto: The SNMP private password protocol of the device password log.
:type DevicePwLogSNMPPrivProto: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogSNMPPrivProto: The SNMP private password protocol of the device password log.
:type DevicePwLogSNMPPrivProto: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogStatus: The status of the device password log.
:type DevicePwLogStatus: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogStatus: The status of the device password log.
:type DevicePwLogStatus: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogTimestamp: The date and time this record was collected or calculated.
:type DevicePwLogTimestamp: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogTimestamp: The date and time this record was collected or calculated.
:type DevicePwLogTimestamp: Array of DateTime
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DevicePwLogUsernameSecure: The username of the device password log.
:type DevicePwLogUsernameSecure: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DevicePwLogUsernameSecure: The username of the device password log.
:type DevicePwLogUsernameSecure: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SecureVersion: The encryption version of the username and passwords.
:type SecureVersion: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SecureVersion: The encryption version of the username and passwords.
:type SecureVersion: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the device password logs as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device password log methods. The listed methods will be called on each device password log returned and included in the output. Available methods are: device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DevicePwLogID
:param sort: The data field(s) to use for sorting the output. Default is DevicePwLogID. Valid values are DevicePwLogID, DataSourceID, DeviceID, DevicePwLogTimestamp, DevicePwLogProtocol, DevicePwLogSNMPAuthProto, DevicePwLogSNMPPrivProto, DevicePwLogStatus, DevicePwLogUsernameSecure, DevicePwLogPasswordSecure, DevicePwLogEnablePasswordSecure, DevicePwLogSNMPAuthPWSecure, DevicePwLogSNMPPrivPWSecure, SecureVersion.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DevicePasswordLog. Valid values are DevicePwLogID, DataSourceID, DeviceID, DevicePwLogTimestamp, DevicePwLogProtocol, DevicePwLogSNMPAuthProto, DevicePwLogSNMPPrivProto, DevicePwLogStatus, DevicePwLogUsernameSecure, DevicePwLogPasswordSecure, DevicePwLogEnablePasswordSecure, DevicePwLogSNMPAuthPWSecure, DevicePwLogSNMPPrivPWSecure, SecureVersion. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against device password logs, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: DataSourceID, DeviceID, DevicePwLogEnablePasswordSecure, DevicePwLogID, DevicePwLogPasswordSecure, DevicePwLogProtocol, DevicePwLogSNMPAuthPWSecure, DevicePwLogSNMPAuthProto, DevicePwLogSNMPPrivPWSecure, DevicePwLogSNMPPrivProto, DevicePwLogStatus, DevicePwLogTimestamp, DevicePwLogUsernameSecure, SecureVersion.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if not backed by database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_password_logs: An array of the DevicePasswordLog objects that match the specified input criteria.
:rtype device_password_logs: Array of DevicePasswordLog
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
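
    # Sketch, an assumption rather than original code: a free-text search via
    # the `query` input; wrapping the value in '/' switches to a regular
    # expression match, per the docstring above:
    #
    #     matches = broker.search(query='/ssh|telnet/', limit=50)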
    def find(self, **kwargs):
        """Lists the available device password logs matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: DataSourceID, DeviceID, DevicePwLogEnablePasswordSecure, DevicePwLogID, DevicePwLogPasswordSecure, DevicePwLogProtocol, DevicePwLogSNMPAuthPWSecure, DevicePwLogSNMPAuthProto, DevicePwLogSNMPPrivPWSecure, DevicePwLogSNMPPrivProto, DevicePwLogStatus, DevicePwLogTimestamp, DevicePwLogUsernameSecure, SecureVersion.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified.
:type val_f_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified.
:type val_c_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceID: The operator to apply to the field DeviceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceID: The internal NetMRI identifier for the device from which device password log table information was collected. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceID: If op_DeviceID is specified, the field named in this input will be compared to the value in DeviceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceID must be specified if op_DeviceID is specified.
:type val_f_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceID: If op_DeviceID is specified, this value will be compared to the value in DeviceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceID must be specified if op_DeviceID is specified.
:type val_c_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogEnablePasswordSecure: The operator to apply to the field DevicePwLogEnablePasswordSecure. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogEnablePasswordSecure: The enable password of the device password log. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogEnablePasswordSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogEnablePasswordSecure: If op_DevicePwLogEnablePasswordSecure is specified, the field named in this input will be compared to the value in DevicePwLogEnablePasswordSecure using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogEnablePasswordSecure must be specified if op_DevicePwLogEnablePasswordSecure is specified.
:type val_f_DevicePwLogEnablePasswordSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogEnablePasswordSecure: If op_DevicePwLogEnablePasswordSecure is specified, this value will be compared to the value in DevicePwLogEnablePasswordSecure using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogEnablePasswordSecure must be specified if op_DevicePwLogEnablePasswordSecure is specified.
:type val_c_DevicePwLogEnablePasswordSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogID: The operator to apply to the field DevicePwLogID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogID: The internal NetMRI identifier for the device password log. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogID: If op_DevicePwLogID is specified, the field named in this input will be compared to the value in DevicePwLogID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogID must be specified if op_DevicePwLogID is specified.
:type val_f_DevicePwLogID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogID: If op_DevicePwLogID is specified, this value will be compared to the value in DevicePwLogID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogID must be specified if op_DevicePwLogID is specified.
:type val_c_DevicePwLogID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogPasswordSecure: The operator to apply to the field DevicePwLogPasswordSecure. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogPasswordSecure: The password of the device password log. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogPasswordSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogPasswordSecure: If op_DevicePwLogPasswordSecure is specified, the field named in this input will be compared to the value in DevicePwLogPasswordSecure using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogPasswordSecure must be specified if op_DevicePwLogPasswordSecure is specified.
:type val_f_DevicePwLogPasswordSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogPasswordSecure: If op_DevicePwLogPasswordSecure is specified, this value will be compared to the value in DevicePwLogPasswordSecure using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogPasswordSecure must be specified if op_DevicePwLogPasswordSecure is specified.
:type val_c_DevicePwLogPasswordSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogProtocol: The operator to apply to the field DevicePwLogProtocol. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogProtocol: The protocol of the device password log. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogProtocol: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogProtocol: If op_DevicePwLogProtocol is specified, the field named in this input will be compared to the value in DevicePwLogProtocol using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogProtocol must be specified if op_DevicePwLogProtocol is specified.
:type val_f_DevicePwLogProtocol: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogProtocol: If op_DevicePwLogProtocol is specified, this value will be compared to the value in DevicePwLogProtocol using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogProtocol must be specified if op_DevicePwLogProtocol is specified.
:type val_c_DevicePwLogProtocol: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogSNMPAuthPWSecure: The operator to apply to the field DevicePwLogSNMPAuthPWSecure. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogSNMPAuthPWSecure: The SNMP authentication password of the device password log. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogSNMPAuthPWSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogSNMPAuthPWSecure: If op_DevicePwLogSNMPAuthPWSecure is specified, the field named in this input will be compared to the value in DevicePwLogSNMPAuthPWSecure using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogSNMPAuthPWSecure must be specified if op_DevicePwLogSNMPAuthPWSecure is specified.
:type val_f_DevicePwLogSNMPAuthPWSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogSNMPAuthPWSecure: If op_DevicePwLogSNMPAuthPWSecure is specified, this value will be compared to the value in DevicePwLogSNMPAuthPWSecure using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogSNMPAuthPWSecure must be specified if op_DevicePwLogSNMPAuthPWSecure is specified.
:type val_c_DevicePwLogSNMPAuthPWSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogSNMPAuthProto: The operator to apply to the field DevicePwLogSNMPAuthProto. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogSNMPAuthProto: The SNMP authentication protocol of the device password log. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogSNMPAuthProto: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogSNMPAuthProto: If op_DevicePwLogSNMPAuthProto is specified, the field named in this input will be compared to the value in DevicePwLogSNMPAuthProto using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogSNMPAuthProto must be specified if op_DevicePwLogSNMPAuthProto is specified.
:type val_f_DevicePwLogSNMPAuthProto: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogSNMPAuthProto: If op_DevicePwLogSNMPAuthProto is specified, this value will be compared to the value in DevicePwLogSNMPAuthProto using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogSNMPAuthProto must be specified if op_DevicePwLogSNMPAuthProto is specified.
:type val_c_DevicePwLogSNMPAuthProto: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogSNMPPrivPWSecure: The operator to apply to the field DevicePwLogSNMPPrivPWSecure. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogSNMPPrivPWSecure: The SNMP private password of the device password log. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogSNMPPrivPWSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogSNMPPrivPWSecure: If op_DevicePwLogSNMPPrivPWSecure is specified, the field named in this input will be compared to the value in DevicePwLogSNMPPrivPWSecure using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogSNMPPrivPWSecure must be specified if op_DevicePwLogSNMPPrivPWSecure is specified.
:type val_f_DevicePwLogSNMPPrivPWSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogSNMPPrivPWSecure: If op_DevicePwLogSNMPPrivPWSecure is specified, this value will be compared to the value in DevicePwLogSNMPPrivPWSecure using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogSNMPPrivPWSecure must be specified if op_DevicePwLogSNMPPrivPWSecure is specified.
:type val_c_DevicePwLogSNMPPrivPWSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogSNMPPrivProto: The operator to apply to the field DevicePwLogSNMPPrivProto. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogSNMPPrivProto: The SNMP private password protocol of the device password log. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogSNMPPrivProto: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogSNMPPrivProto: If op_DevicePwLogSNMPPrivProto is specified, the field named in this input will be compared to the value in DevicePwLogSNMPPrivProto using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogSNMPPrivProto must be specified if op_DevicePwLogSNMPPrivProto is specified.
:type val_f_DevicePwLogSNMPPrivProto: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogSNMPPrivProto: If op_DevicePwLogSNMPPrivProto is specified, this value will be compared to the value in DevicePwLogSNMPPrivProto using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogSNMPPrivProto must be specified if op_DevicePwLogSNMPPrivProto is specified.
:type val_c_DevicePwLogSNMPPrivProto: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogStatus: The operator to apply to the field DevicePwLogStatus. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogStatus: The status of the device password log. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogStatus: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogStatus: If op_DevicePwLogStatus is specified, the field named in this input will be compared to the value in DevicePwLogStatus using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogStatus must be specified if op_DevicePwLogStatus is specified.
:type val_f_DevicePwLogStatus: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogStatus: If op_DevicePwLogStatus is specified, this value will be compared to the value in DevicePwLogStatus using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogStatus must be specified if op_DevicePwLogStatus is specified.
:type val_c_DevicePwLogStatus: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogTimestamp: The operator to apply to the field DevicePwLogTimestamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogTimestamp: The date and time this record was collected or calculated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogTimestamp: If op_DevicePwLogTimestamp is specified, the field named in this input will be compared to the value in DevicePwLogTimestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogTimestamp must be specified if op_DevicePwLogTimestamp is specified.
:type val_f_DevicePwLogTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogTimestamp: If op_DevicePwLogTimestamp is specified, this value will be compared to the value in DevicePwLogTimestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogTimestamp must be specified if op_DevicePwLogTimestamp is specified.
:type val_c_DevicePwLogTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DevicePwLogUsernameSecure: The operator to apply to the field DevicePwLogUsernameSecure. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DevicePwLogUsernameSecure: The username of the device password log. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DevicePwLogUsernameSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DevicePwLogUsernameSecure: If op_DevicePwLogUsernameSecure is specified, the field named in this input will be compared to the value in DevicePwLogUsernameSecure using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DevicePwLogUsernameSecure must be specified if op_DevicePwLogUsernameSecure is specified.
:type val_f_DevicePwLogUsernameSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DevicePwLogUsernameSecure: If op_DevicePwLogUsernameSecure is specified, this value will be compared to the value in DevicePwLogUsernameSecure using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DevicePwLogUsernameSecure must be specified if op_DevicePwLogUsernameSecure is specified.
:type val_c_DevicePwLogUsernameSecure: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SecureVersion: The operator to apply to the field SecureVersion. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SecureVersion: The encryption version of the username and passwords. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SecureVersion: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SecureVersion: If op_SecureVersion is specified, the field named in this input will be compared to the value in SecureVersion using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SecureVersion must be specified if op_SecureVersion is specified.
:type val_f_SecureVersion: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SecureVersion: If op_SecureVersion is specified, this value will be compared to the value in SecureVersion using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SecureVersion must be specified if op_SecureVersion is specified.
:type val_c_SecureVersion: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the device password logs as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device password log methods. The listed methods will be called on each device password log returned and included in the output. Available methods are: device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DevicePwLogID
:param sort: The data field(s) to use for sorting the output. Default is DevicePwLogID. Valid values are DevicePwLogID, DataSourceID, DeviceID, DevicePwLogTimestamp, DevicePwLogProtocol, DevicePwLogSNMPAuthProto, DevicePwLogSNMPPrivProto, DevicePwLogStatus, DevicePwLogUsernameSecure, DevicePwLogPasswordSecure, DevicePwLogEnablePasswordSecure, DevicePwLogSNMPAuthPWSecure, DevicePwLogSNMPPrivPWSecure, SecureVersion.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DevicePasswordLog. Valid values are DevicePwLogID, DataSourceID, DeviceID, DevicePwLogTimestamp, DevicePwLogProtocol, DevicePwLogSNMPAuthProto, DevicePwLogSNMPPrivProto, DevicePwLogStatus, DevicePwLogUsernameSecure, DevicePwLogPasswordSecure, DevicePwLogEnablePasswordSecure, DevicePwLogSNMPAuthPWSecure, DevicePwLogSNMPPrivPWSecure, SecureVersion. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if not backed by database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_password_logs: An array of the DevicePasswordLog objects that match the specified input criteria.
:rtype device_password_logs: Array of DevicePasswordLog
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
def data_source(self, **kwargs):
"""The collector NetMRI that collected this data record.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DevicePwLogID: The internal NetMRI identifier for the device password log.
:type DevicePwLogID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The collector NetMRI that collected this data record.
:rtype : DataSource
"""
return self.api_request(self._get_method_fullname("data_source"), kwargs)
def device(self, **kwargs):
"""The device from which this data was collected.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DevicePwLogID: The internal NetMRI identifier for the device password log.
:type DevicePwLogID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The device from which this data was collected.
:rtype : Device
"""
return self.api_request(self._get_method_fullname("device"), kwargs)
def infradevice(self, **kwargs):
"""The device from which this data was collected.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DevicePwLogID: The internal NetMRI identifier for the device password log.
:type DevicePwLogID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The device from which this data was collected.
:rtype : InfraDevice
"""
return self.api_request(self._get_method_fullname("infradevice"), kwargs)
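A brief usage sketch may help tie the find parameters above together. This snippet is not part of the generated broker; it assumes `client` is an authenticated infoblox-netmri API client and that `get_broker` resolves broker classes by name (both assumptions, not code shown here).
# Hedged usage sketch (assumptions: `client` is an authenticated NetMRI API
# client; `get_broker` looks up broker classes by name).
broker = client.get_broker("DevicePasswordLog")
logs = broker.find(
    sort=["DevicePwLogTimestamp"],
    dir=["desc"],
    start=0,
    limit=100,
    select=["DevicePwLogID", "DeviceID", "DevicePwLogStatus"],
)
for log in logs:
    print(log.DevicePwLogID, log.DevicePwLogStatus)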
[per-file quality-signal metrics omitted]
db4a29850476ff0c6b4dd2e489eaa03a93aab0d4 | 206 bytes | py | Python | cv2_plt_imshow/imshow.py | rs9899/cv2_plt_imshow @ 1e06bb28cd21414037b384c036c4e9f87be548bf | ["MIT"] | stars: 2 (2020-06-27T08:31:09.000Z to 2020-06-28T23:07:16.000Z) | issues: null | forks: null
import cv2
import matplotlib.pyplot as plt

def cv2_plt_imshow(image):
    # OpenCV decodes images as BGR; convert to RGB so matplotlib shows true colors.
    return plt.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))

def plt_format(image):
    # Return an RGB copy of a BGR image for use with matplotlib.
    return cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
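A hedged usage sketch (the image path is a placeholder, not something from this file):
img = cv2.imread("example.jpg")   # OpenCV returns a BGR ndarray, or None on a bad path
if img is not None:
    cv2_plt_imshow(img)           # colors render correctly in matplotlib
    plt.show()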
[per-file quality-signal metrics omitted]
db5b6e2056b57f53e49014ba43b90c18c2a6c204 | 176 bytes | py | Python | machina/core/markdown.py | BrendaH/django-machina @ c75b6f39f61ca92745aebb0bb6ab3c707d88063d | ["BSD-3-Clause"] | stars: 572 (2015-04-10T06:15:43.000Z to 2022-03-30T06:40:25.000Z) | issues: 241 (2015-10-26T22:23:59.000Z to 2022-03-25T12:30:56.000Z) | forks: 156 (2015-10-02T19:32:08.000Z to 2022-03-30T06:40:11.000Z)
from django.utils.encoding import smart_str
from markdown2 import markdown as _markdown

def markdown(text, **kwargs):
    # Render Markdown to HTML, coerce to str, and strip surrounding whitespace.
    return smart_str(_markdown(text, **kwargs).strip())
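A hedged usage sketch; `extras` is a standard markdown2 keyword passed through untouched by the wrapper above:
html = markdown("**bold** text", extras=["fenced-code-blocks"])
# -> '<p><strong>bold</strong> text</p>' as a plain str, trailing newline stripped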
[per-file quality-signal metrics omitted]
db6e03efd47eaeae23fe69d597ec02a4286eda6a | 7,558 bytes | py | Python | app/companies/migrations/0001_initial.py | prapeller/blackemployer_api @ ae9232773e6e164b22ffccf0b39dd9a4c2a036cf | ["MIT"] | stars: null | issues: null | forks: null
# Generated by Django 4.0.3 on 2022-03-18 09:23
from django.conf import settings
import django.contrib.postgres.fields
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import utils.model_utils
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('content', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Case',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('seo_title', models.CharField(blank=True, max_length=100, null=True, verbose_name='SEO title')),
('seo_description', models.TextField(blank=True, max_length=400, null=True, verbose_name='SEO description')),
('seo_keywords', models.CharField(blank=True, max_length=200, null=True, verbose_name='SEO keywords')),
('slug', models.SlugField(blank=True, max_length=100, null=True, unique=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('is_active', models.BooleanField(default=True)),
('case_date', models.DateTimeField(blank=True, null=True)),
('case_description', models.TextField(blank=True, null=True)),
('position', models.CharField(blank=True, max_length=256, null=True)),
('position_description', models.TextField(blank=True, null=True)),
('images', django.contrib.postgres.fields.ArrayField(base_field=models.FileField(upload_to=utils.model_utils.PathAndRename('images/cases/'), validators=[django.core.validators.FileExtensionValidator(['svg', 'jpg', 'jpeg', 'png'])]), blank=True, default=utils.model_utils.default_1d_array, null=True, size=None)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Contact',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('is_active', models.BooleanField(default=True)),
('name', models.CharField(blank=True, max_length=128, null=True)),
('phones', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=128, null=True), default=utils.model_utils.default_1d_array_of_strings, size=None)),
('emails', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=128, null=True), default=utils.model_utils.default_1d_array_of_strings, size=None)),
('telegram', models.CharField(blank=True, max_length=128, null=True)),
('skype', models.CharField(blank=True, max_length=128, null=True)),
('slack', models.CharField(blank=True, max_length=128, null=True)),
('other', models.CharField(blank=True, max_length=128, null=True)),
('creator', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Company',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('seo_title', models.CharField(blank=True, max_length=100, null=True, verbose_name='SEO title')),
('seo_description', models.TextField(blank=True, max_length=400, null=True, verbose_name='SEO description')),
('seo_keywords', models.CharField(blank=True, max_length=200, null=True, verbose_name='SEO keywords')),
('title', models.CharField(max_length=120)),
('slug', models.SlugField(blank=True, max_length=100, null=True, unique=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('is_active', models.BooleanField(default=True)),
('website', models.URLField(blank=True, max_length=128, null=True)),
('text', models.TextField(blank=True, null=True)),
('image', models.FileField(blank=True, null=True, upload_to=utils.model_utils.PathAndRename('images/companies/'), validators=[django.core.validators.FileExtensionValidator(['svg', 'jpg', 'jpeg', 'png'])])),
('creator', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('seo_title', models.CharField(blank=True, max_length=100, null=True, verbose_name='SEO title')),
('seo_description', models.TextField(blank=True, max_length=400, null=True, verbose_name='SEO description')),
('seo_keywords', models.CharField(blank=True, max_length=200, null=True, verbose_name='SEO keywords')),
('slug', models.SlugField(blank=True, max_length=100, null=True, unique=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('is_active', models.BooleanField(default=True)),
('text', models.TextField(blank=True, null=True)),
('images', django.contrib.postgres.fields.ArrayField(base_field=models.FileField(upload_to=utils.model_utils.PathAndRename('images/comments/'), validators=[django.core.validators.FileExtensionValidator(['svg', 'jpg', 'jpeg', 'png'])]), blank=True, default=utils.model_utils.default_1d_array, null=True, size=None)),
('case', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to='companies.case')),
('creator', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
('likes', models.ManyToManyField(related_name='comment_likes', to='content.like')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='case',
name='company',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='companies.company'),
),
migrations.AddField(
model_name='case',
name='contacts',
field=models.ManyToManyField(related_name='case_contacts', to='companies.contact'),
),
migrations.AddField(
model_name='case',
name='creator',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='case',
name='tags',
field=models.ManyToManyField(related_name='case_tags', to='content.tag'),
),
]
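The migration leans on `utils.model_utils.PathAndRename`, whose implementation is not shown in this file. A plausible sketch, under the common Django pattern of a deconstructible upload-to callable; everything below is an assumption, not code from this repository:
import os
import uuid
from django.utils.deconstruct import deconstructible

@deconstructible
class PathAndRename:
    # Upload-to callable: store files under a fixed prefix with a UUID filename.
    # @deconstructible lets migrations serialize the instance, as seen above.
    def __init__(self, sub_path):
        self.path = sub_path

    def __call__(self, instance, filename):
        ext = filename.rsplit('.', 1)[-1]
        return os.path.join(self.path, '{}.{}'.format(uuid.uuid4().hex, ext))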
[per-file quality-signal metrics omitted]
db91ebf1b6ba95030a07801a1d5db4e6cad0b0cb | 15,073 bytes | py | Python | tests/integration/api/v2010/account/conference/test_participant.py | ["MIT"] | stars: 2 (2018-12-09T22:59:11.000Z to 2018-12-09T22:59:15.000Z) via thedoubl3j/twilio-python @ 53c6684b6f5b158962c89e2aec29cffad2023798 | issues: null via ahmedabdo97/twilio-python @ abd7477ad5f8f6df10608f56add8d217b2a0d4f1 | forks: 1 (2022-01-17T06:42:30.000Z to 2022-01-17T06:42:30.000Z) via ahmedabdo97/twilio-python @ abd7477ad5f8f6df10608f56add8d217b2a0d4f1
# coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class ParticipantTestCase(IntegrationTestCase):
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(call_sid="CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Conferences/CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Participants/CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.json',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"call_sid": "CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"conference_sid": "CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "Fri, 18 Feb 2011 21:07:19 +0000",
"date_updated": "Fri, 18 Feb 2011 21:07:19 +0000",
"end_conference_on_exit": false,
"muted": false,
"hold": false,
"status": "complete",
"start_conference_on_enter": true,
"coaching": true,
"call_sid_to_coach": "CAbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(call_sid="CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.assertIsNotNone(actual)
def test_update_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(call_sid="CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()
self.holodeck.assert_has_request(Request(
'post',
'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Conferences/CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Participants/CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.json',
))
def test_mute_participant_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"call_sid": "CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"conference_sid": "CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "Fri, 18 Feb 2011 21:07:19 +0000",
"date_updated": "Fri, 18 Feb 2011 21:07:19 +0000",
"end_conference_on_exit": false,
"muted": false,
"hold": false,
"status": "complete",
"start_conference_on_enter": true,
"coaching": false,
"call_sid_to_coach": null,
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(call_sid="CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()
self.assertIsNotNone(actual)
def test_modify_participant_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"call_sid": "CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"conference_sid": "CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "Fri, 18 Feb 2011 21:07:19 +0000",
"date_updated": "Fri, 18 Feb 2011 21:07:19 +0000",
"end_conference_on_exit": false,
"muted": false,
"hold": false,
"status": "complete",
"start_conference_on_enter": true,
"coaching": true,
"call_sid_to_coach": "CAbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(call_sid="CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()
self.assertIsNotNone(actual)
def test_create_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.create(from_="+15017122661", to="+15558675310")
values = {'From': "+15017122661", 'To': "+15558675310", }
self.holodeck.assert_has_request(Request(
'post',
'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Conferences/CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Participants.json',
data=values,
))
def test_create_with_sid_response(self):
self.holodeck.mock(Response(
201,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"call_sid": "CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"conference_sid": "CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "Fri, 18 Feb 2011 21:07:19 +0000",
"date_updated": "Fri, 18 Feb 2011 21:07:19 +0000",
"end_conference_on_exit": false,
"muted": false,
"hold": false,
"status": "complete",
"start_conference_on_enter": true,
"coaching": false,
"call_sid_to_coach": null,
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.create(from_="+15017122661", to="+15558675310")
self.assertIsNotNone(actual)
def test_create_with_friendly_name_response(self):
self.holodeck.mock(Response(
201,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"call_sid": "CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"conference_sid": "CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "Fri, 18 Feb 2011 21:07:19 +0000",
"date_updated": "Fri, 18 Feb 2011 21:07:19 +0000",
"end_conference_on_exit": false,
"muted": false,
"hold": false,
"status": "complete",
"start_conference_on_enter": true,
"coaching": false,
"call_sid_to_coach": null,
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.create(from_="+15017122661", to="+15558675310")
self.assertIsNotNone(actual)
def test_create_with_sid_as_coach_response(self):
self.holodeck.mock(Response(
201,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"call_sid": "CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"conference_sid": "CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "Fri, 18 Feb 2011 21:07:19 +0000",
"date_updated": "Fri, 18 Feb 2011 21:07:19 +0000",
"end_conference_on_exit": false,
"muted": false,
"hold": false,
"status": "queued",
"start_conference_on_enter": true,
"coaching": false,
"call_sid_to_coach": null,
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.create(from_="+15017122661", to="+15558675310")
self.assertIsNotNone(actual)
def test_delete_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(call_sid="CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.holodeck.assert_has_request(Request(
'delete',
'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Conferences/CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Participants/CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.json',
))
def test_delete_response(self):
self.holodeck.mock(Response(
204,
None,
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(call_sid="CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.assertTrue(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.list()
self.holodeck.assert_has_request(Request(
'get',
'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Conferences/CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Participants.json',
))
def test_read_full_response(self):
self.holodeck.mock(Response(
200,
'''
{
"end": 0,
"first_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants.json?Page=0&PageSize=50",
"next_page_uri": null,
"page": 0,
"page_size": 50,
"participants": [
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"call_sid": "CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"conference_sid": "CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "Fri, 18 Feb 2011 21:07:19 +0000",
"date_updated": "Fri, 18 Feb 2011 21:07:19 +0000",
"end_conference_on_exit": false,
"muted": false,
"hold": false,
"status": "complete",
"start_conference_on_enter": true,
"coaching": true,
"call_sid_to_coach": "CAbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
}
],
"previous_page_uri": null,
"start": 0,
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.list()
self.assertIsNotNone(actual)
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"end": 0,
"first_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants.json?Page=0&PageSize=50",
"next_page_uri": null,
"page": 0,
"page_size": 50,
"participants": [],
"previous_page_uri": null,
"start": 0,
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Conferences/CFaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.list()
self.assertIsNotNone(actual)
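For contrast with the holodeck-mocked tests above, a hedged sketch of the same fetch against the live API; the credentials and SIDs are placeholders, and the call chain mirrors the tests' own style:
from twilio.rest import Client

client = Client("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "auth_token_placeholder")
participant = client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
    .conferences(sid="CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
    .participants(call_sid="CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
print(participant.status, participant.muted)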
[per-file quality-signal metrics omitted]
dba84a89a7f9812316b44ffab5a4caac2fcbb043 | 16,161 bytes | py | Python | ambassador/tests/t_redirect.py | benjaminhuo/ambassador @ 4844c862ff3eaecd191a73b915dfd021a4873f7b | ["Apache-2.0"] | stars: null | issues: null | forks: null
from kat.harness import Query
from abstract_tests import AmbassadorTest, HTTP
from abstract_tests import ServiceType
#####
# XXX This file is annoying.
#
# RedirectTestsWithProxyProto and RedirectTestsInvalidSecret used to be subclasses of RedirectTests,
# which makes a certain amount of sense. Problem is that when I wanted to modify just RedirectTests
# to have secrets defined, that ended up affecting the two subclasses in bad ways. There's basically
# no way to subclass an AmbassadorTest without having your base class be run separately, which isn't
# what I wanted here. Sigh.
class RedirectTests(AmbassadorTest):
target: ServiceType
def init(self):
self.target = HTTP()
def requirements(self):
# only check https urls since test readiness will only end up barfing on redirect
yield from (r for r in super().requirements() if r[0] == "url" and r[1].url.startswith("https"))
def manifests(self):
return super().manifests() + """
---
apiVersion: v1
kind: Secret
metadata:
name: redirect-cert
namespace: plain-namespace
type: kubernetes.io/tls
data:
tls.crt: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURwakNDQW82Z0F3SUJBZ0lKQUpxa1Z4Y1RtQ1FITUEwR0NTcUdTSWIzRFFFQkN3VUFNR2d4Q3pBSkJnTlYKQkFZVEFsVlRNUXN3Q1FZRFZRUUlEQUpOUVRFUE1BMEdBMVVFQnd3R1FtOXpkRzl1TVJFd0R3WURWUVFLREFoRQpZWFJoZDJseVpURVVNQklHQTFVRUN3d0xSVzVuYVc1bFpYSnBibWN4RWpBUUJnTlZCQU1NQ1d4dlkyRnNhRzl6CmREQWVGdzB4T0RFd01UQXhNREk1TURKYUZ3MHlPREV3TURjeE1ESTVNREphTUdneEN6QUpCZ05WQkFZVEFsVlQKTVFzd0NRWURWUVFJREFKTlFURVBNQTBHQTFVRUJ3d0dRbTl6ZEc5dU1SRXdEd1lEVlFRS0RBaEVZWFJoZDJseQpaVEVVTUJJR0ExVUVDd3dMUlc1bmFXNWxaWEpwYm1jeEVqQVFCZ05WQkFNTUNXeHZZMkZzYUc5emREQ0NBU0l3CkRRWUpLb1pJaHZjTkFRRUJCUUFEZ2dFUEFEQ0NBUW9DZ2dFQkFMcTZtdS9FSzlQc1Q0YkR1WWg0aEZPVnZiblAKekV6MGpQcnVzdXcxT05MQk9jT2htbmNSTnE4c1FyTGxBZ3NicDBuTFZmQ1pSZHQ4UnlOcUFGeUJlR29XS3IvZAprQVEybVBucjBQRHlCTzk0UHo4VHdydDBtZEtEU1dGanNxMjlOYVJaT0JqdStLcGV6RytOZ3pLMk04M0ZtSldUCnFYdTI3ME9pOXlqb2VGQ3lPMjdwUkdvcktkQk9TcmIwd3ozdFdWUGk4NFZMdnFKRWprT0JVZjJYNVF3b25XWngKMktxVUJ6OUFSZVVUMzdwUVJZQkJMSUdvSnM4U042cjF4MSt1dTNLdTVxSkN1QmRlSHlJbHpKb2V0aEp2K3pTMgowN0pFc2ZKWkluMWNpdXhNNzNPbmVRTm1LUkpsL2NEb3BLemswSldRSnRSV1NnbktneFNYWkRrZjJMOENBd0VBCkFhTlRNRkV3SFFZRFZSME9CQllFRkJoQzdDeVRpNGFkSFVCd0wvTkZlRTZLdnFIRE1COEdBMVVkSXdRWU1CYUEKRkJoQzdDeVRpNGFkSFVCd0wvTkZlRTZLdnFIRE1BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTgpBUUVMQlFBRGdnRUJBSFJvb0xjcFdEa1IyMEhENEJ5d1BTUGRLV1hjWnN1U2tXYWZyekhoYUJ5MWJZcktIR1o1CmFodFF3L1gwQmRnMWtidlpZUDJSTzdGTFhBSlNTdXVJT0NHTFVwS0pkVHE1NDREUThNb1daWVZKbTc3UWxxam0KbHNIa2VlTlRNamFOVjdMd0MzalBkMERYelczbGVnWFRoYWpmZ2dtLzBJZXNGRzBVWjFEOTJHNURmc0hLekpSagpNSHZyVDNtVmJGZjkrSGJhRE4yT2g5VjIxUWhWSzF2M0F2dWNXczhUWCswZHZFZ1dtWHBRcndEd2pTMU04QkRYCldoWjVsZTZjVzhNYjhnZmRseG1JckpnQStuVVZzMU9EbkJKS1F3MUY4MVdkc25tWXdweVUrT2xVais4UGt1TVoKSU4rUlhQVnZMSWJ3czBmamJ4UXRzbTArZVBpRnN2d0NsUFk9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K
tls.key: LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2Z0lCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktnd2dnU2tBZ0VBQW9JQkFRQzZ1cHJ2eEN2VDdFK0cKdzdtSWVJUlRsYjI1ejh4TTlJejY3ckxzTlRqU3dUbkRvWnAzRVRhdkxFS3k1UUlMRzZkSnkxWHdtVVhiZkVjagphZ0JjZ1hocUZpcS8zWkFFTnBqNTY5RHc4Z1R2ZUQ4L0U4SzdkSm5TZzBsaFk3S3R2VFdrV1RnWTd2aXFYc3h2CmpZTXl0alBOeFppVms2bDd0dTlEb3ZjbzZIaFFzanR1NlVScUt5blFUa3EyOU1NOTdWbFQ0dk9GUzc2aVJJNUQKZ1ZIOWwrVU1LSjFtY2RpcWxBYy9RRVhsRTkrNlVFV0FRU3lCcUNiUEVqZXE5Y2RmcnJ0eXJ1YWlRcmdYWGg4aQpKY3lhSHJZU2IvczB0dE95UkxIeVdTSjlYSXJzVE85enAza0RaaWtTWmYzQTZLU3M1TkNWa0NiVVZrb0p5b01VCmwyUTVIOWkvQWdNQkFBRUNnZ0VBSVFsZzNpamNCRHViK21Eb2syK1hJZDZ0V1pHZE9NUlBxUm5RU0NCR2RHdEIKV0E1Z2NNNTMyVmhBV0x4UnR6dG1ScFVXR0dKVnpMWlpNN2ZPWm85MWlYZHdpcytkYWxGcWtWVWFlM2FtVHVQOApkS0YvWTRFR3Nnc09VWSs5RGlZYXRvQWVmN0xRQmZ5TnVQTFZrb1JQK0FrTXJQSWFHMHhMV3JFYmYzNVp3eFRuCnd5TTF3YVpQb1oxWjZFdmhHQkxNNzlXYmY2VFY0WXVzSTRNOEVQdU1GcWlYcDNlRmZ4L0tnNHhtYnZtN1JhYzcKOEJ3Z3pnVmljNXlSbkVXYjhpWUh5WGtyazNTL0VCYUNEMlQwUjM5VmlVM1I0VjBmMUtyV3NjRHowVmNiVWNhKwpzeVdyaVhKMHBnR1N0Q3FWK0dRYy9aNmJjOGt4VWpTTWxOUWtudVJRZ1FLQmdRRHpwM1ZaVmFzMTA3NThVT00rCnZUeTFNL0V6azg4cWhGb21kYVFiSFRlbStpeGpCNlg3RU9sRlkya3JwUkwvbURDSEpwR0MzYlJtUHNFaHVGSUwKRHhSQ2hUcEtTVmNsSytaaUNPaWE1ektTVUpxZnBOcW15RnNaQlhJNnRkNW9mWk42aFpJVTlJR2RUaGlYMjBONwppUW01UnZlSUx2UHVwMWZRMmRqd2F6Ykgvd0tCZ1FERU1MN21Mb2RqSjBNTXh6ZnM3MW1FNmZOUFhBMVY2ZEgrCllCVG4xS2txaHJpampRWmFNbXZ6dEZmL1F3Wkhmd3FKQUVuNGx2em5ncUNzZTMvUElZMy8zRERxd1p2NE1vdy8KRGdBeTBLQmpQYVJGNjhYT1B1d0VuSFN1UjhyZFg2UzI3TXQ2cEZIeFZ2YjlRRFJuSXc4a3grSFVreml4U0h5Ugo2NWxESklEdlFRS0JnUURpQTF3ZldoQlBCZk9VYlpQZUJydmhlaVVycXRob29BemYwQkJCOW9CQks1OHczVTloCjdQWDFuNWxYR3ZEY2x0ZXRCbUhEK3RQMFpCSFNyWit0RW5mQW5NVE5VK3E2V0ZhRWFhOGF3WXR2bmNWUWdTTXgKd25oK1pVYm9udnVJQWJSajJyTC9MUzl1TTVzc2dmKy9BQWM5RGs5ZXkrOEtXY0Jqd3pBeEU4TGxFUUtCZ0IzNwoxVEVZcTFoY0I4Tk1MeC9tOUtkN21kUG5IYUtqdVpSRzJ1c1RkVWNxajgxdklDbG95MWJUbVI5Si93dXVQczN4ClhWekF0cVlyTUtNcnZMekxSQWgyZm9OaVU1UDdKYlA5VDhwMFdBN1N2T2h5d0NobE5XeisvRlltWXJxeWcxbngKbHFlSHRYNU03REtJUFhvRndhcTlZYVk3V2M2K1pVdG4xbVNNajZnQkFvR0JBSTgwdU9iTkdhRndQTVYrUWhiZApBelkrSFNGQjBkWWZxRytzcTBmRVdIWTNHTXFmNFh0aVRqUEFjWlg3RmdtT3Q5Uit3TlFQK0dFNjZoV0JpKzBWCmVLV3prV0lXeS9sTVZCSW0zVWtlSlRCT3NudTFVaGhXbm5WVDhFeWhEY1FxcndPSGlhaUo3bFZSZmRoRWFyQysKSnpaU0czOHVZUVlyc0lITnRVZFgySmdPCi0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K
---
apiVersion: v1
kind: Secret
metadata:
name: redirect-cert
type: kubernetes.io/tls
data:
tls.crt: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURwakNDQW82Z0F3SUJBZ0lKQUpxa1Z4Y1RtQ1FITUEwR0NTcUdTSWIzRFFFQkN3VUFNR2d4Q3pBSkJnTlYKQkFZVEFsVlRNUXN3Q1FZRFZRUUlEQUpOUVRFUE1BMEdBMVVFQnd3R1FtOXpkRzl1TVJFd0R3WURWUVFLREFoRQpZWFJoZDJseVpURVVNQklHQTFVRUN3d0xSVzVuYVc1bFpYSnBibWN4RWpBUUJnTlZCQU1NQ1d4dlkyRnNhRzl6CmREQWVGdzB4T0RFd01UQXhNREk1TURKYUZ3MHlPREV3TURjeE1ESTVNREphTUdneEN6QUpCZ05WQkFZVEFsVlQKTVFzd0NRWURWUVFJREFKTlFURVBNQTBHQTFVRUJ3d0dRbTl6ZEc5dU1SRXdEd1lEVlFRS0RBaEVZWFJoZDJseQpaVEVVTUJJR0ExVUVDd3dMUlc1bmFXNWxaWEpwYm1jeEVqQVFCZ05WQkFNTUNXeHZZMkZzYUc5emREQ0NBU0l3CkRRWUpLb1pJaHZjTkFRRUJCUUFEZ2dFUEFEQ0NBUW9DZ2dFQkFMcTZtdS9FSzlQc1Q0YkR1WWg0aEZPVnZiblAKekV6MGpQcnVzdXcxT05MQk9jT2htbmNSTnE4c1FyTGxBZ3NicDBuTFZmQ1pSZHQ4UnlOcUFGeUJlR29XS3IvZAprQVEybVBucjBQRHlCTzk0UHo4VHdydDBtZEtEU1dGanNxMjlOYVJaT0JqdStLcGV6RytOZ3pLMk04M0ZtSldUCnFYdTI3ME9pOXlqb2VGQ3lPMjdwUkdvcktkQk9TcmIwd3ozdFdWUGk4NFZMdnFKRWprT0JVZjJYNVF3b25XWngKMktxVUJ6OUFSZVVUMzdwUVJZQkJMSUdvSnM4U042cjF4MSt1dTNLdTVxSkN1QmRlSHlJbHpKb2V0aEp2K3pTMgowN0pFc2ZKWkluMWNpdXhNNzNPbmVRTm1LUkpsL2NEb3BLemswSldRSnRSV1NnbktneFNYWkRrZjJMOENBd0VBCkFhTlRNRkV3SFFZRFZSME9CQllFRkJoQzdDeVRpNGFkSFVCd0wvTkZlRTZLdnFIRE1COEdBMVVkSXdRWU1CYUEKRkJoQzdDeVRpNGFkSFVCd0wvTkZlRTZLdnFIRE1BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTgpBUUVMQlFBRGdnRUJBSFJvb0xjcFdEa1IyMEhENEJ5d1BTUGRLV1hjWnN1U2tXYWZyekhoYUJ5MWJZcktIR1o1CmFodFF3L1gwQmRnMWtidlpZUDJSTzdGTFhBSlNTdXVJT0NHTFVwS0pkVHE1NDREUThNb1daWVZKbTc3UWxxam0KbHNIa2VlTlRNamFOVjdMd0MzalBkMERYelczbGVnWFRoYWpmZ2dtLzBJZXNGRzBVWjFEOTJHNURmc0hLekpSagpNSHZyVDNtVmJGZjkrSGJhRE4yT2g5VjIxUWhWSzF2M0F2dWNXczhUWCswZHZFZ1dtWHBRcndEd2pTMU04QkRYCldoWjVsZTZjVzhNYjhnZmRseG1JckpnQStuVVZzMU9EbkJKS1F3MUY4MVdkc25tWXdweVUrT2xVais4UGt1TVoKSU4rUlhQVnZMSWJ3czBmamJ4UXRzbTArZVBpRnN2d0NsUFk9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K
tls.key: LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2Z0lCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktnd2dnU2tBZ0VBQW9JQkFRQzZ1cHJ2eEN2VDdFK0cKdzdtSWVJUlRsYjI1ejh4TTlJejY3ckxzTlRqU3dUbkRvWnAzRVRhdkxFS3k1UUlMRzZkSnkxWHdtVVhiZkVjagphZ0JjZ1hocUZpcS8zWkFFTnBqNTY5RHc4Z1R2ZUQ4L0U4SzdkSm5TZzBsaFk3S3R2VFdrV1RnWTd2aXFYc3h2CmpZTXl0alBOeFppVms2bDd0dTlEb3ZjbzZIaFFzanR1NlVScUt5blFUa3EyOU1NOTdWbFQ0dk9GUzc2aVJJNUQKZ1ZIOWwrVU1LSjFtY2RpcWxBYy9RRVhsRTkrNlVFV0FRU3lCcUNiUEVqZXE5Y2RmcnJ0eXJ1YWlRcmdYWGg4aQpKY3lhSHJZU2IvczB0dE95UkxIeVdTSjlYSXJzVE85enAza0RaaWtTWmYzQTZLU3M1TkNWa0NiVVZrb0p5b01VCmwyUTVIOWkvQWdNQkFBRUNnZ0VBSVFsZzNpamNCRHViK21Eb2syK1hJZDZ0V1pHZE9NUlBxUm5RU0NCR2RHdEIKV0E1Z2NNNTMyVmhBV0x4UnR6dG1ScFVXR0dKVnpMWlpNN2ZPWm85MWlYZHdpcytkYWxGcWtWVWFlM2FtVHVQOApkS0YvWTRFR3Nnc09VWSs5RGlZYXRvQWVmN0xRQmZ5TnVQTFZrb1JQK0FrTXJQSWFHMHhMV3JFYmYzNVp3eFRuCnd5TTF3YVpQb1oxWjZFdmhHQkxNNzlXYmY2VFY0WXVzSTRNOEVQdU1GcWlYcDNlRmZ4L0tnNHhtYnZtN1JhYzcKOEJ3Z3pnVmljNXlSbkVXYjhpWUh5WGtyazNTL0VCYUNEMlQwUjM5VmlVM1I0VjBmMUtyV3NjRHowVmNiVWNhKwpzeVdyaVhKMHBnR1N0Q3FWK0dRYy9aNmJjOGt4VWpTTWxOUWtudVJRZ1FLQmdRRHpwM1ZaVmFzMTA3NThVT00rCnZUeTFNL0V6azg4cWhGb21kYVFiSFRlbStpeGpCNlg3RU9sRlkya3JwUkwvbURDSEpwR0MzYlJtUHNFaHVGSUwKRHhSQ2hUcEtTVmNsSytaaUNPaWE1ektTVUpxZnBOcW15RnNaQlhJNnRkNW9mWk42aFpJVTlJR2RUaGlYMjBONwppUW01UnZlSUx2UHVwMWZRMmRqd2F6Ykgvd0tCZ1FERU1MN21Mb2RqSjBNTXh6ZnM3MW1FNmZOUFhBMVY2ZEgrCllCVG4xS2txaHJpampRWmFNbXZ6dEZmL1F3Wkhmd3FKQUVuNGx2em5ncUNzZTMvUElZMy8zRERxd1p2NE1vdy8KRGdBeTBLQmpQYVJGNjhYT1B1d0VuSFN1UjhyZFg2UzI3TXQ2cEZIeFZ2YjlRRFJuSXc4a3grSFVreml4U0h5Ugo2NWxESklEdlFRS0JnUURpQTF3ZldoQlBCZk9VYlpQZUJydmhlaVVycXRob29BemYwQkJCOW9CQks1OHczVTloCjdQWDFuNWxYR3ZEY2x0ZXRCbUhEK3RQMFpCSFNyWit0RW5mQW5NVE5VK3E2V0ZhRWFhOGF3WXR2bmNWUWdTTXgKd25oK1pVYm9udnVJQWJSajJyTC9MUzl1TTVzc2dmKy9BQWM5RGs5ZXkrOEtXY0Jqd3pBeEU4TGxFUUtCZ0IzNwoxVEVZcTFoY0I4Tk1MeC9tOUtkN21kUG5IYUtqdVpSRzJ1c1RkVWNxajgxdklDbG95MWJUbVI5Si93dXVQczN4ClhWekF0cVlyTUtNcnZMekxSQWgyZm9OaVU1UDdKYlA5VDhwMFdBN1N2T2h5d0NobE5XeisvRlltWXJxeWcxbngKbHFlSHRYNU03REtJUFhvRndhcTlZYVk3V2M2K1pVdG4xbVNNajZnQkFvR0JBSTgwdU9iTkdhRndQTVYrUWhiZApBelkrSFNGQjBkWWZxRytzcTBmRVdIWTNHTXFmNFh0aVRqUEFjWlg3RmdtT3Q5Uit3TlFQK0dFNjZoV0JpKzBWCmVLV3prV0lXeS9sTVZCSW0zVWtlSlRCT3NudTFVaGhXbm5WVDhFeWhEY1FxcndPSGlhaUo3bFZSZmRoRWFyQysKSnpaU0czOHVZUVlyc0lITnRVZFgySmdPCi0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K
"""
def config(self):
# Use self here, not self.target, because we want the TLS module to
# be annotated on the Ambassador itself.
yield self, self.format("""
---
apiVersion: ambassador/v1
kind: Module
name: tls
ambassador_id: {self.ambassador_id}
config:
server:
enabled: True
secret: redirect-cert
redirect_cleartext_from: 8080
""")
yield self.target, self.format("""
---
apiVersion: ambassador/v1
kind: Mapping
name: tls_target_mapping
prefix: /tls-target/
service: {self.target.path.fqdn}
""")
def queries(self):
# [0]
yield Query(self.url("tls-target/", scheme="http"), expected=301)
# [1] -- PHASE 2
yield Query(self.url("ambassador/v0/diag/?json=true&filter=errors",
scheme="https"),
insecure=True,
phase=2)
def check(self):
# For query 0, check the redirection target.
assert len(self.results[0].headers['Location']) > 0
assert self.results[0].headers['Location'][0].find('/tls-target/') > 0
# For query 1, we require no errors.
# XXX Ew. If self.results[1].json is empty, the harness won't convert it to a response.
errors = self.results[1].json
assert(len(errors) == 0)
class RedirectTestsWithProxyProto(AmbassadorTest):
target: ServiceType
def init(self):
self.target = HTTP()
def requirements(self):
# only check https urls since test readiness will only end up barfing on redirect
yield from (r for r in super().requirements() if r[0] == "url" and r[1].url.startswith("https"))
def config(self):
yield self, self.format("""
---
apiVersion: ambassador/v0
kind: Module
name: ambassador
config:
use_proxy_proto: true
enable_ipv6: true
""")
yield self.target, self.format("""
---
apiVersion: ambassador/v1
kind: Mapping
name: tls_target_mapping
prefix: /tls-target/
service: {self.target.path.fqdn}
""")
def queries(self):
# TODO (concaf): FWIW, this query only covers one side of the story. This tests that this is the correct
# deviation from the normal behavior (301 response), but does not test a 301 when proxy proto is actually sent.
# This is because net/http does not yet support adding proxy proto to HTTP requests, and hence it's difficult
# to test with kat. We will need to open a raw TCP connection (e.g. telnet/nc) and send the entire HTTP Request
# in plaintext to test this behavior (or use curl with --haproxy-protocol).
yield Query(self.url("tls-target/"), error="EOF")
# We can't do the error check until we have the PROXY client mentioned above.
# # [1] -- PHASE 2
# yield Query(self.url("ambassador/v0/diag/?json=true&filter=errors"), phase=2)
#
# def check(self):
# # We don't have to check anything about query 0, the "expected" clause is enough.
#
# # For query 1, we require no errors.
# # XXX Ew. If self.results[1].json is empty, the harness won't convert it to a response.
# errors = self.results[1].json
# assert(len(errors) == 0)
class RedirectTestsInvalidSecret(AmbassadorTest):
"""
This test tests that even if the specified secret is invalid, the rest of TLS Context should
go through. In this case, even though the secret does not exist, redirect_cleartext_from
should still take effect.
"""
target: ServiceType
def init(self):
self.target = HTTP()
def requirements(self):
# only check https urls since test readiness will only end up barfing on redirect
yield from (r for r in super().requirements() if r[0] == "url" and r[1].url.startswith("https"))
def config(self):
yield self, self.format("""
---
apiVersion: ambassador/v1
kind: Module
name: tls
ambassador_id: {self.ambassador_id}
config:
server:
enabled: True
secret: does-not-exist-secret
redirect_cleartext_from: 8080
""")
yield self.target, self.format("""
---
apiVersion: ambassador/v1
kind: Mapping
name: tls_target_mapping
prefix: /tls-target/
service: {self.target.path.fqdn}
""")
def queries(self):
# [0]
yield Query(self.url("tls-target/"), expected=301)
# There's kind of no way to do this. Looks like we need to speak HTTP to the port on which we
# think the server is listening for HTTPS? This is a bad config all the way around, really.
# # [1] -- PHASE 2
# yield Query(self.url("ambassador/v0/diag/?json=true&filter=errors", scheme="https"), phase=2)
#
# def check(self):
# # We don't have to check anything about query 0, the "expected" clause is enough.
#
# # For query 1, we require no errors.
# # XXX Ew. If self.results[1].json is empty, the harness won't convert it to a response.
# errors = self.results[1].json
# assert(len(errors) == 0)
class XFPRedirect(AmbassadorTest):
parent: AmbassadorTest
target: ServiceType
def init(self):
self.target = HTTP()
def config(self):
yield self.target, self.format("""
---
apiVersion: ambassador/v0
kind: Module
name: ambassador
config:
x_forwarded_proto_redirect: true
use_remote_address: false
---
apiVersion: ambassador/v0
kind: Mapping
name: {self.name}
prefix: /{self.name}/
service: {self.target.path.fqdn}
""")
def queries(self):
# [0]
yield Query(self.url(self.name + "/target/"), headers={ "X-Forwarded-Proto": "http" }, expected=301)
# [1]
yield Query(self.url(self.name + "/target/"), headers={ "X-Forwarded-Proto": "https" }, expected=200)
# [2] -- PHASE 2
yield Query(self.url("ambassador/v0/diag/?json=true&filter=errors"), headers={ "X-Forwarded-Proto": "https" }, phase=2)
def check(self):
# For query 0, check the redirection target.
expected_location = ["https://" + self.path.fqdn + "/" + self.name + "/target/"]
actual_location = self.results[0].headers['Location']
assert actual_location == expected_location, "Expected redirect location to be {}, got {} instead".format(
expected_location,
actual_location
)
# For query 1, we don't have to check anything, the "expected" clause is enough.
# For query 2, we require no errors.
# XXX Ew. If self.results[2].json is empty, the harness won't convert it to a response.
errors = self.results[2].json
assert(len(errors) == 0)
def requirements(self):
yield ("url", Query(self.url("ambassador/v0/check_ready"), headers={"X-Forwarded-Proto": "https"}))
yield ("url", Query(self.url("ambassador/v0/check_alive"), headers={"X-Forwarded-Proto": "https"}))
[per-file quality-signal metrics omitted]
dbb950071dd42f945ac0d56f330430ed4478f1d3 | 15,216 bytes | py | Python | gcloud/tests/taskflow3/dispatchers/node/node_command_dispatcher/test_get_node_data_v2.py | chenrb/bk-sops @ bed16e9473ba45793b7f45620d8cd6f1ae16ac5d | ["Apache-2.0"] | stars: null | issues: null | forks: null
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.test import TestCase
from gcloud import err_code
from gcloud.taskflow3.dispatchers.node import NodeCommandDispatcher
from pipeline.eri.models import ExecutionData
from gcloud.tests.mock import * # noqa
from gcloud.tests.mock_settings import * # noqa
class GetNodeDataV2TestCase(TestCase):
def test_non_act_not_started(self):
username = "username"
component_code = "component_code"
subprocess_stack = ["1"]
loop = 1
pipeline_instance = MagicMock()
kwargs = {"pipeline_instance": pipeline_instance}
runtime = "runtime"
runtime_init = MagicMock(return_value=runtime)
bamboo_api = MagicMock()
get_children_states_return = MagicMock()
get_children_states_return.result = True
get_children_states_return.data = None
bamboo_api.get_children_states = MagicMock(return_value=get_children_states_return)
dispatcher = NodeCommandDispatcher(engine_ver=2, node_id="node_id")
dispatcher._get_node_info = MagicMock(return_value={"type": "StartEvent"})
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_RUNTIME, runtime_init):
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_API, bamboo_api):
node_data = dispatcher.get_node_data_v2(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
loop=loop,
**kwargs
)
bamboo_api.get_children_states.assert_called_once_with(runtime=runtime, node_id=dispatcher.node_id)
dispatcher._get_node_info.assert_called_once_with(
node_id=dispatcher.node_id, pipeline=pipeline_instance.execution_data, subprocess_stack=subprocess_stack
)
self.assertEqual(
node_data,
{
"result": True,
"data": {"inputs": {}, "outputs": [], "ex_data": ""},
"message": "",
"code": err_code.SUCCESS.code,
},
)
def test_act_not_started(self):
username = "username"
component_code = "component_code"
subprocess_stack = ["1"]
loop = 1
pipeline_instance = MagicMock()
kwargs = {"pipeline_instance": pipeline_instance}
runtime = "runtime"
runtime_init = MagicMock(return_value=runtime)
bamboo_api = MagicMock()
get_children_states_return = MagicMock()
get_children_states_return.result = True
get_children_states_return.data = None
bamboo_api.get_children_states = MagicMock(return_value=get_children_states_return)
dispatcher = NodeCommandDispatcher(engine_ver=2, node_id="node_id")
dispatcher._get_node_info = MagicMock(return_value={"type": "ServiceActivity"})
pre_render_inputs = "inputs"
pre_render_outputs = {"ex_data": "ex_data"}
dispatcher._prerender_node_data = MagicMock(return_value=(True, None, pre_render_inputs, pre_render_outputs))
format_outputs = "format_outputs"
dispatcher._format_outputs = MagicMock(return_value=(True, None, format_outputs))
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_RUNTIME, runtime_init):
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_API, bamboo_api):
node_data = dispatcher.get_node_data_v2(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
loop=loop,
**kwargs
)
bamboo_api.get_children_states.assert_called_once_with(runtime=runtime, node_id=dispatcher.node_id)
dispatcher._get_node_info.assert_called_once_with(
node_id=dispatcher.node_id, pipeline=pipeline_instance.execution_data, subprocess_stack=subprocess_stack
)
dispatcher._prerender_node_data.assert_called_once_with(
pipeline_instance=pipeline_instance, subprocess_stack=subprocess_stack, username=username
)
dispatcher._format_outputs.assert_called_once_with(
outputs=pre_render_outputs,
component_code=component_code,
pipeline_instance=pipeline_instance,
subprocess_stack=["1"],
)
self.assertEqual(
node_data,
{
"result": True,
"data": {"inputs": pre_render_inputs, "outputs": format_outputs, "ex_data": "ex_data"},
"message": "",
"code": err_code.SUCCESS.code,
},
)
def test_node_started_loop_is_none(self):
username = "username"
component_code = "component_code"
subprocess_stack = ["1"]
loop = None
pipeline_instance = MagicMock()
kwargs = {"pipeline_instance": pipeline_instance}
runtime = "runtime"
runtime_init = MagicMock(return_value=runtime)
bamboo_api = MagicMock()
get_children_states_return = MagicMock()
get_children_states_return.result = True
get_children_states_return.data = {"loop": 1}
get_execution_data_return = MagicMock()
get_execution_data_return.result = True
get_execution_data_return.data = {"inputs": "inputs", "outputs": {}}
bamboo_api.get_children_states = MagicMock(return_value=get_children_states_return)
bamboo_api.get_execution_data = MagicMock(return_value=get_execution_data_return)
dispatcher = NodeCommandDispatcher(engine_ver=2, node_id="node_id")
dispatcher._get_node_info = MagicMock(return_value={"type": "ServiceActivity"})
dispatcher._prerender_node_data = MagicMock()
format_outputs = "format_outputs"
dispatcher._format_outputs = MagicMock(return_value=(True, None, format_outputs))
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_RUNTIME, runtime_init):
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_API, bamboo_api):
node_data = dispatcher.get_node_data_v2(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
loop=loop,
**kwargs
)
bamboo_api.get_children_states.assert_called_once_with(runtime=runtime, node_id=dispatcher.node_id)
bamboo_api.get_execution_data.assert_called_once_with(runtime=runtime, node_id=dispatcher.node_id)
dispatcher._get_node_info.assert_not_called()
dispatcher._prerender_node_data.assert_not_called()
dispatcher._format_outputs.assert_called_once_with(
outputs={"outputs": {}},
component_code=component_code,
pipeline_instance=pipeline_instance,
subprocess_stack=["1"],
)
self.assertEqual(
node_data,
{
"result": True,
"data": {"inputs": "inputs", "outputs": format_outputs, "ex_data": None},
"message": "",
"code": err_code.SUCCESS.code,
},
)
def test_node_started_loop_is_latest(self):
username = "username"
component_code = "component_code"
subprocess_stack = ["1"]
loop = 2
pipeline_instance = MagicMock()
kwargs = {"pipeline_instance": pipeline_instance}
runtime = "runtime"
runtime_init = MagicMock(return_value=runtime)
bamboo_api = MagicMock()
get_children_states_return = MagicMock()
get_children_states_return.result = True
get_children_states_return.data = {"loop": 1}
get_execution_data_return = MagicMock()
get_execution_data_return.result = True
get_execution_data_return.data = {"inputs": "inputs", "outputs": {}}
bamboo_api.get_children_states = MagicMock(return_value=get_children_states_return)
bamboo_api.get_execution_data = MagicMock(return_value=get_execution_data_return)
dispatcher = NodeCommandDispatcher(engine_ver=2, node_id="node_id")
dispatcher._get_node_info = MagicMock(return_value={"type": "ServiceActivity"})
dispatcher._prerender_node_data = MagicMock()
format_outputs = "format_outputs"
dispatcher._format_outputs = MagicMock(return_value=(True, None, format_outputs))
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_RUNTIME, runtime_init):
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_API, bamboo_api):
node_data = dispatcher.get_node_data_v2(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
loop=loop,
**kwargs
)
bamboo_api.get_children_states.assert_called_once_with(runtime=runtime, node_id=dispatcher.node_id)
bamboo_api.get_execution_data.assert_called_once_with(runtime=runtime, node_id=dispatcher.node_id)
dispatcher._get_node_info.assert_not_called()
dispatcher._prerender_node_data.assert_not_called()
dispatcher._format_outputs.assert_called_once_with(
outputs={"outputs": {}},
component_code=component_code,
pipeline_instance=pipeline_instance,
subprocess_stack=["1"],
)
self.assertEqual(
node_data,
{
"result": True,
"data": {"inputs": "inputs", "outputs": format_outputs, "ex_data": None},
"message": "",
"code": err_code.SUCCESS.code,
},
)
def test_node_started_execution_data_not_exist(self):
username = "username"
component_code = "component_code"
subprocess_stack = ["1"]
loop = 2
pipeline_instance = MagicMock()
kwargs = {"pipeline_instance": pipeline_instance}
runtime = "runtime"
runtime_init = MagicMock(return_value=runtime)
bamboo_api = MagicMock()
get_children_states_return = MagicMock()
get_children_states_return.result = True
get_children_states_return.data = {"loop": 1}
get_execution_data_return = MagicMock()
get_execution_data_return.result = False
get_execution_data_return.exc = ExecutionData.DoesNotExist()
bamboo_api.get_children_states = MagicMock(return_value=get_children_states_return)
bamboo_api.get_execution_data = MagicMock(return_value=get_execution_data_return)
dispatcher = NodeCommandDispatcher(engine_ver=2, node_id="node_id")
dispatcher._get_node_info = MagicMock(return_value={"type": "ServiceActivity"})
dispatcher._prerender_node_data = MagicMock()
dispatcher._format_outputs = MagicMock()
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_RUNTIME, runtime_init):
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_API, bamboo_api):
node_data = dispatcher.get_node_data_v2(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
loop=loop,
**kwargs
)
bamboo_api.get_children_states.assert_called_once_with(runtime=runtime, node_id=dispatcher.node_id)
bamboo_api.get_execution_data.assert_called_once_with(runtime=runtime, node_id=dispatcher.node_id)
dispatcher._get_node_info.assert_not_called()
dispatcher._prerender_node_data.assert_not_called()
dispatcher._format_outputs.assert_not_called()
self.assertEqual(
node_data,
{
"result": True,
"data": {"inputs": {}, "outputs": [], "ex_data": ""},
"message": "",
"code": err_code.SUCCESS.code,
},
)
def test_node_started_loop_is_not_latest(self):
username = "username"
component_code = "component_code"
subprocess_stack = ["1"]
loop = 1
pipeline_instance = MagicMock()
kwargs = {"pipeline_instance": pipeline_instance}
runtime = "runtime"
runtime_init = MagicMock(return_value=runtime)
bamboo_api = MagicMock()
get_children_states_return = MagicMock()
get_children_states_return.result = True
get_children_states_return.data = {"loop": 2}
get_node_histories_return = MagicMock()
get_node_histories_return.result = True
get_node_histories_return.data = [{"inputs": "inputs", "outputs": {}}]
bamboo_api.get_children_states = MagicMock(return_value=get_children_states_return)
bamboo_api.get_node_histories = MagicMock(return_value=get_node_histories_return)
dispatcher = NodeCommandDispatcher(engine_ver=2, node_id="node_id")
dispatcher._get_node_info = MagicMock(return_value={"type": "ServiceActivity"})
dispatcher._prerender_node_data = MagicMock()
format_outputs = "format_outputs"
dispatcher._format_outputs = MagicMock(return_value=(True, None, format_outputs))
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_RUNTIME, runtime_init):
with patch(TASKFLOW_DISPATCHERS_NODE_BAMBOO_API, bamboo_api):
node_data = dispatcher.get_node_data_v2(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
loop=loop,
**kwargs
)
bamboo_api.get_children_states.assert_called_once_with(runtime=runtime, node_id=dispatcher.node_id)
bamboo_api.get_node_histories.assert_called_once_with(runtime=runtime, node_id=dispatcher.node_id, loop=loop)
dispatcher._get_node_info.assert_not_called()
dispatcher._prerender_node_data.assert_not_called()
dispatcher._format_outputs.assert_called_once_with(
outputs={"outputs": {}},
component_code=component_code,
pipeline_instance=pipeline_instance,
subprocess_stack=["1"],
)
self.assertEqual(
node_data,
{
"result": True,
"data": {"inputs": "inputs", "outputs": format_outputs, "ex_data": None},
"message": "",
"code": err_code.SUCCESS.code,
},
)
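The tests above rebuild the same MagicMock scaffolding each time. A hedged refactoring sketch of a shared helper (not in the original file) that would cut the duplication, built only from the mock calls the tests already make:
def make_bamboo_api(states_data, execution_data=None):
    # Build the bamboo_api mock the tests share: get_children_states always
    # succeeds with states_data; get_execution_data is mocked only when needed.
    api = MagicMock()
    states = MagicMock()
    states.result = True
    states.data = states_data
    api.get_children_states = MagicMock(return_value=states)
    if execution_data is not None:
        execution = MagicMock()
        execution.result = True
        execution.data = execution_data
        api.get_execution_data = MagicMock(return_value=execution)
    return api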
[per-file quality-signal metrics omitted]
dbd863f9cc649183a1ffe3e5c308c926f71d4dc4 | 6,948 bytes | py | Python | ruleex/deepred/model.py | rohancode/ruleex_modified @ ec974e7811fafc0c06d4d2c53b4e2898dd6b7305 | ["Apache-2.0"] | stars: null | issues: null | forks: null
from gtrain import FCNet
import numpy as np
import tensorflow as tf
from gtrain.model import TextCNN
class DeepRedFCNet(FCNet):
"""
Model of the fully connected net with its evaluation.
The binary sub-domain output is also supported by the eval_binary_class function.
The weights are initialized either when gtrain finishes the training process or by calling init_eval_weights.
"""
def init_eval_weights(self, weights):
self.eval_session = None
self.weights = weights
def __del__(self):
if self.eval_session:
self.eval_session.close()
def __eval(self, tensor_str, x):
x = np.float32(x)
if not self.eval_session:
self.eval_session = tf.Session()
with self.eval_session.as_default():
self.build_for_eval()
self.eval_session.run(tf.global_variables_initializer())
return self.eval_session.run(eval(tensor_str), {self.x_for_eval: x})
def eval(self, x):
return self.__eval("self.out_for_eval", x)
def eval_layers(self, x):
return self.__eval("self.layers", x)
def eval_binary_class(self, x, class_index):
"""
Evaluate a network with a two-dimensional softmax output computed from the specified class's output
against the highest output among the other classes.
:param x:
:param class_index:
:return:
"""
return self.__eval("self.out_for_class_eval[{}]".format(class_index), x)
def build_for_eval(self):
with tf.name_scope("Input"):
self.x_for_eval = tf.placeholder(tf.float32, shape=[None, self.input_size], name="Input...")
with tf.name_scope("FC_net"):
flowing_x = self.x_for_eval
self.layers = [flowing_x]
c=0
for i in range(3):
with tf.name_scope("layer_{}".format(i)):
W = tf.constant(self.weights[c], name="Weights_{}".format(i))
c=c+1
b = tf.constant(self.weights[c], name="Biases_{}".format(i))
c=c+1
# for i in range(len(self.weights[0])):
# with tf.name_scope("layer_{}".format(i)):
# W = tf.constant(self.weights[0][i], name="Weights_{}".format(i))
# b = tf.constant(self.weights[1][i], name="Biases_{}".format(i))
flowing_x = self.activation_function(tf.nn.xw_plus_b(flowing_x, W, b))
self.layers.append(flowing_x)
y = flowing_x
with tf.name_scope("Output"):
self.out_for_eval = tf.nn.softmax(y)
self.layers.append(self.out_for_eval)
with tf.name_scope("Binary_class_output"):
self.out_for_class_eval = list()
for i in range(self.layer_sizes[-1]):
mask = np.ones(self.layer_sizes[-1], dtype=bool)  # select every class except i
mask[i] = False
out = tf.nn.softmax(tf.stack([
self.out_for_eval[:,i],
tf.reduce_max(
tf.boolean_mask(self.out_for_eval, mask, axis=1), axis=1)
], axis=1))
self.out_for_class_eval.append(out)
def train_ended(self, session):
super().train_ended(session)
self.init_eval_weights(weights=[self.trained_W, self.trained_b])
def name(self):
return "FC_net_for_deepred_{}".format("-".join([str(ls) for ls in self.layer_sizes]))
class DeepRedTextCNN(TextCNN):
def init_eval_weights(self, weights):
self.eval_session = None
self.weights = weights
def __del__(self):
if self.eval_session:
self.eval_session.close()
def __eval(self, tensor_str, x):
x = np.float32(x)
if not self.eval_session:
self.eval_session = tf.Session()
with self.eval_session.as_default():
self.build_for_eval()
self.eval_session.run(tf.global_variables_initializer())
return self.eval_session.run(eval(tensor_str), {self.x_for_eval: x})
def eval(self, x):
return self.__eval("self.out_for_eval", x)
def eval_layers(self, x):
return self.__eval("self.layers", x)
def eval_binary_class(self, x, class_index):
"""
        Evaluates a network with a two-dimensional softmax output computed from the
        specified class's output against the highest output among the other classes.
        :param x: input samples to evaluate
        :param class_index: index of the class used as the positive output
        :return: two-column softmax output (specified class vs. best of the rest)
"""
return self.__eval("self.out_for_class_eval[{}]".format(class_index), x)
def build_for_eval(self):
with tf.name_scope("Input"):
self.tf_emb_for_eval = tf.constant(self.embedding, name="Embedding", dtype=tf.float32)
self.x_for_eval = tf.placeholder(tf.int32, shape=[None, None], name="Index_input")
with tf.name_scope("CNN_for_text"):
            conv_filter = tf.constant(self.weights[0][0], name="Filter")
flowing_x = tf.nn.embedding_lookup(self.tf_emb_for_eval, self.x_for_eval, name="Embedding_layer")
self.layers = [flowing_x]
            flowing_x = tf.nn.conv1d(flowing_x, conv_filter, 1, "SAME", name="Conv_layer")
flowing_x = tf.nn.relu(flowing_x)
self.layers.append(flowing_x)
flowing_x = tf.reduce_max(flowing_x, axis=1)
self.layers.append(flowing_x)
for i in range(len(self.weights[1])):
with tf.name_scope("layer_{}".format(i)):
W = tf.constant(self.weights[0][i+1], name="Weights_{}".format(i))
b = tf.constant(self.weights[1][i], name="Biases_{}".format(i))
flowing_x = self.activation_function(tf.nn.xw_plus_b(flowing_x, W, b))
self.layers.append(flowing_x)
y = flowing_x
with tf.name_scope("Output"):
self.out_for_eval = tf.nn.softmax(y)
            self.layers.append(self.out_for_eval)
with tf.name_scope("Binary_class_output"):
self.out_for_class_eval = list()
for i in range(self.layer_sizes[-1]):
                mask = np.ones(self.layer_sizes[-1], dtype=bool)
mask[i] = False
out = tf.nn.softmax(tf.stack([
self.out_for_eval[:,i],
tf.reduce_max(
tf.boolean_mask(self.out_for_eval, mask, axis=1), axis=1)
], axis=1))
self.out_for_class_eval.append(out)
def train_ended(self, session):
super().train_ended(session)
self.init_eval_weights(weights=[self.trained_W, self.trained_b])
def name(self):
return "TextCNN_for_deepred_{}".format("-".join([str(ls) for ls in self.layer_sizes]))
| 39.931034
| 119
| 0.582038
| 923
| 6,948
| 4.141928
| 0.149512
| 0.046037
| 0.062778
| 0.04316
| 0.81062
| 0.761705
| 0.751504
| 0.725347
| 0.725347
| 0.725347
| 0
| 0.007394
| 0.299223
| 6,948
| 173
| 120
| 40.16185
| 0.777778
| 0.122481
| 0
| 0.775
| 0
| 0
| 0.058725
| 0.016275
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15
| false
| 0
| 0.033333
| 0.05
| 0.283333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
918095fe4fb77aed9b940068f36879eabaa07d32
| 201
|
py
|
Python
|
netket/dynamics.py
|
tvieijra/netket
|
ef3ff32b242f25b6a6ae0f08db1aada85775a2ea
|
[
"Apache-2.0"
] | 10
|
2019-11-29T02:51:53.000Z
|
2021-08-14T18:52:33.000Z
|
netket/dynamics.py
|
tvieijra/netket
|
ef3ff32b242f25b6a6ae0f08db1aada85775a2ea
|
[
"Apache-2.0"
] | 2
|
2018-11-04T14:38:01.000Z
|
2018-11-08T16:56:10.000Z
|
netket/dynamics.py
|
tvieijra/netket
|
ef3ff32b242f25b6a6ae0f08db1aada85775a2ea
|
[
"Apache-2.0"
] | 6
|
2019-12-02T07:29:01.000Z
|
2021-04-04T21:55:21.000Z
|
from ._C_netket.dynamics import *
from . import _core
@_core.deprecated("function has been renamed to `timestepper`")
def create_timestepper(*args, **kwargs):
return timestepper(*args, **kwargs)
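# A sketch of how a deprecation helper like `_core.deprecated` might be
# implemented (the real one lives in netket's private `_core` module, so this
# is an assumption for illustration only):
#
#   import functools
#   import warnings
#
#   def deprecated(reason):
#       def decorator(func):
#           @functools.wraps(func)
#           def wrapper(*args, **kwargs):
#               warnings.warn("{}: {}".format(func.__name__, reason),
#                             DeprecationWarning, stacklevel=2)
#               return func(*args, **kwargs)
#           return wrapper
#       return decorator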
| 25.125
| 63
| 0.746269
| 25
| 201
| 5.8
| 0.72
| 0.206897
| 0.289655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129353
| 201
| 7
| 64
| 28.714286
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0.208955
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
919f2434d7ac63a82edb3ce5f09a3003aeea2dfb
| 4,106
|
py
|
Python
|
dfme/dfme/dataloader.py
|
cleverhans-lab/model-extraction-iclr
|
805205287876423621baca9d5e990edfe68ea803
|
[
"MIT"
] | null | null | null |
dfme/dfme/dataloader.py
|
cleverhans-lab/model-extraction-iclr
|
805205287876423621baca9d5e990edfe68ea803
|
[
"MIT"
] | null | null | null |
dfme/dfme/dataloader.py
|
cleverhans-lab/model-extraction-iclr
|
805205287876423621baca9d5e990edfe68ea803
|
[
"MIT"
] | null | null | null |
from torchvision import datasets, transforms
import torch
def get_dataloader(args):
if args.dataset.lower()=='mnist':
train_loader = torch.utils.data.DataLoader(
datasets.MNIST(args.data_root, train=True, download=True,
transform=transforms.Compose([
transforms.Resize((32, 32)),
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=args.batch_size, shuffle=True, num_workers=2)
test_loader = torch.utils.data.DataLoader(
datasets.MNIST(args.data_root, train=False, download=True,
transform=transforms.Compose([
transforms.Resize((32, 32)),
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=args.batch_size, shuffle=True, num_workers=2)
elif args.dataset.lower()=='fashion-mnist':
train_loader = torch.utils.data.DataLoader(
datasets.FashionMNIST(args.data_root, train=True, download=True,
transform=transforms.Compose([
transforms.Resize((32, 32)),
transforms.ToTensor()
])),
batch_size=args.batch_size, shuffle=True, num_workers=2)
test_loader = torch.utils.data.DataLoader(
datasets.FashionMNIST(args.data_root, train=False, download=True,
transform=transforms.Compose([
transforms.Resize((32, 32)),
transforms.ToTensor()
])),
batch_size=args.batch_size, shuffle=True, num_workers=2)
elif args.dataset.lower()=='svhn':
print("Loading SVHN data")
train_loader = torch.utils.data.DataLoader(
datasets.SVHN(args.data_root, split='train', download=True,
transform=transforms.Compose([
transforms.Resize((32, 32)),
transforms.ToTensor(),
transforms.Normalize((0.43768206, 0.44376972, 0.47280434), (0.19803014, 0.20101564, 0.19703615)),
# transforms.Normalize((0.5,0.5,0.5),(0.5,0.5,0.5)),
])),
batch_size=args.batch_size, shuffle=True, num_workers=2)
test_loader = torch.utils.data.DataLoader(
datasets.SVHN(args.data_root, split='test', download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.43768206, 0.44376972, 0.47280434), (0.19803014, 0.20101564, 0.19703615)),
# transforms.Normalize((0.5,0.5,0.5),(0.5,0.5,0.5)),
])),
batch_size=args.batch_size, shuffle=True, num_workers=2)
elif args.dataset.lower()=='cifar10':
train_loader = torch.utils.data.DataLoader(
datasets.CIFAR10(args.data_root, train=True, download=True,
transform=transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])),
batch_size=args.batch_size, shuffle=True, num_workers=2)
test_loader = torch.utils.data.DataLoader(
datasets.CIFAR10(args.data_root, train=False, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])),
batch_size=args.batch_size, shuffle=True, num_workers=2)
return train_loader, test_loader
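# A minimal usage sketch (hypothetical, not part of the original module):
# get_dataloader only reads `dataset`, `data_root`, and `batch_size` from
# `args`, so a plain namespace is enough to drive it:
#
#   from argparse import Namespace
#
#   args = Namespace(dataset='cifar10', data_root='./data', batch_size=128)
#   train_loader, test_loader = get_dataloader(args)
#   images, labels = next(iter(train_loader))  # images: [128, 3, 32, 32]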
| 53.324675
| 125
| 0.525572
| 404
| 4,106
| 5.235149
| 0.158416
| 0.068085
| 0.014184
| 0.018913
| 0.899764
| 0.899764
| 0.899764
| 0.895035
| 0.885579
| 0.885579
| 0
| 0.094069
| 0.355334
| 4,106
| 77
| 126
| 53.324675
| 0.704949
| 0.024598
| 0
| 0.728571
| 0
| 0
| 0.01374
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014286
| false
| 0
| 0.028571
| 0
| 0.057143
| 0.014286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91a51b02800f40ee8ecf0c03c2c2d83448d32ec9
| 44,844
|
py
|
Python
|
dlpy/tests/test_model.py
|
jld23/python-dlpy
|
39fe417a02da8f40975691392f5735fe02160da0
|
[
"Apache-2.0"
] | null | null | null |
dlpy/tests/test_model.py
|
jld23/python-dlpy
|
39fe417a02da8f40975691392f5735fe02160da0
|
[
"Apache-2.0"
] | null | null | null |
dlpy/tests/test_model.py
|
jld23/python-dlpy
|
39fe417a02da8f40975691392f5735fe02160da0
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright SAS Institute
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This test requires a running CAS server. You must use an ~/.authinfo
# file to specify your username and password. The CAS host and port must
# be specified using the CASHOST and CASPORT environment variables.
# A specific protocol ('cas', 'http', 'https', or 'auto') can be set using
# the CASPROTOCOL environment variable.
import os
#import onnx
import swat
import swat.utils.testing as tm
from swat.cas.table import CASTable
from dlpy.model import Model, Optimizer, AdamSolver, Sequence
from dlpy.sequential import Sequential
from dlpy.timeseries import TimeseriesTable
from dlpy.layers import (InputLayer, Conv2d, Conv1d, Pooling, Dense, OutputLayer,
Recurrent, Keypoints, BN, Res, Concat, Reshape, GlobalAveragePooling1D)
from dlpy.utils import caslibify
from dlpy.applications import Tiny_YoloV2
import unittest
class TestModel(unittest.TestCase):
'''
Please locate the images.sashdat file under the datasources to the DLPY_DATA_DIR.
'''
server_type = None
s = None
server_sep = '/'
data_dir = None
data_dir_local = None
@classmethod
def setUpClass(cls):
swat.reset_option()
swat.options.cas.print_messages = False
swat.options.interactive_mode = False
cls.s = swat.CAS()
cls.server_type = tm.get_cas_host_type(cls.s)
cls.server_sep = '\\'
if cls.server_type.startswith("lin") or cls.server_type.startswith("osx"):
cls.server_sep = '/'
if 'DLPY_DATA_DIR' in os.environ:
cls.data_dir = os.environ.get('DLPY_DATA_DIR')
if cls.data_dir.endswith(cls.server_sep):
cls.data_dir = cls.data_dir[:-1]
cls.data_dir += cls.server_sep
if 'DLPY_DATA_DIR_LOCAL' in os.environ:
cls.data_dir_local = os.environ.get('DLPY_DATA_DIR_LOCAL')
if cls.data_dir_local.endswith(cls.server_sep):
cls.data_dir_local = cls.data_dir_local[:-1]
cls.data_dir_local += cls.server_sep
def test_model1(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_', lr=0.001)
if r.severity > 0:
for msg in r.messages:
print(msg)
self.assertTrue(r.severity <= 1)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model2(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_')
self.assertTrue(r.severity == 0)
r2 = model1.predict(data='eee')
self.assertTrue(r2.severity == 0)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model3(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_')
self.assertTrue(r.severity == 0)
r1 = model1.fit(data='eee', inputs='_image_', target='_label_', max_epochs=3)
self.assertTrue(r1.severity == 0)
r2 = model1.fit(data='eee', inputs='_image_', target='_label_', max_epochs=2)
self.assertTrue(r2.severity == 0)
r3 = model1.predict(data='eee')
self.assertTrue(r3.severity == 0)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model4(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_')
self.assertTrue(r.severity == 0)
r2 = model1.evaluate(data='eee')
self.assertTrue(r2.severity == 0)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model5(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_')
self.assertTrue(r.severity == 0)
r1 = model1.fit(data='eee', inputs='_image_', target='_label_', max_epochs=3)
self.assertTrue(r1.severity == 0)
r2 = model1.fit(data='eee', inputs='_image_', target='_label_', max_epochs=2)
self.assertTrue(r2.severity == 0)
r3 = model1.evaluate(data='eee')
self.assertTrue(r3.severity == 0)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model6(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_', save_best_weights=True)
self.assertTrue(r.severity == 0)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model7(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_', save_best_weights=True)
self.assertTrue(r.severity == 0)
r2 = model1.predict(data='eee', use_best_weights=True)
self.assertTrue(r2.severity == 0)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model8(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_', save_best_weights=True)
self.assertTrue(r.severity == 0)
r2 = model1.predict(data='eee')
self.assertTrue(r2.severity == 0)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model9(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_', save_best_weights=True)
self.assertTrue(r.severity == 0)
r2 = model1.evaluate(data='eee', use_best_weights=True)
self.assertTrue(r2.severity == 0)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model10(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_', save_best_weights=True)
self.assertTrue(r.severity == 0)
r2 = model1.evaluate(data='eee')
self.assertTrue(r2.severity == 0)
model1.save_to_table(self.data_dir)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model11(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_', save_best_weights=True)
self.assertTrue(r.severity == 0)
r1 = model1.fit(data='eee', inputs='_image_', target='_label_', max_epochs=3)
self.assertTrue(r1.severity == 0)
r2 = model1.fit(data='eee', inputs='_image_', target='_label_', max_epochs=2)
self.assertTrue(r2.severity == 0)
r3 = model1.evaluate(data='eee', use_best_weights=True)
self.assertTrue(r3.severity == 0)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model12(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_', save_best_weights=True)
self.assertTrue(r.severity == 0)
r1 = model1.fit(data='eee', inputs='_image_', target='_label_', max_epochs=3)
self.assertTrue(r1.severity == 0)
r2 = model1.fit(data='eee', inputs='_image_', target='_label_', max_epochs=2, save_best_weights=True)
self.assertTrue(r2.severity == 0)
r3 = model1.predict(data='eee', use_best_weights=True)
self.assertTrue(r3.severity == 0)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model13(self):
model = Sequential(self.s, model_table='simple_cnn')
model.add(InputLayer(3, 224, 224))
model.add(Conv2d(2, 3))
model.add(Pooling(2))
model.add(Dense(4))
model.add(OutputLayer(n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
model.save_to_table(self.data_dir)
def test_model13a(self):
model = Sequential(self.s, model_table='simple_cnn')
model.add(InputLayer(3, 224, 224))
model.add(Conv2d(2, 3))
model.add(Pooling(2))
model.add(Dense(4))
model.add(OutputLayer(n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
model.save_to_table(self.data_dir)
def test_model13b(self):
model = Sequential(self.s, model_table='simple_cnn')
model.add(layer=InputLayer(n_channels=1, height=10, width=10))
model.add(layer=OutputLayer(n=10, full_connect=False))
self.assertTrue(model.summary.loc[1, 'Number of Parameters'] == (0, 0))
model1 = Sequential(self.s, model_table='simple_cnn')
model1.add(layer=InputLayer(n_channels=1, height=10, width=10))
model1.add(layer=OutputLayer(n=10, full_connect=True))
self.assertTrue(model1.summary.loc[1, 'Number of Parameters'] == (1000, 10))
model2 = Sequential(self.s, model_table='Simple_CNN')
model2.add(layer=InputLayer(n_channels=1, height=10, width=10))
model2.add(layer=OutputLayer(n=10, full_connect=True, include_bias=False))
self.assertTrue(model2.summary.loc[1, 'Number of Parameters'] == (1000, 0))
model3 = Sequential(self.s, model_table='Simple_CNN')
model3.add(layer=InputLayer(n_channels=1, height=10, width=10))
model3.add(layer=Conv2d(4, 3))
model3.add(layer=OutputLayer(n=10))
self.assertTrue(model3.summary.loc[2, 'Number of Parameters'] == (4000, 10))
model4 = Sequential(self.s, model_table='Simple_CNN')
model4.add(layer=InputLayer(n_channels=1, height=10, width=10))
model4.add(layer=Conv2d(4, 3))
model4.add(layer=OutputLayer(n=10, full_connect=False))
self.assertTrue(model4.summary.loc[2, 'Number of Parameters'] == (0, 0))
def test_model14(self):
model = Sequential(self.s, model_table='Simple_CNN')
model.add(layer=InputLayer(n_channels=1, height=10, width=10))
model.add(layer=OutputLayer())
model.summary
def test_model15(self):
model = Sequential(self.s, model_table='Simple_CNN')
model.add(layer=InputLayer(n_channels=1, height=10, width=10))
model.add(layer=Keypoints())
self.assertTrue(model.summary.loc[1, 'Number of Parameters'] == (0, 0))
def test_model16(self):
model = Sequential(self.s, model_table='Simple_CNN')
model.add(layer=InputLayer(n_channels=1, height=10, width=10))
model.add(layer=Keypoints(n=10, include_bias=False))
self.assertTrue(model.summary.loc[1, 'Number of Parameters'] == (1000, 0))
    def test_model17(self):
model = Sequential(self.s, model_table='Simple_CNN')
model.add(layer=InputLayer(n_channels=1, height=10, width=10))
model.add(layer=Keypoints(n=10))
self.assertTrue(model.summary.loc[1, 'Number of Parameters'] == (1000, 10))
def test_model18(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_', max_epochs=1)
self.assertTrue(r.severity == 0)
model1.save_weights_csv(self.data_dir)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_evaluate_obj_det(self):
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path = self.data_dir + 'evaluate_obj_det_det.sashdat', task = 'load')
self.s.table.loadtable(caslib = caslib,
casout = {'name': 'evaluate_obj_det_det', 'replace': True},
path = path)
self.s.table.loadtable(caslib = caslib,
casout = {'name': 'evaluate_obj_det_gt', 'replace': True},
path = 'evaluate_obj_det_gt.sashdat')
yolo_anchors = (5.9838598901098905,
3.4326923076923075,
2.184993862520458,
1.9841448445171848,
1.0261752136752136,
1.2277777777777779)
yolo_model = Tiny_YoloV2(self.s, grid_number = 17, scale = 1.0 / 255,
n_classes = 1, height = 544, width = 544,
predictions_per_grid = 3,
anchors = yolo_anchors,
max_boxes = 100,
coord_type = 'yolo',
max_label_per_image = 100,
class_scale = 1.0,
coord_scale = 2.0,
prediction_not_a_object_scale = 1,
object_scale = 5,
detection_threshold = 0.05,
iou_threshold = 0.2)
metrics = yolo_model.evaluate_object_detection(ground_truth = 'evaluate_obj_det_gt', coord_type = 'yolo',
detection_data = 'evaluate_obj_det_det', iou_thresholds=0.5)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_model_forecast1(self):
import datetime
try:
import pandas as pd
        except ImportError:
unittest.TestCase.skipTest(self, "pandas not found in the libraries")
import numpy as np
filename1 = os.path.join(os.path.dirname(__file__), 'datasources', 'timeseries_exp1.csv')
importoptions1 = dict(filetype='delimited', delimiter=',')
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
self.table1 = TimeseriesTable.from_localfile(self.s, filename1, importoptions=importoptions1)
self.table1.timeseries_formatting(timeid='datetime',
timeseries=['series', 'covar'],
timeid_informat='ANYDTDTM19.',
timeid_format='DATETIME19.')
self.table1.timeseries_accumlation(acc_interval='day',
groupby=['id1var', 'id2var'])
self.table1.prepare_subsequences(seq_len=2,
target='series',
predictor_timeseries=['series'],
missing_handling='drop')
valid_start = datetime.date(2015, 1, 4)
test_start = datetime.date(2015, 1, 7)
traintbl, validtbl, testtbl = self.table1.timeseries_partition(
validation_start=valid_start, testing_start=test_start)
model1 = Sequential(self.s, model_table='lstm_rnn')
model1.add(InputLayer(std='STD'))
model1.add(Recurrent(rnn_type='LSTM', output_type='encoding', n=15, reversed_=False))
model1.add(OutputLayer(act='IDENTITY'))
optimizer = Optimizer(algorithm=AdamSolver(learning_rate=0.01), mini_batch_size=32,
seed=1234, max_epochs=10)
seq_spec = Sequence(**traintbl.sequence_opt)
result = model1.fit(traintbl, valid_table=validtbl, optimizer=optimizer,
sequence=seq_spec, **traintbl.inputs_target)
self.assertTrue(result.severity == 0)
resulttbl1 = model1.forecast(horizon=1)
self.assertTrue(isinstance(resulttbl1, CASTable))
self.assertTrue(resulttbl1.shape[0]==15)
local_resulttbl1 = resulttbl1.to_frame()
unique_time = local_resulttbl1.datetime.unique()
self.assertTrue(len(unique_time)==1)
self.assertTrue(pd.Timestamp(unique_time[0])==datetime.datetime(2015,1,7))
resulttbl2 = model1.forecast(horizon=3)
self.assertTrue(isinstance(resulttbl2, CASTable))
self.assertTrue(resulttbl2.shape[0]==45)
local_resulttbl2 = resulttbl2.to_frame()
local_resulttbl2.sort_values(by=['id1var', 'id2var', 'datetime'], inplace=True)
unique_time = local_resulttbl2.datetime.unique()
self.assertTrue(len(unique_time)==3)
for i in range(3):
self.assertTrue(pd.Timestamp(unique_time[i])==datetime.datetime(2015,1,7+i))
series_lag1 = local_resulttbl2.loc[(local_resulttbl2.id1var==1) & (local_resulttbl2.id2var==1),
'series_lag1'].values
series_lag2 = local_resulttbl2.loc[(local_resulttbl2.id1var==1) & (local_resulttbl2.id2var==1),
'series_lag2'].values
DL_Pred = local_resulttbl2.loc[(local_resulttbl2.id1var==1) & (local_resulttbl2.id2var==1),
'_DL_Pred_'].values
self.assertTrue(np.array_equal(series_lag1[1:3], DL_Pred[0:2]))
self.assertTrue(series_lag2[2]==DL_Pred[0])
def test_model_forecast2(self):
import datetime
try:
import pandas as pd
        except ImportError:
unittest.TestCase.skipTest(self, "pandas not found in the libraries")
import numpy as np
filename1 = os.path.join(os.path.dirname(__file__), 'datasources', 'timeseries_exp1.csv')
importoptions1 = dict(filetype='delimited', delimiter=',')
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
self.table2 = TimeseriesTable.from_localfile(self.s, filename1, importoptions=importoptions1)
self.table2.timeseries_formatting(timeid='datetime',
timeseries=['series', 'covar'],
timeid_informat='ANYDTDTM19.',
timeid_format='DATETIME19.')
self.table2.timeseries_accumlation(acc_interval='day',
groupby=['id1var', 'id2var'])
self.table2.prepare_subsequences(seq_len=2,
target='series',
predictor_timeseries=['series', 'covar'],
missing_handling='drop')
valid_start = datetime.date(2015, 1, 4)
test_start = datetime.date(2015, 1, 7)
traintbl, validtbl, testtbl = self.table2.timeseries_partition(
validation_start=valid_start, testing_start=test_start)
model1 = Sequential(self.s, model_table='lstm_rnn')
model1.add(InputLayer(std='STD'))
model1.add(Recurrent(rnn_type='LSTM', output_type='encoding', n=15, reversed_=False))
model1.add(OutputLayer(act='IDENTITY'))
optimizer = Optimizer(algorithm=AdamSolver(learning_rate=0.01), mini_batch_size=32,
seed=1234, max_epochs=10)
seq_spec = Sequence(**traintbl.sequence_opt)
result = model1.fit(traintbl, valid_table=validtbl, optimizer=optimizer,
sequence=seq_spec, **traintbl.inputs_target)
self.assertTrue(result.severity == 0)
resulttbl1 = model1.forecast(testtbl, horizon=1)
self.assertTrue(isinstance(resulttbl1, CASTable))
self.assertTrue(resulttbl1.shape[0]==testtbl.shape[0])
local_resulttbl1 = resulttbl1.to_frame()
unique_time = local_resulttbl1.datetime.unique()
self.assertTrue(len(unique_time)==4)
for i in range(4):
self.assertTrue(pd.Timestamp(unique_time[i])==datetime.datetime(2015,1,7+i))
resulttbl2 = model1.forecast(testtbl, horizon=3)
self.assertTrue(isinstance(resulttbl2, CASTable))
self.assertTrue(resulttbl2.shape[0]==45)
local_resulttbl2 = resulttbl2.to_frame()
local_resulttbl2.sort_values(by=['id1var', 'id2var', 'datetime'], inplace=True)
unique_time = local_resulttbl2.datetime.unique()
self.assertTrue(len(unique_time)==3)
for i in range(3):
self.assertTrue(pd.Timestamp(unique_time[i])==datetime.datetime(2015,1,7+i))
series_lag1 = local_resulttbl2.loc[(local_resulttbl2.id1var==1) & (local_resulttbl2.id2var==1),
'series_lag1'].values
series_lag2 = local_resulttbl2.loc[(local_resulttbl2.id1var==1) & (local_resulttbl2.id2var==1),
'series_lag2'].values
DL_Pred = local_resulttbl2.loc[(local_resulttbl2.id1var==1) & (local_resulttbl2.id2var==1),
'_DL_Pred_'].values
self.assertTrue(np.array_equal(series_lag1[1:3], DL_Pred[0:2]))
self.assertTrue(series_lag2[2]==DL_Pred[0])
def test_model_forecast3(self):
import datetime
try:
import pandas as pd
        except ImportError:
unittest.TestCase.skipTest(self, "pandas not found in the libraries")
import numpy as np
filename1 = os.path.join(os.path.dirname(__file__), 'datasources', 'timeseries_exp1.csv')
importoptions1 = dict(filetype='delimited', delimiter=',')
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
self.table3 = TimeseriesTable.from_localfile(self.s, filename1, importoptions=importoptions1)
self.table3.timeseries_formatting(timeid='datetime',
timeseries=['series', 'covar'],
timeid_informat='ANYDTDTM19.',
timeid_format='DATETIME19.')
self.table3.timeseries_accumlation(acc_interval='day',
groupby=['id1var', 'id2var'])
self.table3.prepare_subsequences(seq_len=2,
target='series',
predictor_timeseries=['series', 'covar'],
missing_handling='drop')
valid_start = datetime.date(2015, 1, 4)
test_start = datetime.date(2015, 1, 7)
traintbl, validtbl, testtbl = self.table3.timeseries_partition(
validation_start=valid_start, testing_start=test_start)
sascode = '''
data {};
set {};
drop series_lag1;
run;
'''.format(validtbl.name, validtbl.name)
self.s.retrieve('dataStep.runCode', _messagelevel='error', code=sascode)
sascode = '''
data {};
set {};
drop series_lag1;
run;
'''.format(testtbl.name, testtbl.name)
self.s.retrieve('dataStep.runCode', _messagelevel='error', code=sascode)
model1 = Sequential(self.s, model_table='lstm_rnn')
model1.add(InputLayer(std='STD'))
model1.add(Recurrent(rnn_type='LSTM', output_type='encoding', n=15, reversed_=False))
model1.add(OutputLayer(act='IDENTITY'))
optimizer = Optimizer(algorithm=AdamSolver(learning_rate=0.01), mini_batch_size=32,
seed=1234, max_epochs=10)
seq_spec = Sequence(**traintbl.sequence_opt)
result = model1.fit(traintbl, optimizer=optimizer,
sequence=seq_spec, **traintbl.inputs_target)
self.assertTrue(result.severity == 0)
resulttbl1 = model1.forecast(validtbl, horizon=1)
self.assertTrue(isinstance(resulttbl1, CASTable))
self.assertTrue(resulttbl1.shape[0]==15)
local_resulttbl1 = resulttbl1.to_frame()
unique_time = local_resulttbl1.datetime.unique()
self.assertTrue(len(unique_time)==1)
self.assertTrue(pd.Timestamp(unique_time[0])==datetime.datetime(2015,1,4))
resulttbl2 = model1.forecast(validtbl, horizon=3)
self.assertTrue(isinstance(resulttbl2, CASTable))
self.assertTrue(resulttbl2.shape[0]==45)
local_resulttbl2 = resulttbl2.to_frame()
local_resulttbl2.sort_values(by=['id1var', 'id2var', 'datetime'], inplace=True)
unique_time = local_resulttbl2.datetime.unique()
self.assertTrue(len(unique_time)==3)
for i in range(3):
self.assertTrue(pd.Timestamp(unique_time[i])==datetime.datetime(2015,1,4+i))
series_lag1 = local_resulttbl2.loc[(local_resulttbl2.id1var==1) & (local_resulttbl2.id2var==1),
'series_lag1'].values
series_lag2 = local_resulttbl2.loc[(local_resulttbl2.id1var==1) & (local_resulttbl2.id2var==1),
'series_lag2'].values
DL_Pred = local_resulttbl2.loc[(local_resulttbl2.id1var==1) & (local_resulttbl2.id2var==1),
'_DL_Pred_'].values
self.assertTrue(np.array_equal(series_lag1[1:3], DL_Pred[0:2]))
self.assertTrue(series_lag2[2]==DL_Pred[0])
with self.assertRaises(RuntimeError):
resulttbl3 = model1.forecast(testtbl, horizon=3)
def test_load_reshape_detection(self):
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
yolo_model = Model(self.s)
yolo_model.load(self.data_dir + 'YOLOV2_MULTISIZE.sashdat')
model_df = self.s.fetch(table = dict(name = yolo_model.model_name,
where = '_DLKey0_ eq "detection1" or _DLKey0_ eq "reshape1"'),
to = 50).Fetch
anchors_5 = model_df['_DLNumVal_'][model_df['_DLKey1_'] == 'detectionopts.anchors.8'].tolist()[0]
self.assertAlmostEqual(anchors_5, 1.0907, 4)
depth = model_df['_DLNumVal_'][model_df['_DLKey1_'] == 'reshapeopts.depth'].tolist()[0]
self.assertEqual(depth, 256)
def test_plot_ticks(self):
model1 = Sequential(self.s, model_table='Simple_CNN1')
model1.add(InputLayer(3, 224, 224))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Conv2d(8, 7))
model1.add(Pooling(2))
model1.add(Dense(16))
model1.add(OutputLayer(act='softmax', n=2))
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
caslib, path, tmp_caslib = caslibify(self.s, path=self.data_dir+'images.sashdat', task='load')
self.s.table.loadtable(caslib=caslib,
casout={'name': 'eee', 'replace': True},
path=path)
r = model1.fit(data='eee', inputs='_image_', target='_label_', lr=0.001, max_epochs=5)
# Test default tick_frequency value of 1
ax = model1.plot_training_history()
self.assertEqual(len(ax.xaxis.majorTicks), model1.n_epochs)
# Test even
tick_frequency = 2
ax = model1.plot_training_history(tick_frequency=tick_frequency)
self.assertEqual(len(ax.xaxis.majorTicks), model1.n_epochs // tick_frequency + 1)
# Test odd
tick_frequency = 3
ax = model1.plot_training_history(tick_frequency=tick_frequency)
self.assertEqual(len(ax.xaxis.majorTicks), model1.n_epochs // tick_frequency + 1)
# Test max
tick_frequency = model1.n_epochs
ax = model1.plot_training_history(tick_frequency=tick_frequency)
self.assertEqual(len(ax.xaxis.majorTicks), model1.n_epochs // tick_frequency + 1)
# Test 0
tick_frequency = 0
ax = model1.plot_training_history(tick_frequency=tick_frequency)
self.assertEqual(len(ax.xaxis.majorTicks), model1.n_epochs)
if (caslib is not None) and tmp_caslib:
self.s.retrieve('table.dropcaslib', message_level = 'error', caslib = caslib)
def test_stride(self):
model = Sequential(self.s, model_table = 'Simple_CNN_3classes_cropped')
model.add(InputLayer(1, width = 36, height = 144, #offsets = myimage.channel_means,
name = 'input1',
random_mutation = 'random',
random_flip = 'HV'))
model.add(Conv2d(64, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Conv2d(64, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Conv2d(64, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Pooling(height = 2, width = 2, stride_vertical = 2, stride_horizontal = 1, pool = 'max')) # 72, 36
model.add(Conv2d(128, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Conv2d(128, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Conv2d(128, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Pooling(height = 2, width = 2, stride_vertical = 2, stride_horizontal = 1, pool = 'max')) # 36*36
model.add(Conv2d(256, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Conv2d(256, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Conv2d(256, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Pooling(2, pool = 'max')) # 18 * 18
model.add(Conv2d(512, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Conv2d(512, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Conv2d(512, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Pooling(2, pool = 'max')) # 9 * 9
model.add(Conv2d(1024, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Conv2d(1024, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Conv2d(1024, 3, 3, include_bias = False, act = 'identity'))
model.add(BN(act = 'relu'))
model.add(Pooling(9))
model.add(Dense(256, dropout = 0.5))
model.add(OutputLayer(act = 'softmax', n = 3, name = 'output1'))
self.assertEqual(model.summary['Output Size'].values[-3], (1, 1, 1024))
model.print_summary()
# 2d print summary numerical check
self.assertEqual(model.summary.iloc[1, -1], 2985984)
def test_heat_map_analysis(self):
if self.data_dir is None:
unittest.TestCase.skipTest(self, 'DLPY_DATA_DIR is not set in the environment variables')
from dlpy.applications import ResNet50_Caffe
from dlpy.images import ImageTable
pre_train_weight_file = os.path.join(self.data_dir, 'ResNet-50-model.caffemodel.h5')
my_im = ImageTable.load_files(self.s, self.data_dir+'giraffe_dolphin_small')
my_im_r = my_im.resize(width=224, inplace=False)
model = ResNet50_Caffe(self.s, model_table='ResNet50_Caffe',
n_classes=2, n_channels=3, width=224, height=224, scale=1,
random_flip='none', random_crop='none',
offsets=my_im_r.channel_means, pre_trained_weights=True,
pre_trained_weights_file=pre_train_weight_file,
include_top=False)
model.fit(data=my_im_r, mini_batch_size=1, max_epochs=1)
model.heat_map_analysis(data=my_im_r, mask_width=None, mask_height=None, step_size=None,
max_display=1)
self.assertRaises(ValueError, lambda:model.heat_map_analysis(mask_width=56, mask_height=56,
step_size=8, display=False))
self.assertRaises(ValueError, lambda:model.heat_map_analysis(data=my_im, mask_width=56,
mask_height=56, step_size=8, display=False))
try:
from numpy import array
        except ImportError:
unittest.TestCase.skipTest(self, 'numpy is not installed')
self.assertRaises(ValueError, lambda:model.heat_map_analysis(data=array([]), mask_width=56,
mask_height=56, step_size=8, display=False))
def test_load_padding(self):
if self.data_dir is None:
unittest.TestCase.skipTest(self, "DLPY_DATA_DIR is not set in the environment variables")
model5 = Model(self.s)
model5.load(path = self.data_dir + 'vgg16.sashdat')
def test_conv1d_model(self):
# a model from https://blog.goodaudience.com/introduction-to-1d-convolutional-neural-networks-in-keras-for-time-sequences-3a7ff801a2cf
Conv1D = Conv1d
MaxPooling1D=Pooling
model_m = Sequential(self.s)
model_m.add(InputLayer(width=80*3, height=1, n_channels=1))
model_m.add(Conv1D(100, 10, act='relu'))
model_m.add(Conv1D(100, 10, act='relu'))
model_m.add(MaxPooling1D(3))
model_m.add(Conv1D(160, 10, act='relu'))
model_m.add(Conv1D(160, 10, act='relu'))
model_m.add(GlobalAveragePooling1D(dropout=0.5))
model_m.add(OutputLayer(n=6, act='softmax'))
# use assertEqual to check whether the layer output size matches the expected value for MaxPooling1D
self.assertEqual(model_m.layers[3].output_size, (1, 80, 100))
model_m.print_summary()
# 1d print summary numerical check
self.assertEqual(model_m.summary.iloc[1, -1], 240000)
@classmethod
def tearDownClass(cls):
# tear down tests
try:
cls.s.terminate()
except swat.SWATError:
pass
del cls.s
swat.reset_option()
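# A condensed sketch of the load-then-fit pattern these tests repeat (assumes
# a reachable CAS server plus DLPY_DATA_DIR, per the note at the top of the
# file; `data_dir` below is hypothetical):
#
#   s = swat.CAS()
#   caslib, path, tmp_caslib = caslibify(s, path=data_dir + 'images.sashdat',
#                                        task='load')
#   s.table.loadtable(caslib=caslib,
#                     casout={'name': 'eee', 'replace': True}, path=path)
#   model = Sequential(s, model_table='Simple_CNN1')
#   # ...add layers as in test_model1...
#   r = model.fit(data='eee', inputs='_image_', target='_label_')
#   if (caslib is not None) and tmp_caslib:
#       s.retrieve('table.dropcaslib', message_level='error', caslib=caslib)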
| 44.137795
| 142
| 0.595152
| 5,421
| 44,844
| 4.767202
| 0.095739
| 0.03796
| 0.01958
| 0.023217
| 0.813605
| 0.802616
| 0.794915
| 0.779321
| 0.768254
| 0.745192
| 0
| 0.046406
| 0.281598
| 44,844
| 1,015
| 143
| 44.181281
| 0.755774
| 0.032981
| 0
| 0.70288
| 0
| 0
| 0.105791
| 0.004133
| 0
| 0
| 0
| 0
| 0.113874
| 1
| 0.041885
| false
| 0.001309
| 0.037958
| 0
| 0.087696
| 0.005236
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91cb6456311bc0c8ebfa19ec0dbf7c69ecdf5b76
| 93,748
|
py
|
Python
|
boto3_type_annotations_with_docs/boto3_type_annotations/route53resolver/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 119
|
2018-12-01T18:20:57.000Z
|
2022-02-02T10:31:29.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/route53resolver/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 15
|
2018-11-16T00:16:44.000Z
|
2021-11-13T03:44:18.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/route53resolver/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 11
|
2019-05-06T05:26:51.000Z
|
2021-09-28T15:27:59.000Z
|
from typing import Optional
from botocore.client import BaseClient
from typing import Dict
from botocore.paginate import Paginator
from botocore.waiter import Waiter
from typing import Union
from typing import List
class Client(BaseClient):
def associate_resolver_endpoint_ip_address(self, ResolverEndpointId: str, IpAddress: Dict) -> Dict:
"""
        Adds IP addresses to an inbound or an outbound resolver endpoint. If you want to add more than one IP address, submit one ``AssociateResolverEndpointIpAddress`` request for each IP address.
To remove an IP address from an endpoint, see DisassociateResolverEndpointIpAddress .
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/AssociateResolverEndpointIpAddress>`_
**Request Syntax**
::
response = client.associate_resolver_endpoint_ip_address(
ResolverEndpointId='string',
IpAddress={
'IpId': 'string',
'SubnetId': 'string',
'Ip': 'string'
}
)
**Response Syntax**
::
{
'ResolverEndpoint': {
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'Name': 'string',
'SecurityGroupIds': [
'string',
],
'Direction': 'INBOUND'|'OUTBOUND',
'IpAddressCount': 123,
'HostVPCId': 'string',
'Status': 'CREATING'|'OPERATIONAL'|'UPDATING'|'AUTO_RECOVERING'|'ACTION_NEEDED'|'DELETING',
'StatusMessage': 'string',
'CreationTime': 'string',
'ModificationTime': 'string'
}
}
**Response Structure**
- *(dict) --*
- **ResolverEndpoint** *(dict) --*
The response to an ``AssociateResolverEndpointIpAddress`` request.
- **Id** *(string) --*
The ID of the resolver endpoint.
- **CreatorRequestId** *(string) --*
A unique string that identifies the request that created the resolver endpoint. The ``CreatorRequestId`` allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver endpoint.
- **Name** *(string) --*
The name that you assigned to the resolver endpoint when you submitted a CreateResolverEndpoint request.
- **SecurityGroupIds** *(list) --*
The ID of one or more security groups that control access to this VPC. The security group must include one or more inbound resolver rules.
- *(string) --*
- **Direction** *(string) --*
Indicates whether the resolver endpoint allows inbound or outbound DNS queries:
* ``INBOUND`` : allows DNS queries to your VPC from your network or another VPC
* ``OUTBOUND`` : allows DNS queries from your VPC to your network or another VPC
- **IpAddressCount** *(integer) --*
The number of IP addresses that the resolver endpoint can use for DNS queries.
- **HostVPCId** *(string) --*
The ID of the VPC that you want to create the resolver endpoint in.
- **Status** *(string) --*
A code that specifies the current status of the resolver endpoint.
- **StatusMessage** *(string) --*
A detailed description of the status of the resolver endpoint.
- **CreationTime** *(string) --*
The date and time that the endpoint was created, in Unix time format and Coordinated Universal Time (UTC).
- **ModificationTime** *(string) --*
The date and time that the endpoint was last modified, in Unix time format and Coordinated Universal Time (UTC).
:type ResolverEndpointId: string
:param ResolverEndpointId: **[REQUIRED]**
The ID of the resolver endpoint that you want to associate IP addresses with.
:type IpAddress: dict
:param IpAddress: **[REQUIRED]**
Either the IPv4 address that you want to add to a resolver endpoint or a subnet ID. If you specify a subnet ID, Resolver chooses an IP address for you from the available IPs in the specified subnet.
- **IpId** *(string) --*
*Only when removing an IP address from a resolver endpoint* : The ID of the IP address that you want to remove. To get this ID, use GetResolverEndpoint .
- **SubnetId** *(string) --*
The ID of the subnet that includes the IP address that you want to update. To get this ID, use GetResolverEndpoint .
- **Ip** *(string) --*
The new IP address.
:rtype: dict
:returns:
"""
pass
def associate_resolver_rule(self, ResolverRuleId: str, VPCId: str, Name: str = None) -> Dict:
"""
Associates a resolver rule with a VPC. When you associate a rule with a VPC, Resolver forwards all DNS queries for the domain name that is specified in the rule and that originate in the VPC. The queries are forwarded to the IP addresses for the DNS resolvers that are specified in the rule. For more information about rules, see CreateResolverRule .
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/AssociateResolverRule>`_
**Request Syntax**
::
response = client.associate_resolver_rule(
ResolverRuleId='string',
Name='string',
VPCId='string'
)
**Response Syntax**
::
{
'ResolverRuleAssociation': {
'Id': 'string',
'ResolverRuleId': 'string',
'Name': 'string',
'VPCId': 'string',
'Status': 'CREATING'|'COMPLETE'|'DELETING'|'FAILED'|'OVERRIDDEN',
'StatusMessage': 'string'
}
}
**Response Structure**
- *(dict) --*
- **ResolverRuleAssociation** *(dict) --*
Information about the ``AssociateResolverRule`` request, including the status of the request.
- **Id** *(string) --*
The ID of the association between a resolver rule and a VPC. Resolver assigns this value when you submit an AssociateResolverRule request.
- **ResolverRuleId** *(string) --*
The ID of the resolver rule that you associated with the VPC that is specified by ``VPCId`` .
- **Name** *(string) --*
The name of an association between a resolver rule and a VPC.
- **VPCId** *(string) --*
The ID of the VPC that you associated the resolver rule with.
- **Status** *(string) --*
A code that specifies the current status of the association between a resolver rule and a VPC.
- **StatusMessage** *(string) --*
A detailed description of the status of the association between a resolver rule and a VPC.
:type ResolverRuleId: string
:param ResolverRuleId: **[REQUIRED]**
The ID of the resolver rule that you want to associate with the VPC. To list the existing resolver rules, use ListResolverRules .
:type Name: string
:param Name:
A name for the association that you\'re creating between a resolver rule and a VPC.
:type VPCId: string
:param VPCId: **[REQUIRED]**
The ID of the VPC that you want to associate the resolver rule with.
:rtype: dict
:returns:
"""
pass
def can_paginate(self, operation_name: str = None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you\'d normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator(\"create_foo\")``.
:return: ``True`` if the operation can be paginated,
``False`` otherwise.
"""
pass
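    # A minimal pagination sketch (illustrative only, not part of the generated
    # stubs; assumes configured AWS credentials). `list_resolver_endpoints` is
    # a paginated Route 53 Resolver operation:
    #
    #   import boto3
    #   client = boto3.client('route53resolver')
    #   if client.can_paginate('list_resolver_endpoints'):
    #       paginator = client.get_paginator('list_resolver_endpoints')
    #       for page in paginator.paginate():
    #           print(page['ResolverEndpoints'])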
def create_resolver_endpoint(self, CreatorRequestId: str, SecurityGroupIds: List, Direction: str, IpAddresses: List, Name: str = None, Tags: List = None) -> Dict:
"""
Creates a resolver endpoint. There are two types of resolver endpoints, inbound and outbound:
* An *inbound resolver endpoint* forwards DNS queries to the DNS service for a VPC from your network or another VPC.
* An *outbound resolver endpoint* forwards DNS queries from the DNS service for a VPC to your network or another VPC.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/CreateResolverEndpoint>`_
**Request Syntax**
::
response = client.create_resolver_endpoint(
CreatorRequestId='string',
Name='string',
SecurityGroupIds=[
'string',
],
Direction='INBOUND'|'OUTBOUND',
IpAddresses=[
{
'SubnetId': 'string',
'Ip': 'string'
},
],
Tags=[
{
'Key': 'string',
'Value': 'string'
},
]
)
**Response Syntax**
::
{
'ResolverEndpoint': {
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'Name': 'string',
'SecurityGroupIds': [
'string',
],
'Direction': 'INBOUND'|'OUTBOUND',
'IpAddressCount': 123,
'HostVPCId': 'string',
'Status': 'CREATING'|'OPERATIONAL'|'UPDATING'|'AUTO_RECOVERING'|'ACTION_NEEDED'|'DELETING',
'StatusMessage': 'string',
'CreationTime': 'string',
'ModificationTime': 'string'
}
}
**Response Structure**
- *(dict) --*
- **ResolverEndpoint** *(dict) --*
Information about the ``CreateResolverEndpoint`` request, including the status of the request.
- **Id** *(string) --*
The ID of the resolver endpoint.
- **CreatorRequestId** *(string) --*
A unique string that identifies the request that created the resolver endpoint. The ``CreatorRequestId`` allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver endpoint.
- **Name** *(string) --*
The name that you assigned to the resolver endpoint when you submitted a CreateResolverEndpoint request.
- **SecurityGroupIds** *(list) --*
The ID of one or more security groups that control access to this VPC. The security group must include one or more inbound resolver rules.
- *(string) --*
- **Direction** *(string) --*
Indicates whether the resolver endpoint allows inbound or outbound DNS queries:
* ``INBOUND`` : allows DNS queries to your VPC from your network or another VPC
* ``OUTBOUND`` : allows DNS queries from your VPC to your network or another VPC
- **IpAddressCount** *(integer) --*
The number of IP addresses that the resolver endpoint can use for DNS queries.
- **HostVPCId** *(string) --*
The ID of the VPC that you want to create the resolver endpoint in.
- **Status** *(string) --*
A code that specifies the current status of the resolver endpoint.
- **StatusMessage** *(string) --*
A detailed description of the status of the resolver endpoint.
- **CreationTime** *(string) --*
The date and time that the endpoint was created, in Unix time format and Coordinated Universal Time (UTC).
- **ModificationTime** *(string) --*
The date and time that the endpoint was last modified, in Unix time format and Coordinated Universal Time (UTC).
:type CreatorRequestId: string
:param CreatorRequestId: **[REQUIRED]**
A unique string that identifies the request and that allows failed requests to be retried without the risk of executing the operation twice. ``CreatorRequestId`` can be any unique string, for example, a date/time stamp.
:type Name: string
:param Name:
A friendly name that lets you easily find a configuration in the Resolver dashboard in the Route 53 console.
:type SecurityGroupIds: list
:param SecurityGroupIds: **[REQUIRED]**
The ID of one or more security groups that you want to use to control access to this VPC. The security group that you specify must include one or more inbound rules (for inbound resolver endpoints) or outbound rules (for outbound resolver endpoints).
- *(string) --*
:type Direction: string
:param Direction: **[REQUIRED]**
Specify the applicable value:
* ``INBOUND`` : Resolver forwards DNS queries to the DNS service for a VPC from your network or another VPC
* ``OUTBOUND`` : Resolver forwards DNS queries from the DNS service for a VPC to your network or another VPC
:type IpAddresses: list
:param IpAddresses: **[REQUIRED]**
The subnets and IP addresses in your VPC that you want DNS queries to pass through on the way from your VPCs to your network (for outbound endpoints) or on the way from your network to your VPCs (for inbound resolver endpoints).
- *(dict) --*
            In a CreateResolverEndpoint request, a subnet and IP address that you want to use for DNS queries.
- **SubnetId** *(string) --* **[REQUIRED]**
The subnet that contains the IP address.
- **Ip** *(string) --*
The IP address that you want to use for DNS queries.
:type Tags: list
:param Tags:
A list of the tag keys and values that you want to associate with the endpoint.
- *(dict) --*
One tag that you want to add to the specified resource. A tag consists of a ``Key`` (a name for the tag) and a ``Value`` .
- **Key** *(string) --*
The name for the tag. For example, if you want to associate Resolver resources with the account IDs of your customers for billing purposes, the value of ``Key`` might be ``account-id`` .
- **Value** *(string) --*
The value for the tag. For example, if ``Key`` is ``account-id`` , then ``Value`` might be the ID of the customer account that you\'re creating the resource for.
:rtype: dict
:returns:
"""
pass
def create_resolver_rule(self, CreatorRequestId: str, RuleType: str, DomainName: str, Name: str = None, TargetIps: List = None, ResolverEndpointId: str = None, Tags: List = None) -> Dict:
"""
For DNS queries that originate in your VPCs, specifies which resolver endpoint the queries pass through, one domain name that you want to forward to your network, and the IP addresses of the DNS resolvers in your network.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/CreateResolverRule>`_
**Request Syntax**
::
response = client.create_resolver_rule(
CreatorRequestId='string',
Name='string',
RuleType='FORWARD'|'SYSTEM'|'RECURSIVE',
DomainName='string',
TargetIps=[
{
'Ip': 'string',
'Port': 123
},
],
ResolverEndpointId='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
]
)
**Response Syntax**
::
{
'ResolverRule': {
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'DomainName': 'string',
'Status': 'COMPLETE'|'DELETING'|'UPDATING'|'FAILED',
'StatusMessage': 'string',
'RuleType': 'FORWARD'|'SYSTEM'|'RECURSIVE',
'Name': 'string',
'TargetIps': [
{
'Ip': 'string',
'Port': 123
},
],
'ResolverEndpointId': 'string',
'OwnerId': 'string',
'ShareStatus': 'NOT_SHARED'|'SHARED_WITH_ME'|'SHARED_BY_ME'
}
}
**Response Structure**
- *(dict) --*
- **ResolverRule** *(dict) --*
Information about the ``CreateResolverRule`` request, including the status of the request.
- **Id** *(string) --*
The ID that Resolver assigned to the resolver rule when you created it.
- **CreatorRequestId** *(string) --*
A unique string that you specified when you created the resolver rule. ``CreatorRequestId`` identifies the request and allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver rule specified by ``Id`` .
- **DomainName** *(string) --*
DNS queries for this domain name are forwarded to the IP addresses that are specified in ``TargetIps`` . If a query matches multiple resolver rules (example.com and www.example.com), the query is routed using the resolver rule that contains the most specific domain name (www.example.com).
- **Status** *(string) --*
A code that specifies the current status of the resolver rule.
- **StatusMessage** *(string) --*
A detailed description of the status of a resolver rule.
- **RuleType** *(string) --*
This value is always ``FORWARD`` . Other resolver rule types aren't supported.
- **Name** *(string) --*
The name for the resolver rule, which you specified when you created the resolver rule.
- **TargetIps** *(list) --*
An array that contains the IP addresses and ports that you want to forward DNS queries to.
- *(dict) --*
In a CreateResolverRule request, an array of the IPs that you want to forward DNS queries to.
- **Ip** *(string) --*
One IP address that you want to forward DNS queries to. You can specify only IPv4 addresses.
- **Port** *(integer) --*
The port at ``Ip`` that you want to forward DNS queries to.
- **ResolverEndpointId** *(string) --*
The ID of the endpoint that the rule is associated with.
- **OwnerId** *(string) --*
When a rule is shared with another AWS account, the account ID of the account that the rule is shared with.
- **ShareStatus** *(string) --*
Whether the rule is shared and, if so, whether the current account is sharing the rule with another account, or another account is sharing the rule with the current account.
:type CreatorRequestId: string
:param CreatorRequestId: **[REQUIRED]**
A unique string that identifies the request and that allows failed requests to be retried without the risk of executing the operation twice. ``CreatorRequestId`` can be any unique string, for example, a date/time stamp.
:type Name: string
:param Name:
A friendly name that lets you easily find a rule in the Resolver dashboard in the Route 53 console.
:type RuleType: string
:param RuleType: **[REQUIRED]**
Specify ``FORWARD`` . Other resolver rule types aren\'t supported.
:type DomainName: string
:param DomainName: **[REQUIRED]**
DNS queries for this domain name are forwarded to the IP addresses that you specify in ``TargetIps`` . If a query matches multiple resolver rules (example.com and www.example.com), outbound DNS queries are routed using the resolver rule that contains the most specific domain name (www.example.com).
:type TargetIps: list
:param TargetIps:
The IPs that you want Resolver to forward DNS queries to. You can specify only IPv4 addresses. Separate IP addresses with a comma.
- *(dict) --*
In a CreateResolverRule request, an array of the IPs that you want to forward DNS queries to.
- **Ip** *(string) --* **[REQUIRED]**
One IP address that you want to forward DNS queries to. You can specify only IPv4 addresses.
- **Port** *(integer) --*
The port at ``Ip`` that you want to forward DNS queries to.
:type ResolverEndpointId: string
:param ResolverEndpointId:
The ID of the outbound resolver endpoint that you want to use to route DNS queries to the IP addresses that you specify in ``TargetIps`` .
:type Tags: list
:param Tags:
A list of the tag keys and values that you want to associate with the rule.
- *(dict) --*
One tag that you want to add to the specified resource. A tag consists of a ``Key`` (a name for the tag) and a ``Value`` .
- **Key** *(string) --*
The name for the tag. For example, if you want to associate Resolver resources with the account IDs of your customers for billing purposes, the value of ``Key`` might be ``account-id`` .
- **Value** *(string) --*
The value for the tag. For example, if ``Key`` is ``account-id`` , then ``Value`` might be the ID of the customer account that you\'re creating the resource for.
:rtype: dict
:returns:
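**Example**
A minimal, illustrative sketch of forwarding DNS queries for ``example.com`` to two DNS resolvers in your network. The endpoint ID and target IP addresses below are placeholders, not values from this documentation:
::
    import boto3

    client = boto3.client('route53resolver')
    response = client.create_resolver_rule(
        CreatorRequestId='2020-01-01-18:47',  # any unique string; a timestamp works
        RuleType='FORWARD',
        DomainName='example.com',
        TargetIps=[
            {'Ip': '192.0.2.44', 'Port': 53},  # placeholder on-premises resolver
            {'Ip': '192.0.2.45', 'Port': 53},  # placeholder on-premises resolver
        ],
        ResolverEndpointId='rslvr-out-EXAMPLE11111'  # placeholder outbound endpoint ID
    )
    print(response['ResolverRule']['Status'])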
"""
pass
def delete_resolver_endpoint(self, ResolverEndpointId: str) -> Dict:
"""
Deletes a resolver endpoint. The effect of deleting a resolver endpoint depends on whether it's an inbound or an outbound resolver endpoint:
* **Inbound** : DNS queries from your network or another VPC are no longer routed to the DNS service for the specified VPC.
* **Outbound** : DNS queries from a VPC are no longer routed to your network or to another VPC.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/DeleteResolverEndpoint>`_
**Request Syntax**
::
response = client.delete_resolver_endpoint(
ResolverEndpointId='string'
)
**Response Syntax**
::
{
'ResolverEndpoint': {
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'Name': 'string',
'SecurityGroupIds': [
'string',
],
'Direction': 'INBOUND'|'OUTBOUND',
'IpAddressCount': 123,
'HostVPCId': 'string',
'Status': 'CREATING'|'OPERATIONAL'|'UPDATING'|'AUTO_RECOVERING'|'ACTION_NEEDED'|'DELETING',
'StatusMessage': 'string',
'CreationTime': 'string',
'ModificationTime': 'string'
}
}
**Response Structure**
- *(dict) --*
- **ResolverEndpoint** *(dict) --*
Information about the ``DeleteResolverEndpoint`` request, including the status of the request.
- **Id** *(string) --*
The ID of the resolver endpoint.
- **CreatorRequestId** *(string) --*
A unique string that identifies the request that created the resolver endpoint. The ``CreatorRequestId`` allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver endpoint.
- **Name** *(string) --*
The name that you assigned to the resolver endpoint when you submitted a CreateResolverEndpoint request.
- **SecurityGroupIds** *(list) --*
The ID of one or more security groups that control access to this VPC. The security group must include one or more inbound rules (for inbound endpoints) or outbound rules (for outbound endpoints).
- *(string) --*
- **Direction** *(string) --*
Indicates whether the resolver endpoint allows inbound or outbound DNS queries:
* ``INBOUND`` : allows DNS queries to your VPC from your network or another VPC
* ``OUTBOUND`` : allows DNS queries from your VPC to your network or another VPC
- **IpAddressCount** *(integer) --*
The number of IP addresses that the resolver endpoint can use for DNS queries.
- **HostVPCId** *(string) --*
The ID of the VPC that you want to create the resolver endpoint in.
- **Status** *(string) --*
A code that specifies the current status of the resolver endpoint.
- **StatusMessage** *(string) --*
A detailed description of the status of the resolver endpoint.
- **CreationTime** *(string) --*
The date and time that the endpoint was created, in Unix time format and Coordinated Universal Time (UTC).
- **ModificationTime** *(string) --*
The date and time that the endpoint was last modified, in Unix time format and Coordinated Universal Time (UTC).
:type ResolverEndpointId: string
:param ResolverEndpointId: **[REQUIRED]**
The ID of the resolver endpoint that you want to delete.
:rtype: dict
:returns:
"""
pass
def delete_resolver_rule(self, ResolverRuleId: str) -> Dict:
"""
Deletes a resolver rule. Before you can delete a resolver rule, you must disassociate it from all the VPCs that you associated the resolver rule with. For more information, see DisassociateResolverRule .
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/DeleteResolverRule>`_
**Request Syntax**
::
response = client.delete_resolver_rule(
ResolverRuleId='string'
)
**Response Syntax**
::
{
'ResolverRule': {
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'DomainName': 'string',
'Status': 'COMPLETE'|'DELETING'|'UPDATING'|'FAILED',
'StatusMessage': 'string',
'RuleType': 'FORWARD'|'SYSTEM'|'RECURSIVE',
'Name': 'string',
'TargetIps': [
{
'Ip': 'string',
'Port': 123
},
],
'ResolverEndpointId': 'string',
'OwnerId': 'string',
'ShareStatus': 'NOT_SHARED'|'SHARED_WITH_ME'|'SHARED_BY_ME'
}
}
**Response Structure**
- *(dict) --*
- **ResolverRule** *(dict) --*
Information about the ``DeleteResolverRule`` request, including the status of the request.
- **Id** *(string) --*
The ID that Resolver assigned to the resolver rule when you created it.
- **CreatorRequestId** *(string) --*
A unique string that you specified when you created the resolver rule. ``CreatorRequestId`` identifies the request and allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver rule specified by ``Id`` .
- **DomainName** *(string) --*
DNS queries for this domain name are forwarded to the IP addresses that are specified in ``TargetIps`` . If a query matches multiple resolver rules (example.com and www.example.com), the query is routed using the resolver rule that contains the most specific domain name (www.example.com).
- **Status** *(string) --*
A code that specifies the current status of the resolver rule.
- **StatusMessage** *(string) --*
A detailed description of the status of a resolver rule.
- **RuleType** *(string) --*
This value is always ``FORWARD`` . Other resolver rule types aren't supported.
- **Name** *(string) --*
The name for the resolver rule, which you specified when you created the resolver rule.
- **TargetIps** *(list) --*
An array that contains the IP addresses and ports that you want to forward DNS queries to.
- *(dict) --*
In a CreateResolverRule request, an array of the IPs that you want to forward DNS queries to.
- **Ip** *(string) --*
One IP address that you want to forward DNS queries to. You can specify only IPv4 addresses.
- **Port** *(integer) --*
The port at ``Ip`` that you want to forward DNS queries to.
- **ResolverEndpointId** *(string) --*
The ID of the endpoint that the rule is associated with.
- **OwnerId** *(string) --*
When a rule is shared with another AWS account, the account ID of the account that the rule is shared with.
- **ShareStatus** *(string) --*
Whether the rule is shared and, if so, whether the current account is sharing the rule with another account, or another account is sharing the rule with the current account.
:type ResolverRuleId: string
:param ResolverRuleId: **[REQUIRED]**
The ID of the resolver rule that you want to delete.
:rtype: dict
:returns:
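**Example**
An illustrative sketch of the disassociate-then-delete flow described above. The rule ID is a placeholder, the loop ignores pagination for brevity, and using ``ResolverRuleId`` as a filter name is an assumption about the accepted filter names:
::
    import boto3

    client = boto3.client('route53resolver')
    rule_id = 'rslvr-rr-EXAMPLE11111'  # placeholder resolver rule ID
    # A rule must be disassociated from every VPC before it can be deleted.
    associations = client.list_resolver_rule_associations(
        Filters=[{'Name': 'ResolverRuleId', 'Values': [rule_id]}]  # assumed filter name
    )
    for assoc in associations['ResolverRuleAssociations']:
        client.disassociate_resolver_rule(
            VPCId=assoc['VPCId'], ResolverRuleId=rule_id
        )
    client.delete_resolver_rule(ResolverRuleId=rule_id)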
"""
pass
def disassociate_resolver_endpoint_ip_address(self, ResolverEndpointId: str, IpAddress: Dict) -> Dict:
"""
Removes IP addresses from an inbound or an outbound resolver endpoint. If you want to remove more than one IP address, submit one ``DisassociateResolverEndpointIpAddress`` request for each IP address.
To add an IP address to an endpoint, see AssociateResolverEndpointIpAddress .
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/DisassociateResolverEndpointIpAddress>`_
**Request Syntax**
::
response = client.disassociate_resolver_endpoint_ip_address(
ResolverEndpointId='string',
IpAddress={
'IpId': 'string',
'SubnetId': 'string',
'Ip': 'string'
}
)
**Response Syntax**
::
{
'ResolverEndpoint': {
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'Name': 'string',
'SecurityGroupIds': [
'string',
],
'Direction': 'INBOUND'|'OUTBOUND',
'IpAddressCount': 123,
'HostVPCId': 'string',
'Status': 'CREATING'|'OPERATIONAL'|'UPDATING'|'AUTO_RECOVERING'|'ACTION_NEEDED'|'DELETING',
'StatusMessage': 'string',
'CreationTime': 'string',
'ModificationTime': 'string'
}
}
**Response Structure**
- *(dict) --*
- **ResolverEndpoint** *(dict) --*
The response to a ``DisassociateResolverEndpointIpAddress`` request.
- **Id** *(string) --*
The ID of the resolver endpoint.
- **CreatorRequestId** *(string) --*
A unique string that identifies the request that created the resolver endpoint. The ``CreatorRequestId`` allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver endpoint.
- **Name** *(string) --*
The name that you assigned to the resolver endpoint when you submitted a CreateResolverEndpoint request.
- **SecurityGroupIds** *(list) --*
The ID of one or more security groups that control access to this VPC. The security group must include one or more inbound rules (for inbound endpoints) or outbound rules (for outbound endpoints).
- *(string) --*
- **Direction** *(string) --*
Indicates whether the resolver endpoint allows inbound or outbound DNS queries:
* ``INBOUND`` : allows DNS queries to your VPC from your network or another VPC
* ``OUTBOUND`` : allows DNS queries from your VPC to your network or another VPC
- **IpAddressCount** *(integer) --*
The number of IP addresses that the resolver endpoint can use for DNS queries.
- **HostVPCId** *(string) --*
The ID of the VPC that you want to create the resolver endpoint in.
- **Status** *(string) --*
A code that specifies the current status of the resolver endpoint.
- **StatusMessage** *(string) --*
A detailed description of the status of the resolver endpoint.
- **CreationTime** *(string) --*
The date and time that the endpoint was created, in Unix time format and Coordinated Universal Time (UTC).
- **ModificationTime** *(string) --*
The date and time that the endpoint was last modified, in Unix time format and Coordinated Universal Time (UTC).
:type ResolverEndpointId: string
:param ResolverEndpointId: **[REQUIRED]**
The ID of the resolver endpoint that you want to disassociate an IP address from.
:type IpAddress: dict
:param IpAddress: **[REQUIRED]**
The IPv4 address that you want to remove from a resolver endpoint.
- **IpId** *(string) --*
*Only when removing an IP address from a resolver endpoint* : The ID of the IP address that you want to remove. To get this ID, use GetResolverEndpoint .
- **SubnetId** *(string) --*
The ID of the subnet that includes the IP address that you want to update. To get this ID, use GetResolverEndpoint .
- **Ip** *(string) --*
The new IP address.
:rtype: dict
:returns:
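**Example**
An illustrative sketch that looks up the ID of an IP address with ListResolverEndpointIpAddresses and then removes it. The endpoint ID is a placeholder, and removing the first listed address is purely for illustration:
::
    import boto3

    client = boto3.client('route53resolver')
    endpoint_id = 'rslvr-in-EXAMPLE11111'  # placeholder endpoint ID
    ips = client.list_resolver_endpoint_ip_addresses(ResolverEndpointId=endpoint_id)
    ip_to_remove = ips['IpAddresses'][0]  # for illustration, remove the first address
    client.disassociate_resolver_endpoint_ip_address(
        ResolverEndpointId=endpoint_id,
        IpAddress={'IpId': ip_to_remove['IpId']}
    )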
"""
pass
def disassociate_resolver_rule(self, VPCId: str, ResolverRuleId: str) -> Dict:
"""
Removes the association between a specified resolver rule and a specified VPC.
.. warning::
If you disassociate a resolver rule from a VPC, Resolver stops forwarding DNS queries for the domain name that you specified in the resolver rule.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/DisassociateResolverRule>`_
**Request Syntax**
::
response = client.disassociate_resolver_rule(
VPCId='string',
ResolverRuleId='string'
)
**Response Syntax**
::
{
'ResolverRuleAssociation': {
'Id': 'string',
'ResolverRuleId': 'string',
'Name': 'string',
'VPCId': 'string',
'Status': 'CREATING'|'COMPLETE'|'DELETING'|'FAILED'|'OVERRIDDEN',
'StatusMessage': 'string'
}
}
**Response Structure**
- *(dict) --*
- **ResolverRuleAssociation** *(dict) --*
Information about the ``DisassociateResolverRule`` request, including the status of the request.
- **Id** *(string) --*
The ID of the association between a resolver rule and a VPC. Resolver assigns this value when you submit an AssociateResolverRule request.
- **ResolverRuleId** *(string) --*
The ID of the resolver rule that you associated with the VPC that is specified by ``VPCId`` .
- **Name** *(string) --*
The name of an association between a resolver rule and a VPC.
- **VPCId** *(string) --*
The ID of the VPC that you associated the resolver rule with.
- **Status** *(string) --*
A code that specifies the current status of the association between a resolver rule and a VPC.
- **StatusMessage** *(string) --*
A detailed description of the status of the association between a resolver rule and a VPC.
:type VPCId: string
:param VPCId: **[REQUIRED]**
The ID of the VPC that you want to disassociate the resolver rule from.
:type ResolverRuleId: string
:param ResolverRuleId: **[REQUIRED]**
The ID of the resolver rule that you want to disassociate from the specified VPC.
:rtype: dict
:returns:
"""
pass
def generate_presigned_url(self, ClientMethod: str = None, Params: Dict = None, ExpiresIn: int = None, HttpMethod: str = None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to
``ClientMethod``.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid
for. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By
default, the http method is whatever is used in the method\'s model.
:returns: The presigned url
"""
pass
def get_paginator(self, operation_name: str = None) -> Paginator:
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you\'d normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator(\"create_foo\")``.
:raise OperationNotPageableError: Raised if the operation is not
pageable. You can use the ``client.can_paginate`` method to
check if an operation is pageable.
:rtype: L{botocore.paginate.Paginator}
:return: A paginator object.
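**Example**
A generic sketch of the pattern described above, assuming the ``list_resolver_rules`` operation is pageable in your version of botocore; ``can_paginate`` guards against versions where it is not:
::
    import boto3

    client = boto3.client('route53resolver')
    if client.can_paginate('list_resolver_rules'):
        paginator = client.get_paginator('list_resolver_rules')
        for page in paginator.paginate():
            for rule in page['ResolverRules']:
                print(rule['Id'], rule['DomainName'])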
"""
pass
def get_resolver_endpoint(self, ResolverEndpointId: str) -> Dict:
"""
Gets information about a specified resolver endpoint, such as whether it's an inbound or an outbound resolver endpoint, and the current status of the endpoint.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/GetResolverEndpoint>`_
**Request Syntax**
::
response = client.get_resolver_endpoint(
ResolverEndpointId='string'
)
**Response Syntax**
::
{
'ResolverEndpoint': {
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'Name': 'string',
'SecurityGroupIds': [
'string',
],
'Direction': 'INBOUND'|'OUTBOUND',
'IpAddressCount': 123,
'HostVPCId': 'string',
'Status': 'CREATING'|'OPERATIONAL'|'UPDATING'|'AUTO_RECOVERING'|'ACTION_NEEDED'|'DELETING',
'StatusMessage': 'string',
'CreationTime': 'string',
'ModificationTime': 'string'
}
}
**Response Structure**
- *(dict) --*
- **ResolverEndpoint** *(dict) --*
Information about the resolver endpoint that you specified in a ``GetResolverEndpoint`` request.
- **Id** *(string) --*
The ID of the resolver endpoint.
- **CreatorRequestId** *(string) --*
A unique string that identifies the request that created the resolver endpoint. The ``CreatorRequestId`` allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver endpoint.
- **Name** *(string) --*
The name that you assigned to the resolver endpoint when you submitted a CreateResolverEndpoint request.
- **SecurityGroupIds** *(list) --*
The ID of one or more security groups that control access to this VPC. The security group must include one or more inbound rules (for inbound endpoints) or outbound rules (for outbound endpoints).
- *(string) --*
- **Direction** *(string) --*
Indicates whether the resolver endpoint allows inbound or outbound DNS queries:
* ``INBOUND`` : allows DNS queries to your VPC from your network or another VPC
* ``OUTBOUND`` : allows DNS queries from your VPC to your network or another VPC
- **IpAddressCount** *(integer) --*
The number of IP addresses that the resolver endpoint can use for DNS queries.
- **HostVPCId** *(string) --*
The ID of the VPC that you want to create the resolver endpoint in.
- **Status** *(string) --*
A code that specifies the current status of the resolver endpoint.
- **StatusMessage** *(string) --*
A detailed description of the status of the resolver endpoint.
- **CreationTime** *(string) --*
The date and time that the endpoint was created, in Unix time format and Coordinated Universal Time (UTC).
- **ModificationTime** *(string) --*
The date and time that the endpoint was last modified, in Unix time format and Coordinated Universal Time (UTC).
:type ResolverEndpointId: string
:param ResolverEndpointId: **[REQUIRED]**
The ID of the resolver endpoint that you want to get information about.
:rtype: dict
:returns:
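**Example**
A minimal illustrative call that checks whether an endpoint is ready to use. The endpoint ID is a placeholder:
::
    import boto3

    client = boto3.client('route53resolver')
    response = client.get_resolver_endpoint(
        ResolverEndpointId='rslvr-in-EXAMPLE11111'  # placeholder endpoint ID
    )
    endpoint = response['ResolverEndpoint']
    if endpoint['Status'] == 'OPERATIONAL':
        print(endpoint['Direction'], endpoint['IpAddressCount'])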
"""
pass
def get_resolver_rule(self, ResolverRuleId: str) -> Dict:
"""
Gets information about a specified resolver rule, such as the domain name that the rule forwards DNS queries for and the ID of the outbound resolver endpoint that the rule is associated with.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/GetResolverRule>`_
**Request Syntax**
::
response = client.get_resolver_rule(
ResolverRuleId='string'
)
**Response Syntax**
::
{
'ResolverRule': {
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'DomainName': 'string',
'Status': 'COMPLETE'|'DELETING'|'UPDATING'|'FAILED',
'StatusMessage': 'string',
'RuleType': 'FORWARD'|'SYSTEM'|'RECURSIVE',
'Name': 'string',
'TargetIps': [
{
'Ip': 'string',
'Port': 123
},
],
'ResolverEndpointId': 'string',
'OwnerId': 'string',
'ShareStatus': 'NOT_SHARED'|'SHARED_WITH_ME'|'SHARED_BY_ME'
}
}
**Response Structure**
- *(dict) --*
- **ResolverRule** *(dict) --*
Information about the resolver rule that you specified in a ``GetResolverRule`` request.
- **Id** *(string) --*
The ID that Resolver assigned to the resolver rule when you created it.
- **CreatorRequestId** *(string) --*
A unique string that you specified when you created the resolver rule. ``CreatorRequestId`` identifies the request and allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver rule specified by ``Id`` .
- **DomainName** *(string) --*
DNS queries for this domain name are forwarded to the IP addresses that are specified in ``TargetIps`` . If a query matches multiple resolver rules (example.com and www.example.com), the query is routed using the resolver rule that contains the most specific domain name (www.example.com).
- **Status** *(string) --*
A code that specifies the current status of the resolver rule.
- **StatusMessage** *(string) --*
A detailed description of the status of a resolver rule.
- **RuleType** *(string) --*
This value is always ``FORWARD`` . Other resolver rule types aren't supported.
- **Name** *(string) --*
The name for the resolver rule, which you specified when you created the resolver rule.
- **TargetIps** *(list) --*
An array that contains the IP addresses and ports that you want to forward DNS queries to.
- *(dict) --*
In a CreateResolverRule request, an array of the IPs that you want to forward DNS queries to.
- **Ip** *(string) --*
One IP address that you want to forward DNS queries to. You can specify only IPv4 addresses.
- **Port** *(integer) --*
The port at ``Ip`` that you want to forward DNS queries to.
- **ResolverEndpointId** *(string) --*
The ID of the endpoint that the rule is associated with.
- **OwnerId** *(string) --*
When a rule is shared with another AWS account, the account ID of the account that the rule is shared with.
- **ShareStatus** *(string) --*
Whether the rule is shared and, if so, whether the current account is sharing the rule with another account, or another account is sharing the rule with the current account.
:type ResolverRuleId: string
:param ResolverRuleId: **[REQUIRED]**
The ID of the resolver rule that you want to get information about.
:rtype: dict
:returns:
"""
pass
def get_resolver_rule_association(self, ResolverRuleAssociationId: str) -> Dict:
"""
Gets information about an association between a specified resolver rule and a VPC. You associate a resolver rule and a VPC using AssociateResolverRule .
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/GetResolverRuleAssociation>`_
**Request Syntax**
::
response = client.get_resolver_rule_association(
ResolverRuleAssociationId='string'
)
**Response Syntax**
::
{
'ResolverRuleAssociation': {
'Id': 'string',
'ResolverRuleId': 'string',
'Name': 'string',
'VPCId': 'string',
'Status': 'CREATING'|'COMPLETE'|'DELETING'|'FAILED'|'OVERRIDDEN',
'StatusMessage': 'string'
}
}
**Response Structure**
- *(dict) --*
- **ResolverRuleAssociation** *(dict) --*
Information about the resolver rule association that you specified in a ``GetResolverRuleAssociation`` request.
- **Id** *(string) --*
The ID of the association between a resolver rule and a VPC. Resolver assigns this value when you submit an AssociateResolverRule request.
- **ResolverRuleId** *(string) --*
The ID of the resolver rule that you associated with the VPC that is specified by ``VPCId`` .
- **Name** *(string) --*
The name of an association between a resolver rule and a VPC.
- **VPCId** *(string) --*
The ID of the VPC that you associated the resolver rule with.
- **Status** *(string) --*
A code that specifies the current status of the association between a resolver rule and a VPC.
- **StatusMessage** *(string) --*
A detailed description of the status of the association between a resolver rule and a VPC.
:type ResolverRuleAssociationId: string
:param ResolverRuleAssociationId: **[REQUIRED]**
The ID of the resolver rule association that you want to get information about.
:rtype: dict
:returns:
"""
pass
def get_resolver_rule_policy(self, Arn: str) -> Dict:
"""
Gets information about a resolver rule policy. A resolver rule policy specifies the Resolver operations and resources that you want to allow another AWS account to be able to use.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/GetResolverRulePolicy>`_
**Request Syntax**
::
response = client.get_resolver_rule_policy(
Arn='string'
)
**Response Syntax**
::
{
'ResolverRulePolicy': 'string'
}
**Response Structure**
- *(dict) --*
- **ResolverRulePolicy** *(string) --*
Information about the resolver rule policy that you specified in a ``GetResolverRulePolicy`` request.
:type Arn: string
:param Arn: **[REQUIRED]**
The ARN (Amazon Resource Name) of the resolver rule that you want to get the policy for.
:rtype: dict
:returns:
"""
pass
def get_waiter(self, waiter_name: str = None) -> Waiter:
"""
Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters
section of the service docs for a list of available waiters.
:returns: The specified waiter object.
:rtype: botocore.waiter.Waiter
"""
pass
def list_resolver_endpoint_ip_addresses(self, ResolverEndpointId: str, MaxResults: int = None, NextToken: str = None) -> Dict:
"""
Gets the IP addresses for a specified resolver endpoint.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/ListResolverEndpointIpAddresses>`_
**Request Syntax**
::
response = client.list_resolver_endpoint_ip_addresses(
ResolverEndpointId='string',
MaxResults=123,
NextToken='string'
)
**Response Syntax**
::
{
'NextToken': 'string',
'MaxResults': 123,
'IpAddresses': [
{
'IpId': 'string',
'SubnetId': 'string',
'Ip': 'string',
'Status': 'CREATING'|'FAILED_CREATION'|'ATTACHING'|'ATTACHED'|'REMAP_DETACHING'|'REMAP_ATTACHING'|'DETACHING'|'FAILED_RESOURCE_GONE'|'DELETING'|'DELETE_FAILED_FAS_EXPIRED',
'StatusMessage': 'string',
'CreationTime': 'string',
'ModificationTime': 'string'
},
]
}
**Response Structure**
- *(dict) --*
- **NextToken** *(string) --*
If the specified endpoint has more than ``MaxResults`` IP addresses, you can submit another ``ListResolverEndpointIpAddresses`` request to get the next group of IP addresses. In the next request, specify the value of ``NextToken`` from the previous response.
- **MaxResults** *(integer) --*
The value that you specified for ``MaxResults`` in the request.
- **IpAddresses** *(list) --*
The IP addresses that DNS queries pass through on their way to your network (outbound endpoint) or on the way to Resolver (inbound endpoint).
- *(dict) --*
In the response to a GetResolverEndpoint request, information about the IP addresses that the resolver endpoint uses for DNS queries.
- **IpId** *(string) --*
The ID of one IP address.
- **SubnetId** *(string) --*
The ID of one subnet.
- **Ip** *(string) --*
One IP address that the resolver endpoint uses for DNS queries.
- **Status** *(string) --*
A status code that gives the current status of the request.
- **StatusMessage** *(string) --*
A message that provides additional information about the status of the request.
- **CreationTime** *(string) --*
The date and time that the IP address was created, in Unix time format and Coordinated Universal Time (UTC).
- **ModificationTime** *(string) --*
The date and time that the IP address was last modified, in Unix time format and Coordinated Universal Time (UTC).
:type ResolverEndpointId: string
:param ResolverEndpointId: **[REQUIRED]**
The ID of the resolver endpoint that you want to get IP addresses for.
:type MaxResults: integer
:param MaxResults:
The maximum number of IP addresses that you want to return in the response to a ``ListResolverEndpointIpAddresses`` request. If you don\'t specify a value for ``MaxResults`` , Resolver returns up to 100 IP addresses.
:type NextToken: string
:param NextToken:
For the first ``ListResolverEndpointIpAddresses`` request, omit this value.
If the specified resolver endpoint has more than ``MaxResults`` IP addresses, you can submit another ``ListResolverEndpointIpAddresses`` request to get the next group of IP addresses. In the next request, specify the value of ``NextToken`` from the previous response.
:rtype: dict
:returns:
"""
pass
def list_resolver_endpoints(self, MaxResults: int = None, NextToken: str = None, Filters: List = None) -> Dict:
"""
Lists all the resolver endpoints that were created using the current AWS account.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/ListResolverEndpoints>`_
**Request Syntax**
::
response = client.list_resolver_endpoints(
MaxResults=123,
NextToken='string',
Filters=[
{
'Name': 'string',
'Values': [
'string',
]
},
]
)
**Response Syntax**
::
{
'NextToken': 'string',
'MaxResults': 123,
'ResolverEndpoints': [
{
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'Name': 'string',
'SecurityGroupIds': [
'string',
],
'Direction': 'INBOUND'|'OUTBOUND',
'IpAddressCount': 123,
'HostVPCId': 'string',
'Status': 'CREATING'|'OPERATIONAL'|'UPDATING'|'AUTO_RECOVERING'|'ACTION_NEEDED'|'DELETING',
'StatusMessage': 'string',
'CreationTime': 'string',
'ModificationTime': 'string'
},
]
}
**Response Structure**
- *(dict) --*
- **NextToken** *(string) --*
If more than ``MaxResults`` resolver endpoints match the specified criteria, you can submit another ``ListResolverEndpoints`` request to get the next group of results. In the next request, specify the value of ``NextToken`` from the previous response.
- **MaxResults** *(integer) --*
The value that you specified for ``MaxResults`` in the request.
- **ResolverEndpoints** *(list) --*
The resolver endpoints that were created by using the current AWS account, and that match the specified filters, if any.
- *(dict) --*
In the response to a CreateResolverEndpoint , DeleteResolverEndpoint , GetResolverEndpoint , ListResolverEndpoints , or UpdateResolverEndpoint request, a complex type that contains settings for an existing inbound or outbound resolver endpoint.
- **Id** *(string) --*
The ID of the resolver endpoint.
- **CreatorRequestId** *(string) --*
A unique string that identifies the request that created the resolver endpoint. The ``CreatorRequestId`` allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver endpoint.
- **Name** *(string) --*
The name that you assigned to the resolver endpoint when you submitted a CreateResolverEndpoint request.
- **SecurityGroupIds** *(list) --*
The ID of one or more security groups that control access to this VPC. The security group must include one or more inbound rules (for inbound endpoints) or outbound rules (for outbound endpoints).
- *(string) --*
- **Direction** *(string) --*
Indicates whether the resolver endpoint allows inbound or outbound DNS queries:
* ``INBOUND`` : allows DNS queries to your VPC from your network or another VPC
* ``OUTBOUND`` : allows DNS queries from your VPC to your network or another VPC
- **IpAddressCount** *(integer) --*
The number of IP addresses that the resolver endpoint can use for DNS queries.
- **HostVPCId** *(string) --*
The ID of the VPC that you want to create the resolver endpoint in.
- **Status** *(string) --*
A code that specifies the current status of the resolver endpoint.
- **StatusMessage** *(string) --*
A detailed description of the status of the resolver endpoint.
- **CreationTime** *(string) --*
The date and time that the endpoint was created, in Unix time format and Coordinated Universal Time (UTC).
- **ModificationTime** *(string) --*
The date and time that the endpoint was last modified, in Unix time format and Coordinated Universal Time (UTC).
:type MaxResults: integer
:param MaxResults:
The maximum number of resolver endpoints that you want to return in the response to a ``ListResolverEndpoints`` request. If you don\'t specify a value for ``MaxResults`` , Resolver returns up to 100 resolver endpoints.
:type NextToken: string
:param NextToken:
For the first ``ListResolverEndpoints`` request, omit this value.
If you have more than ``MaxResults`` resolver endpoints, you can submit another ``ListResolverEndpoints`` request to get the next group of resolver endpoints. In the next request, specify the value of ``NextToken`` from the previous response.
:type Filters: list
:param Filters:
An optional specification to return a subset of resolver endpoints, such as all inbound resolver endpoints.
.. note::
If you submit a second or subsequent ``ListResolverEndpoints`` request and specify the ``NextToken`` parameter, you must use the same values for ``Filters`` , if any, as in the previous request.
- *(dict) --*
For ``List`` operations, an optional specification to return a subset of objects, such as resolver endpoints or resolver rules.
- **Name** *(string) --*
When you\'re using a ``List`` operation and you want the operation to return a subset of objects, such as resolver endpoints or resolver rules, the name of the parameter that you want to use to filter objects. For example, to list only inbound resolver endpoints, specify ``Direction`` for the value of ``Name`` .
- **Values** *(list) --*
When you\'re using a ``List`` operation and you want the operation to return a subset of objects, such as resolver endpoints or resolver rules, the value of the parameter that you want to use to filter objects. For example, to list only inbound resolver endpoints, specify ``INBOUND`` for the value of ``Values`` .
- *(string) --*
:rtype: dict
:returns:
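**Example**
An illustrative use of ``Filters`` to return only inbound endpoints, following the ``Direction``/``INBOUND`` example given for the ``Name`` and ``Values`` fields above (pagination omitted for brevity):
::
    import boto3

    client = boto3.client('route53resolver')
    response = client.list_resolver_endpoints(
        Filters=[{'Name': 'Direction', 'Values': ['INBOUND']}]
    )
    for endpoint in response['ResolverEndpoints']:
        print(endpoint['Id'], endpoint['Status'])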
"""
pass
def list_resolver_rule_associations(self, MaxResults: int = None, NextToken: str = None, Filters: List = None) -> Dict:
"""
Lists the associations that were created between resolver rules and VPCs using the current AWS account.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/ListResolverRuleAssociations>`_
**Request Syntax**
::
response = client.list_resolver_rule_associations(
MaxResults=123,
NextToken='string',
Filters=[
{
'Name': 'string',
'Values': [
'string',
]
},
]
)
**Response Syntax**
::
{
'NextToken': 'string',
'MaxResults': 123,
'ResolverRuleAssociations': [
{
'Id': 'string',
'ResolverRuleId': 'string',
'Name': 'string',
'VPCId': 'string',
'Status': 'CREATING'|'COMPLETE'|'DELETING'|'FAILED'|'OVERRIDDEN',
'StatusMessage': 'string'
},
]
}
**Response Structure**
- *(dict) --*
- **NextToken** *(string) --*
If more than ``MaxResults`` rule associations match the specified criteria, you can submit another ``ListResolverRuleAssociations`` request to get the next group of results. In the next request, specify the value of ``NextToken`` from the previous response.
- **MaxResults** *(integer) --*
The value that you specified for ``MaxResults`` in the request.
- **ResolverRuleAssociations** *(list) --*
The associations that were created between resolver rules and VPCs using the current AWS account, and that match the specified filters, if any.
- *(dict) --*
In the response to an AssociateResolverRule , DisassociateResolverRule , or ListResolverRuleAssociations request, information about an association between a resolver rule and a VPC.
- **Id** *(string) --*
The ID of the association between a resolver rule and a VPC. Resolver assigns this value when you submit an AssociateResolverRule request.
- **ResolverRuleId** *(string) --*
The ID of the resolver rule that you associated with the VPC that is specified by ``VPCId`` .
- **Name** *(string) --*
The name of an association between a resolver rule and a VPC.
- **VPCId** *(string) --*
The ID of the VPC that you associated the resolver rule with.
- **Status** *(string) --*
A code that specifies the current status of the association between a resolver rule and a VPC.
- **StatusMessage** *(string) --*
A detailed description of the status of the association between a resolver rule and a VPC.
:type MaxResults: integer
:param MaxResults:
The maximum number of rule associations that you want to return in the response to a ``ListResolverRuleAssociations`` request. If you don\'t specify a value for ``MaxResults`` , Resolver returns up to 100 rule associations.
:type NextToken: string
:param NextToken:
For the first ``ListResolverRuleAssociations`` request, omit this value.
If you have more than ``MaxResults`` rule associations, you can submit another ``ListResolverRuleAssociations`` request to get the next group of rule associations. In the next request, specify the value of ``NextToken`` from the previous response.
:type Filters: list
:param Filters:
An optional specification to return a subset of resolver rules, such as resolver rules that are associated with the same VPC ID.
.. note::
If you submit a second or subsequent ``ListResolverRuleAssociations`` request and specify the ``NextToken`` parameter, you must use the same values for ``Filters`` , if any, as in the previous request.
- *(dict) --*
For ``List`` operations, an optional specification to return a subset of objects, such as resolver endpoints or resolver rules.
- **Name** *(string) --*
When you\'re using a ``List`` operation and you want the operation to return a subset of objects, such as resolver endpoints or resolver rules, the name of the parameter that you want to use to filter objects. For example, to list only inbound resolver endpoints, specify ``Direction`` for the value of ``Name`` .
- **Values** *(list) --*
When you\'re using a ``List`` operation and you want the operation to return a subset of objects, such as resolver endpoints or resolver rules, the value of the parameter that you want to use to filter objects. For example, to list only inbound resolver endpoints, specify ``INBOUND`` for the value of ``Values`` .
- *(string) --*
:rtype: dict
:returns:
"""
pass
def list_resolver_rules(self, MaxResults: int = None, NextToken: str = None, Filters: List = None) -> Dict:
"""
Lists the resolver rules that were created using the current AWS account.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/ListResolverRules>`_
**Request Syntax**
::
response = client.list_resolver_rules(
MaxResults=123,
NextToken='string',
Filters=[
{
'Name': 'string',
'Values': [
'string',
]
},
]
)
**Response Syntax**
::
{
'NextToken': 'string',
'MaxResults': 123,
'ResolverRules': [
{
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'DomainName': 'string',
'Status': 'COMPLETE'|'DELETING'|'UPDATING'|'FAILED',
'StatusMessage': 'string',
'RuleType': 'FORWARD'|'SYSTEM'|'RECURSIVE',
'Name': 'string',
'TargetIps': [
{
'Ip': 'string',
'Port': 123
},
],
'ResolverEndpointId': 'string',
'OwnerId': 'string',
'ShareStatus': 'NOT_SHARED'|'SHARED_WITH_ME'|'SHARED_BY_ME'
},
]
}
**Response Structure**
- *(dict) --*
- **NextToken** *(string) --*
If more than ``MaxResults`` resolver rules match the specified criteria, you can submit another ``ListResolverRules`` request to get the next group of results. In the next request, specify the value of ``NextToken`` from the previous response.
- **MaxResults** *(integer) --*
The value that you specified for ``MaxResults`` in the request.
- **ResolverRules** *(list) --*
The resolver rules that were created using the current AWS account and that match the specified filters, if any.
- *(dict) --*
For queries that originate in your VPC, detailed information about a resolver rule, which specifies how to route DNS queries out of the VPC. The ``ResolverRule`` parameter appears in the response to a CreateResolverRule , DeleteResolverRule , GetResolverRule , ListResolverRules , or UpdateResolverRule request.
- **Id** *(string) --*
The ID that Resolver assigned to the resolver rule when you created it.
- **CreatorRequestId** *(string) --*
A unique string that you specified when you created the resolver rule. ``CreatorRequestId`` identifies the request and allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver rule specified by ``Id`` .
- **DomainName** *(string) --*
DNS queries for this domain name are forwarded to the IP addresses that are specified in ``TargetIps`` . If a query matches multiple resolver rules (example.com and www.example.com), the query is routed using the resolver rule that contains the most specific domain name (www.example.com).
- **Status** *(string) --*
A code that specifies the current status of the resolver rule.
- **StatusMessage** *(string) --*
A detailed description of the status of a resolver rule.
- **RuleType** *(string) --*
This value is always ``FORWARD`` . Other resolver rule types aren't supported.
- **Name** *(string) --*
The name for the resolver rule, which you specified when you created the resolver rule.
- **TargetIps** *(list) --*
An array that contains the IP addresses and ports that you want to forward DNS queries to.
- *(dict) --*
In a CreateResolverRule request, an array of the IPs that you want to forward DNS queries to.
- **Ip** *(string) --*
One IP address that you want to forward DNS queries to. You can specify only IPv4 addresses.
- **Port** *(integer) --*
The port at ``Ip`` that you want to forward DNS queries to.
- **ResolverEndpointId** *(string) --*
The ID of the endpoint that the rule is associated with.
- **OwnerId** *(string) --*
When a rule is shared with another AWS account, the account ID of the account that the rule is shared with.
- **ShareStatus** *(string) --*
Whether the rule is shared and, if so, whether the current account is sharing the rule with another account, or another account is sharing the rule with the current account.
:type MaxResults: integer
:param MaxResults:
The maximum number of resolver rules that you want to return in the response to a ``ListResolverRules`` request. If you don\'t specify a value for ``MaxResults`` , Resolver returns up to 100 resolver rules.
:type NextToken: string
:param NextToken:
For the first ``ListResolverRules`` request, omit this value.
If you have more than ``MaxResults`` resolver rules, you can submit another ``ListResolverRules`` request to get the next group of resolver rules. In the next request, specify the value of ``NextToken`` from the previous response.
:type Filters: list
:param Filters:
An optional specification to return a subset of resolver rules, such as all resolver rules that are associated with the same resolver endpoint.
.. note::
If you submit a second or subsequent ``ListResolverRules`` request and specify the ``NextToken`` parameter, you must use the same values for ``Filters`` , if any, as in the previous request.
- *(dict) --*
For ``List`` operations, an optional specification to return a subset of objects, such as resolver endpoints or resolver rules.
- **Name** *(string) --*
When you\'re using a ``List`` operation and you want the operation to return a subset of objects, such as resolver endpoints or resolver rules, the name of the parameter that you want to use to filter objects. For example, to list only inbound resolver endpoints, specify ``Direction`` for the value of ``Name`` .
- **Values** *(list) --*
When you\'re using a ``List`` operation and you want the operation to return a subset of objects, such as resolver endpoints or resolver rules, the value of the parameter that you want to use to filter objects. For example, to list only inbound resolver endpoints, specify ``INBOUND`` for the value of ``Values`` .
- *(string) --*
:rtype: dict
:returns:
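**Example**
A sketch of the ``NextToken`` pagination loop that the parameter descriptions above imply: pass each response's ``NextToken`` into the next request (keeping the same ``Filters``, if any) until the response no longer includes one:
::
    import boto3

    client = boto3.client('route53resolver')
    kwargs = {'MaxResults': 50}
    while True:
        response = client.list_resolver_rules(**kwargs)
        for rule in response.get('ResolverRules', []):
            print(rule['Id'], rule.get('DomainName'))
        if 'NextToken' not in response:
            break
        kwargs['NextToken'] = response['NextToken']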
"""
pass
def list_tags_for_resource(self, ResourceArn: str, MaxResults: int = None, NextToken: str = None) -> Dict:
"""
Lists the tags that you associated with the specified resource.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/ListTagsForResource>`_
**Request Syntax**
::
response = client.list_tags_for_resource(
ResourceArn='string',
MaxResults=123,
NextToken='string'
)
**Response Syntax**
::
{
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
],
'NextToken': 'string'
}
**Response Structure**
- *(dict) --*
- **Tags** *(list) --*
The tags that are associated with the resource that you specified in the ``ListTagsForResource`` request.
- *(dict) --*
One tag that you want to add to the specified resource. A tag consists of a ``Key`` (a name for the tag) and a ``Value`` .
- **Key** *(string) --*
The name for the tag. For example, if you want to associate Resolver resources with the account IDs of your customers for billing purposes, the value of ``Key`` might be ``account-id`` .
- **Value** *(string) --*
The value for the tag. For example, if ``Key`` is ``account-id`` , then ``Value`` might be the ID of the customer account that you're creating the resource for.
- **NextToken** *(string) --*
If more than ``MaxResults`` tags match the specified criteria, you can submit another ``ListTagsForResource`` request to get the next group of results. In the next request, specify the value of ``NextToken`` from the previous response.
:type ResourceArn: string
:param ResourceArn: **[REQUIRED]**
The Amazon Resource Name (ARN) for the resource that you want to list tags for.
:type MaxResults: integer
:param MaxResults:
The maximum number of tags that you want to return in the response to a ``ListTagsForResource`` request. If you don\'t specify a value for ``MaxResults`` , Resolver returns up to 100 tags.
:type NextToken: string
:param NextToken:
For the first ``ListTagsForResource`` request, omit this value.
If you have more than ``MaxResults`` tags, you can submit another ``ListTagsForResource`` request to get the next group of tags for the resource. In the next request, specify the value of ``NextToken`` from the previous response.
:rtype: dict
:returns:
"""
pass
def put_resolver_rule_policy(self, Arn: str, ResolverRulePolicy: str) -> Dict:
"""
Specifies the Resolver operations and resources that you want to allow another AWS account to be able to use.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/PutResolverRulePolicy>`_
**Request Syntax**
::
response = client.put_resolver_rule_policy(
Arn='string',
ResolverRulePolicy='string'
)
**Response Syntax**
::
{
'ReturnValue': True|False
}
**Response Structure**
- *(dict) --*
The response to a ``PutResolverRulePolicy`` request.
- **ReturnValue** *(boolean) --*
Whether the ``PutResolverRulePolicy`` request was successful.
:type Arn: string
:param Arn: **[REQUIRED]**
The Amazon Resource Name (ARN) of the resolver rule that you want to share with another account.
:type ResolverRulePolicy: string
:param ResolverRulePolicy: **[REQUIRED]**
An AWS Identity and Access Management policy statement that lists the permissions that you want to grant to another AWS account.
:rtype: dict
:returns:
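**Example**
An illustrative sketch only; the exact policy grammar is defined by the service, and the account IDs, rule ARN, and action list here are placeholder assumptions rather than values from this documentation:
::
    import json

    import boto3

    client = boto3.client('route53resolver')
    rule_arn = ('arn:aws:route53resolver:us-east-1:111122223333:'
                'resolver-rule/rslvr-rr-EXAMPLE11111')  # placeholder rule ARN
    policy = {
        'Version': '2012-10-17',
        'Statement': [{
            'Effect': 'Allow',
            'Principal': {'AWS': 'arn:aws:iam::444455556666:root'},  # placeholder account
            'Action': ['route53resolver:GetResolverRule',
                       'route53resolver:AssociateResolverRule'],  # assumed action list
            'Resource': rule_arn,
        }],
    }
    response = client.put_resolver_rule_policy(
        Arn=rule_arn, ResolverRulePolicy=json.dumps(policy)
    )
    print(response['ReturnValue'])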
"""
pass
def tag_resource(self, ResourceArn: str, Tags: List) -> Dict:
"""
Adds one or more tags to a specified resource.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/TagResource>`_
**Request Syntax**
::
response = client.tag_resource(
ResourceArn='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
]
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type ResourceArn: string
:param ResourceArn: **[REQUIRED]**
The Amazon Resource Name (ARN) for the resource that you want to add tags to. To get the ARN for a resource, use the applicable ``Get`` or ``List`` command:
* GetResolverEndpoint
* GetResolverRule
* GetResolverRuleAssociation
* ListResolverEndpoints
* ListResolverRuleAssociations
* ListResolverRules
:type Tags: list
:param Tags: **[REQUIRED]**
The tags that you want to add to the specified resource.
- *(dict) --*
One tag that you want to add to the specified resource. A tag consists of a ``Key`` (a name for the tag) and a ``Value`` .
- **Key** *(string) --*
The name for the tag. For example, if you want to associate Resolver resources with the account IDs of your customers for billing purposes, the value of ``Key`` might be ``account-id`` .
- **Value** *(string) --*
The value for the tag. For example, if ``Key`` is ``account-id`` , then ``Value`` might be the ID of the customer account that you\'re creating the resource for.
:rtype: dict
:returns:
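**Example**
An illustrative call that tags a resolver rule with the ``account-id`` key mentioned above. The rule ARN and account ID are placeholders:
::
    import boto3

    client = boto3.client('route53resolver')
    client.tag_resource(
        ResourceArn=('arn:aws:route53resolver:us-east-1:111122223333:'
                     'resolver-rule/rslvr-rr-EXAMPLE11111'),  # placeholder ARN
        Tags=[{'Key': 'account-id', 'Value': '444455556666'}]  # placeholder value
    )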
"""
pass
def untag_resource(self, ResourceArn: str, TagKeys: List) -> Dict:
"""
Removes one or more tags from a specified resource.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/UntagResource>`_
**Request Syntax**
::
response = client.untag_resource(
ResourceArn='string',
TagKeys=[
'string',
]
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type ResourceArn: string
:param ResourceArn: **[REQUIRED]**
The Amazon Resource Name (ARN) for the resource that you want to remove tags from. To get the ARN for a resource, use the applicable ``Get`` or ``List`` command:
* GetResolverEndpoint
* GetResolverRule
* GetResolverRuleAssociation
* ListResolverEndpoints
* ListResolverRuleAssociations
* ListResolverRules
:type TagKeys: list
:param TagKeys: **[REQUIRED]**
The tags that you want to remove from the specified resource.
- *(string) --*
:rtype: dict
:returns:
"""
pass
def update_resolver_endpoint(self, ResolverEndpointId: str, Name: str = None) -> Dict:
"""
Updates the name of an inbound or an outbound resolver endpoint.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/UpdateResolverEndpoint>`_
**Request Syntax**
::
response = client.update_resolver_endpoint(
ResolverEndpointId='string',
Name='string'
)
**Response Syntax**
::
{
'ResolverEndpoint': {
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'Name': 'string',
'SecurityGroupIds': [
'string',
],
'Direction': 'INBOUND'|'OUTBOUND',
'IpAddressCount': 123,
'HostVPCId': 'string',
'Status': 'CREATING'|'OPERATIONAL'|'UPDATING'|'AUTO_RECOVERING'|'ACTION_NEEDED'|'DELETING',
'StatusMessage': 'string',
'CreationTime': 'string',
'ModificationTime': 'string'
}
}
**Response Structure**
- *(dict) --*
- **ResolverEndpoint** *(dict) --*
The response to an ``UpdateResolverEndpoint`` request.
- **Id** *(string) --*
The ID of the resolver endpoint.
- **CreatorRequestId** *(string) --*
A unique string that identifies the request that created the resolver endpoint. The ``CreatorRequestId`` allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver endpoint.
- **Name** *(string) --*
The name that you assigned to the resolver endpoint when you submitted a CreateResolverEndpoint request.
- **SecurityGroupIds** *(list) --*
The ID of one or more security groups that control access to this VPC. The security group must include one or more inbound rules (for inbound endpoints) or outbound rules (for outbound endpoints).
- *(string) --*
- **Direction** *(string) --*
Indicates whether the resolver endpoint allows inbound or outbound DNS queries:
* ``INBOUND`` : allows DNS queries to your VPC from your network or another VPC
* ``OUTBOUND`` : allows DNS queries from your VPC to your network or another VPC
- **IpAddressCount** *(integer) --*
The number of IP addresses that the resolver endpoint can use for DNS queries.
- **HostVPCId** *(string) --*
The ID of the VPC that you want to create the resolver endpoint in.
- **Status** *(string) --*
A code that specifies the current status of the resolver endpoint.
- **StatusMessage** *(string) --*
A detailed description of the status of the resolver endpoint.
- **CreationTime** *(string) --*
The date and time that the endpoint was created, in Unix time format and Coordinated Universal Time (UTC).
- **ModificationTime** *(string) --*
The date and time that the endpoint was last modified, in Unix time format and Coordinated Universal Time (UTC).
:type ResolverEndpointId: string
:param ResolverEndpointId: **[REQUIRED]**
The ID of the resolver endpoint that you want to update.
:type Name: string
:param Name:
The name of the resolver endpoint that you want to update.
:rtype: dict
:returns:
"""
pass
def update_resolver_rule(self, ResolverRuleId: str, Config: Dict) -> Dict:
"""
Updates settings for a specified resolver rule. ``ResolverRuleId`` is required, and all other parameters are optional. If you don't specify a parameter, it retains its current value.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/UpdateResolverRule>`_
**Request Syntax**
::
response = client.update_resolver_rule(
ResolverRuleId='string',
Config={
'Name': 'string',
'TargetIps': [
{
'Ip': 'string',
'Port': 123
},
],
'ResolverEndpointId': 'string'
}
)
**Response Syntax**
::
{
'ResolverRule': {
'Id': 'string',
'CreatorRequestId': 'string',
'Arn': 'string',
'DomainName': 'string',
'Status': 'COMPLETE'|'DELETING'|'UPDATING'|'FAILED',
'StatusMessage': 'string',
'RuleType': 'FORWARD'|'SYSTEM'|'RECURSIVE',
'Name': 'string',
'TargetIps': [
{
'Ip': 'string',
'Port': 123
},
],
'ResolverEndpointId': 'string',
'OwnerId': 'string',
'ShareStatus': 'NOT_SHARED'|'SHARED_WITH_ME'|'SHARED_BY_ME'
}
}
**Response Structure**
- *(dict) --*
- **ResolverRule** *(dict) --*
The response to an ``UpdateResolverRule`` request.
- **Id** *(string) --*
The ID that Resolver assigned to the resolver rule when you created it.
- **CreatorRequestId** *(string) --*
A unique string that you specified when you created the resolver rule. ``CreatorRequestId`` identifies the request and allows failed requests to be retried without the risk of executing the operation twice.
- **Arn** *(string) --*
The ARN (Amazon Resource Name) for the resolver rule specified by ``Id`` .
- **DomainName** *(string) --*
DNS queries for this domain name are forwarded to the IP addresses that are specified in ``TargetIps`` . If a query matches multiple resolver rules (example.com and www.example.com), the query is routed using the resolver rule that contains the most specific domain name (www.example.com).
- **Status** *(string) --*
A code that specifies the current status of the resolver rule.
- **StatusMessage** *(string) --*
A detailed description of the status of a resolver rule.
- **RuleType** *(string) --*
This value is always ``FORWARD`` . Other resolver rule types aren't supported.
- **Name** *(string) --*
The name for the resolver rule, which you specified when you created the resolver rule.
- **TargetIps** *(list) --*
An array that contains the IP addresses and ports that you want to forward DNS queries to.
- *(dict) --*
In a CreateResolverRule request, an array of the IPs that you want to forward DNS queries to.
- **Ip** *(string) --*
One IP address that you want to forward DNS queries to. You can specify only IPv4 addresses.
- **Port** *(integer) --*
The port at ``Ip`` that you want to forward DNS queries to.
- **ResolverEndpointId** *(string) --*
The ID of the endpoint that the rule is associated with.
- **OwnerId** *(string) --*
When a rule is shared with another AWS account, the account ID of the account that the rule is shared with.
- **ShareStatus** *(string) --*
Whether the rule is shared and, if so, whether the current account is sharing the rule with another account or another account is sharing the rule with the current account.
:type ResolverRuleId: string
:param ResolverRuleId: **[REQUIRED]**
The ID of the resolver rule that you want to update.
:type Config: dict
:param Config: **[REQUIRED]**
The new settings for the resolver rule.
- **Name** *(string) --*
The new name for the resolver rule. The name that you specify appears in the Resolver dashboard in the Route 53 console.
- **TargetIps** *(list) --*
For DNS queries that originate in your VPC, the new IP addresses that you want to route outbound DNS queries to.
- *(dict) --*
In a CreateResolverRule request, an array of the IPs that you want to forward DNS queries to.
- **Ip** *(string) --* **[REQUIRED]**
One IP address that you want to forward DNS queries to. You can specify only IPv4 addresses.
- **Port** *(integer) --*
The port at ``Ip`` that you want to forward DNS queries to.
- **ResolverEndpointId** *(string) --*
The ID of the new outbound resolver endpoint that you want to use to route DNS queries to the IP addresses that you specify in ``TargetIps`` .
:rtype: dict
:returns:
"""
pass
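# Illustrative call (not part of the generated stub; the IDs and the target IP
# are placeholders drawn from the request syntax above): rename a rule and
# point it at a new target address.
#
#   client = boto3.client('route53resolver')
#   response = client.update_resolver_rule(
#       ResolverRuleId='rslvr-rr-exampleid',
#       Config={
#           'Name': 'renamed-rule',
#           'TargetIps': [{'Ip': '192.0.2.44', 'Port': 53}],
#       },
#   )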
| avg_line_length: 55.472189 | max_line_length: 360 | alphanum_fraction: 0.560471 | qsc_code_num_words: 9,754 | qsc_code_num_chars: 93,748 | qsc_code_num_lines: 1,689 | (remaining qsc_* quality-signal columns omitted) |
--- next row ---
hexsha: 5303dc257d1c12aab42b1a0de1894f0354538dc4 | size: 2,458 | ext: py | lang: Python
max_stars: path tests/test_config_from_string.py | repo CrossNox/YouConfigMe | head 79805e0d1b125bffe43f3c3277e36c4980322fab | licenses ["MIT"] | count 8 | events 2020-02-28T23:01:01.000Z .. 2021-03-24T04:04:21.000Z
max_issues: path tests/test_config_from_string.py | repo CrossNox/YouConfigMe | head 79805e0d1b125bffe43f3c3277e36c4980322fab | licenses ["MIT"] | count 4 | events 2020-04-05T11:20:20.000Z .. 2020-10-11T14:41:19.000Z
max_forks: path tests/test_config_from_string.py | repo CrossNox/YouConfigMe | head 79805e0d1b125bffe43f3c3277e36c4980322fab | licenses ["MIT"] | count null | events null .. null
content:
"""Config tests from a string"""
# pylint: disable=redefined-outer-name
# pylint: disable=missing-function-docstring
import pytest
from youconfigme import Config, ConfigItemNotFound
@pytest.fixture
def config_from_str():
config_string = """[a]
k1=1
k2=2
[b]
k3=3
k4=4
"""
return Config(from_items=config_string)
def test_config_from_str_sa_to_dict(config_from_str):
assert config_from_str.a.to_dict() == {'k1': '1', 'k2': '2'}
def test_config_from_str_sb_to_dict(config_from_str):
assert config_from_str.b.to_dict() == {'k3': '3', 'k4': '4'}
def test_config_from_str_to_dict(config_from_str):
assert config_from_str.to_dict() == {
'a': {'k1': '1', 'k2': '2'},
'b': {'k3': '3', 'k4': '4'},
}
def test_config_from_str_sa_k1(config_from_str):
assert config_from_str.a.k1() == '1'
assert config_from_str.a.k1(cast=int) == 1
assert config_from_str.a.k1(default='z') == '1'
assert config_from_str.a.k1(default='z', cast=int) == 1
def test_config_from_str_sa_k2(config_from_str):
assert config_from_str.a.k2() == '2'
assert config_from_str.a.k2(cast=int) == 2
assert config_from_str.a.k2(default='z') == '2'
assert config_from_str.a.k2(default='z', cast=int) == 2
def test_config_from_str_sa_k7(config_from_str):
assert config_from_str.a.k7(default='7') == '7'
assert config_from_str.a.k7(default='7', cast=int) == 7
def test_config_from_str_sa_k7_raise(config_from_str):
with pytest.raises(ConfigItemNotFound):
config_from_str.a.k7()
def test_config_from_str_sb_k3(config_from_str):
assert config_from_str.b.k3() == '3'
assert config_from_str.b.k3(cast=int) == 3
assert config_from_str.b.k3(default='z') == '3'
assert config_from_str.b.k3(default='z', cast=int) == 3
def test_config_from_str_sb_k4(config_from_str):
assert config_from_str.b.k4() == '4'
assert config_from_str.b.k4(cast=int) == 4
assert config_from_str.b.k4(default='z') == '4'
assert config_from_str.b.k4(default='z', cast=int) == 4
def test_config_from_str_sb_k7(config_from_str):
assert config_from_str.b.k7(default='7') == '7'
assert config_from_str.b.k7(default='7', cast=int) == 7
def test_config_from_str_sb_k7_raise(config_from_str):
with pytest.raises(ConfigItemNotFound):
config_from_str.b.k7()
def test_config_from_str_sc(config_from_str):
with pytest.raises(ConfigItemNotFound):
config_from_str.c.a()
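# Minimal standalone sketch of the behaviour pinned down above (assumes the
# same youconfigme API; values mirror the fixture):
#
#   cfg = Config(from_items="[a]\nk1=1\n")
#   assert cfg.a.k1() == '1'             # raw values come back as strings
#   assert cfg.a.k1(cast=int) == 1       # cast converts after lookup
#   assert cfg.a.k7(default='7') == '7'  # default wins when the key is absent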
| avg_line_length: 27.617978 | max_line_length: 64 | alphanum_fraction: 0.70057 | qsc_code_num_words: 418 | qsc_code_num_chars: 2,458 | qsc_code_num_lines: 88 | (remaining qsc_* quality-signal columns omitted) |
--- next row ---
hexsha: 5312700bef1d906a1e0d3a086b76e7d36dd16e6d | size: 28,184 | ext: py | lang: Python
max_stars: path external/model-preparation-algorithm/tests/test_ote_training.py | repo opencv/openvino_training_extensions | head f5d809741e192a2345558efc75899a475019cf98 | licenses ["Apache-2.0"] | count 775 | events 2019-03-01T02:13:33.000Z .. 2020-09-07T22:49:15.000Z
max_issues: path external/model-preparation-algorithm/tests/test_ote_training.py | repo opencv/openvino_training_extensions | head f5d809741e192a2345558efc75899a475019cf98 | licenses ["Apache-2.0"] | count 229 | events 2019-02-28T21:37:08.000Z .. 2020-09-07T15:11:49.000Z
max_forks: path external/model-preparation-algorithm/tests/test_ote_training.py | repo opencv/openvino_training_extensions | head f5d809741e192a2345558efc75899a475019cf98 | licenses ["Apache-2.0"] | count 290 | events 2019-02-28T20:32:11.000Z .. 2020-09-07T05:51:41.000Z
content:
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
import logging
import os
import os.path as osp
from collections import namedtuple
from copy import deepcopy
from pprint import pformat
from typing import Any, Callable, Dict, List, Optional, Type
import pytest
from ote_sdk.entities.datasets import DatasetEntity
from ote_sdk.entities.label import Domain
from ote_sdk.entities.label_schema import LabelSchemaEntity
from ote_sdk.entities.subset import Subset
from torchreid_tasks.utils import ClassificationDatasetAdapter
from detection_tasks.extension.datasets.data_utils import load_dataset_items_coco_format
from segmentation_tasks.extension.datasets.mmdataset import load_dataset_items
from ote_sdk.test_suite.e2e_test_system import DataCollector, e2e_pytest_performance
from ote_sdk.test_suite.training_test_case import (OTETestCaseInterface,
generate_ote_integration_test_case_class)
from ote_sdk.test_suite.training_tests_common import (make_path_be_abs,
make_paths_be_abs,
KEEP_CONFIG_FIELD_VALUE,
REALLIFE_USECASE_CONSTANT,
ROOT_PATH_KEY)
from ote_sdk.test_suite.training_tests_helper import (OTETestHelper,
DefaultOTETestCreationParametersInterface,
OTETrainingTestInterface)
from ote_sdk.test_suite.training_tests_actions import (OTETestTrainingAction,
BaseOTETestAction,
OTETestTrainingEvaluationAction,
OTETestExportAction,
OTETestExportEvaluationAction,
OTETestPotAction,
OTETestPotEvaluationAction)
logger = logging.getLogger(__name__)
def DATASET_PARAMETERS_FIELDS() -> List[str]:
return deepcopy(['annotations_train',
'images_train_dir',
'annotations_val',
'images_val_dir',
'annotations_test',
'images_test_dir',
'pre_trained_model',
])
DatasetParameters = namedtuple('DatasetParameters', DATASET_PARAMETERS_FIELDS())
def get_test_action_classes() -> List[Type[BaseOTETestAction]]:
return [
OTETestTrainingAction,
OTETestTrainingEvaluationAction,
OTETestExportAction,
OTETestExportEvaluationAction,
OTETestPotAction,
OTETestPotEvaluationAction,
]
def _get_dataset_params_from_dataset_definitions(dataset_definitions, dataset_name):
if dataset_name not in dataset_definitions:
raise ValueError(f'dataset {dataset_name} is absent in dataset_definitions, '
f'dataset_definitions.keys={list(dataset_definitions.keys())}')
cur_dataset_definition = dataset_definitions[dataset_name]
training_parameters_fields = {k: v for k, v in cur_dataset_definition.items()
if k in DATASET_PARAMETERS_FIELDS()}
make_paths_be_abs(training_parameters_fields, dataset_definitions[ROOT_PATH_KEY])
assert set(DATASET_PARAMETERS_FIELDS()) == set(training_parameters_fields.keys()), \
f'ERROR: dataset definitions for name={dataset_name} does not contain all required fields'
assert all(training_parameters_fields.values()), \
f'ERROR: dataset definitions for name={dataset_name} contains empty values for some required fields'
params = DatasetParameters(**training_parameters_fields)
return params
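# Hypothetical shape of the `dataset_definitions` mapping that the helper
# above expects (paths are placeholders): every field returned by
# DATASET_PARAMETERS_FIELDS() must be present and non-empty, and
# ROOT_PATH_KEY points at the directory used to absolutize relative paths.
#
#   dataset_definitions = {
#       ROOT_PATH_KEY: '/data/definitions',
#       'coco_cls_incr': {
#           'annotations_train': 'coco/annotations/train.json',
#           'images_train_dir': 'coco/images/train',
#           'annotations_val': 'coco/annotations/val.json',
#           'images_val_dir': 'coco/images/val',
#           'annotations_test': 'coco/annotations/test.json',
#           'images_test_dir': 'coco/images/test',
#           'pre_trained_model': 'pretrained/coco',
#       },
#   }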
def _create_classification_dataset_and_labels_schema(dataset_params, model_name):
logger.debug(f'Using for train annotation file {dataset_params.annotations_train}')
logger.debug(f'Using for val annotation file {dataset_params.annotations_val}')
dataset = ClassificationDatasetAdapter(
train_data_root=osp.join(dataset_params.images_train_dir),
train_ann_file=osp.join(dataset_params.annotations_train),
val_data_root=osp.join(dataset_params.images_val_dir),
val_ann_file=osp.join(dataset_params.annotations_val),
test_data_root=osp.join(dataset_params.images_test_dir),
test_ann_file=osp.join(dataset_params.annotations_test))
labels_schema = LabelSchemaEntity.from_labels(dataset.get_labels())
return dataset, labels_schema
def _create_object_detection_dataset_and_labels_schema(dataset_params):
logger.debug(f'Using for train annotation file {dataset_params.annotations_train}')
logger.debug(f'Using for val annotation file {dataset_params.annotations_val}')
labels_list = []
items = []
items.extend(load_dataset_items_coco_format(
ann_file_path=dataset_params.annotations_train,
data_root_dir=dataset_params.images_train_dir,
domain=Domain.DETECTION,
subset=Subset.TRAINING,
labels_list=labels_list))
items.extend(load_dataset_items_coco_format(
ann_file_path=dataset_params.annotations_val,
data_root_dir=dataset_params.images_val_dir,
domain=Domain.DETECTION,
subset=Subset.VALIDATION,
labels_list=labels_list))
items.extend(load_dataset_items_coco_format(
ann_file_path=dataset_params.annotations_test,
data_root_dir=dataset_params.images_test_dir,
domain=Domain.DETECTION,
subset=Subset.TESTING,
labels_list=labels_list))
dataset = DatasetEntity(items=items)
labels_schema = LabelSchemaEntity.from_labels(dataset.get_labels())
return dataset, labels_schema
def _create_segmentation_dataset_and_labels_schema(dataset_params):
logger.debug(f'Using for train annotation file {dataset_params.annotations_train}')
logger.debug(f'Using for val annotation file {dataset_params.annotations_val}')
labels_list = []
items = load_dataset_items(
ann_file_path=dataset_params.annotations_train,
data_root_dir=dataset_params.images_train_dir,
subset=Subset.TRAINING,
labels_list=labels_list)
items.extend(load_dataset_items(
ann_file_path=dataset_params.annotations_val,
data_root_dir=dataset_params.images_val_dir,
subset=Subset.VALIDATION,
labels_list=labels_list))
items.extend(load_dataset_items(
ann_file_path=dataset_params.annotations_test,
data_root_dir=dataset_params.images_test_dir,
subset=Subset.TESTING,
labels_list=labels_list))
dataset = DatasetEntity(items=items)
labels_schema = LabelSchemaEntity.from_labels(labels_list)
return dataset, labels_schema
class ClassificationClsIncrTrainingTestParameters(DefaultOTETestCreationParametersInterface):
def test_case_class(self) -> Type[OTETestCaseInterface]:
return generate_ote_integration_test_case_class(
get_test_action_classes()
)
def test_bunches(self) -> List[Dict[str, Any]]:
test_bunches = [
dict(
model_name=[
'ClassIncremental_Image_Classification_EfficinetNet-B0',
'ClassIncremental_Image_Classification_EfficinetNet-V2-S',
'ClassIncremental_Image_Classification_MobileNet-V3-large-1x',
'ClassIncremental_Image_Classification_MobileNet-V3-large-0.75x',
'ClassIncremental_Image_Classification_MobileNet-V3-small'
],
dataset_name=['cifar10_cls_incr'],
usecase='precommit',
),
dict(
model_name=[
'ClassIncremental_Image_Classification_EfficinetNet-B0',
'ClassIncremental_Image_Classification_EfficinetNet-V2-S',
'ClassIncremental_Image_Classification_MobileNet-V3-large-1x',
'ClassIncremental_Image_Classification_MobileNet-V3-large-0.75x',
'ClassIncremental_Image_Classification_MobileNet-V3-small'
],
dataset_name=['cifar10_cls_incr'],
num_training_iters=KEEP_CONFIG_FIELD_VALUE,
batch_size=KEEP_CONFIG_FIELD_VALUE,
usecase=REALLIFE_USECASE_CONSTANT,
),
]
return deepcopy(test_bunches)
def default_test_parameters(self) -> Dict[str, Any]:
DEFAULT_TEST_PARAMETERS = {
"num_training_iters": 2,
"batch_size": 16,
}
return deepcopy(DEFAULT_TEST_PARAMETERS)
class DetectionClsIncrTrainingTestParameters(DefaultOTETestCreationParametersInterface):
def test_case_class(self) -> Type[OTETestCaseInterface]:
return generate_ote_integration_test_case_class(
get_test_action_classes()
)
def test_bunches(self) -> List[Dict[str, Any]]:
test_bunches = [
dict(
model_name=[
'ClassIncremental_Object_Detection_Gen3_ATSS',
'ClassIncremental_Object_Detection_Gen3_VFNet',
],
dataset_name='coco_cls_incr',
usecase='precommit',
),
dict(
model_name=[
'ClassIncremental_Object_Detection_Gen3_ATSS',
'ClassIncremental_Object_Detection_Gen3_VFNet',
],
dataset_name='coco_cls_incr',
num_training_iters=KEEP_CONFIG_FIELD_VALUE,
batch_size=KEEP_CONFIG_FIELD_VALUE,
usecase=REALLIFE_USECASE_CONSTANT,
),
]
return deepcopy(test_bunches)
class SegmentationClsIncrTrainingTestParameters(DefaultOTETestCreationParametersInterface):
def test_case_class(self) -> Type[OTETestCaseInterface]:
return generate_ote_integration_test_case_class(
get_test_action_classes()
)
def test_bunches(self) -> List[Dict[str, Any]]:
test_bunches = [
dict(
model_name=[
'ClassIncremental_Semantic_Segmentation_Lite-HRNet-18_OCR',
],
dataset_name='voc_cls_incr',
usecase='precommit',
),
dict(
model_name=[
'ClassIncremental_Semantic_Segmentation_Lite-HRNet-18_OCR',
],
dataset_name='voc_cls_incr',
num_training_iters=KEEP_CONFIG_FIELD_VALUE,
batch_size=KEEP_CONFIG_FIELD_VALUE,
usecase=REALLIFE_USECASE_CONSTANT,
),
]
return deepcopy(test_bunches)
class TestOTEReallifeClassificationClsIncr(OTETrainingTestInterface):
"""
The main class for running the tests in this file.
"""
PERFORMANCE_RESULTS = None # it is required for e2e system
helper = OTETestHelper(ClassificationClsIncrTrainingTestParameters())
@classmethod
def get_list_of_tests(cls, usecase: Optional[str] = None):
"""
This method should be a classmethod. It is called before fixture initialization, during
test discovery.
"""
return cls.helper.get_list_of_tests(usecase)
@pytest.fixture
def params_factories_for_test_actions_fx(self, current_test_parameters_fx,
dataset_definitions_fx, template_paths_fx,
ote_current_reference_dir_fx) -> Dict[str,Callable[[], Dict]]:
logger.debug('params_factories_for_test_actions_fx: begin')
test_parameters = deepcopy(current_test_parameters_fx)
dataset_definitions = deepcopy(dataset_definitions_fx)
template_paths = deepcopy(template_paths_fx)
def _training_params_factory() -> Dict:
if dataset_definitions is None:
pytest.skip('The parameter "--dataset-definitions" is not set')
model_name = test_parameters['model_name']
dataset_name = test_parameters['dataset_name']
num_training_iters = test_parameters['num_training_iters']
batch_size = test_parameters['batch_size']
dataset_params = _get_dataset_params_from_dataset_definitions(dataset_definitions, dataset_name)
if model_name not in template_paths:
raise ValueError(f'Model {model_name} is absent in template_paths, '
f'template_paths.keys={list(template_paths.keys())}')
template_path = make_path_be_abs(template_paths[model_name], template_paths[ROOT_PATH_KEY])
logger.debug('training params factory: Before creating dataset and labels_schema')
dataset, labels_schema = _create_classification_dataset_and_labels_schema(dataset_params, model_name)
ckpt_path = None
if hasattr(dataset_params, 'pre_trained_model'):
ckpt_path = osp.join(dataset_params.pre_trained_model, model_name, "weights.pth")
logger.info(f"Pretrained path : {ckpt_path}")
logger.debug('training params factory: After creating dataset and labels_schema')
return {
'dataset': dataset,
'labels_schema': labels_schema,
'template_path': template_path,
'num_training_iters': num_training_iters,
'batch_size': batch_size,
'checkpoint': ckpt_path
}
params_factories_for_test_actions = {
'training': _training_params_factory,
}
logger.debug('params_factories_for_test_actions_fx: end')
return params_factories_for_test_actions
@pytest.fixture
def test_case_fx(self, current_test_parameters_fx, params_factories_for_test_actions_fx):
"""
This fixture returns the test case class OTEIntegrationTestCase that should be used for the current test.
Note that the cache in the test helper allows the instance of the class to be stored
between tests.
If the main parameters used for this test are the same as those used for the previous test,
the instance of the test case class is kept and re-used. This is helpful for tests that can
re-use the results of operations (model training, model optimization, etc.) made for previous tests,
when these operations are time-consuming.
If the main parameters used for this test differ from those of the previous test, a new instance of
the test case class is created.
"""
test_case = type(self).helper.get_test_case(current_test_parameters_fx,
params_factories_for_test_actions_fx)
return test_case
@e2e_pytest_performance
def test(self,
test_parameters,
test_case_fx, data_collector_fx,
cur_test_expected_metrics_callback_fx):
test_case_fx.run_stage(test_parameters['test_stage'], data_collector_fx,
cur_test_expected_metrics_callback_fx)
class TestOTEReallifeObjectDetectionClsIncr(OTETrainingTestInterface):
"""
The main class for running the tests in this file.
"""
PERFORMANCE_RESULTS = None # it is required for e2e system
helper = OTETestHelper(DetectionClsIncrTrainingTestParameters())
@classmethod
def get_list_of_tests(cls, usecase: Optional[str] = None):
"""
This method should be a classmethod. It is called before fixture initialization, during
test discovery.
"""
return cls.helper.get_list_of_tests(usecase)
@pytest.fixture
def params_factories_for_test_actions_fx(self, current_test_parameters_fx,
dataset_definitions_fx, template_paths_fx,
ote_current_reference_dir_fx) -> Dict[str,Callable[[], Dict]]:
logger.debug('params_factories_for_test_actions_fx: begin')
test_parameters = deepcopy(current_test_parameters_fx)
dataset_definitions = deepcopy(dataset_definitions_fx)
template_paths = deepcopy(template_paths_fx)
def _training_params_factory() -> Dict:
if dataset_definitions is None:
pytest.skip('The parameter "--dataset-definitions" is not set')
model_name = test_parameters['model_name']
dataset_name = test_parameters['dataset_name']
num_training_iters = test_parameters['num_training_iters']
batch_size = test_parameters['batch_size']
dataset_params = _get_dataset_params_from_dataset_definitions(dataset_definitions, dataset_name)
if model_name not in template_paths:
raise ValueError(f'Model {model_name} is absent in template_paths, '
f'template_paths.keys={list(template_paths.keys())}')
template_path = make_path_be_abs(template_paths[model_name], template_paths[ROOT_PATH_KEY])
logger.debug('training params factory: Before creating dataset and labels_schema')
dataset, labels_schema = _create_object_detection_dataset_and_labels_schema(dataset_params)
ckpt_path = None
if hasattr(dataset_params, 'pre_trained_model'):
ckpt_path = osp.join(dataset_params.pre_trained_model, model_name, "weights.pth")
logger.debug('training params factory: After creating dataset and labels_schema')
return {
'dataset': dataset,
'labels_schema': labels_schema,
'template_path': template_path,
'num_training_iters': num_training_iters,
'batch_size': batch_size,
'checkpoint': ckpt_path
}
params_factories_for_test_actions = {
'training': _training_params_factory,
}
logger.debug('params_factories_for_test_actions_fx: end')
return params_factories_for_test_actions
@pytest.fixture
def test_case_fx(self, current_test_parameters_fx, params_factories_for_test_actions_fx):
"""
This fixture returns the test case class OTEIntegrationTestCase that should be used for the current test.
Note that the cache in the test helper allows the instance of the class to be stored
between tests.
If the main parameters used for this test are the same as those used for the previous test,
the instance of the test case class is kept and re-used. This is helpful for tests that can
re-use the results of operations (model training, model optimization, etc.) made for previous tests,
when these operations are time-consuming.
If the main parameters used for this test differ from those of the previous test, a new instance of
the test case class is created.
"""
test_case = type(self).helper.get_test_case(current_test_parameters_fx,
params_factories_for_test_actions_fx)
return test_case
# TODO(lbeynens): move to common fixtures
@pytest.fixture
def data_collector_fx(self, request) -> DataCollector:
setup = deepcopy(request.node.callspec.params)
setup['environment_name'] = os.environ.get('TT_ENVIRONMENT_NAME', 'no-env')
setup['test_type'] = os.environ.get('TT_TEST_TYPE', 'no-test-type') # TODO: get from e2e test type
setup['scenario'] = 'api' # TODO(lbeynens): get from a fixture!
setup['test'] = request.node.name
setup['subject'] = 'detection-cls-incr'
setup['project'] = 'ote'
if 'test_parameters' in setup:
assert isinstance(setup['test_parameters'], dict)
if 'dataset_name' not in setup:
setup['dataset_name'] = setup['test_parameters'].get('dataset_name')
if 'model_name' not in setup:
setup['model_name'] = setup['test_parameters'].get('model_name')
if 'test_stage' not in setup:
setup['test_stage'] = setup['test_parameters'].get('test_stage')
if 'usecase' not in setup:
setup['usecase'] = setup['test_parameters'].get('usecase')
logger.info(f'creating DataCollector: setup=\n{pformat(setup, width=140)}')
data_collector = DataCollector(name='TestOTEIntegration',
setup=setup)
with data_collector:
logger.info('data_collector is created')
yield data_collector
logger.info('data_collector is released')
@e2e_pytest_performance
def test(self,
test_parameters,
test_case_fx, data_collector_fx,
cur_test_expected_metrics_callback_fx):
test_case_fx.run_stage(test_parameters['test_stage'], data_collector_fx,
cur_test_expected_metrics_callback_fx)
class TestOTEReallifeSegmentationClsIncr(OTETrainingTestInterface):
"""
The main class for running the tests in this file.
"""
PERFORMANCE_RESULTS = None # it is required for e2e system
helper = OTETestHelper(SegmentationClsIncrTrainingTestParameters())
@classmethod
def get_list_of_tests(cls, usecase: Optional[str] = None):
"""
This method should be a classmethod. It is called before fixture initialization, during
test discovery.
"""
return cls.helper.get_list_of_tests(usecase)
@pytest.fixture
def params_factories_for_test_actions_fx(self, current_test_parameters_fx,
dataset_definitions_fx, template_paths_fx,
ote_current_reference_dir_fx) -> Dict[str,Callable[[], Dict]]:
logger.debug('params_factories_for_test_actions_fx: begin')
test_parameters = deepcopy(current_test_parameters_fx)
dataset_definitions = deepcopy(dataset_definitions_fx)
template_paths = deepcopy(template_paths_fx)
def _training_params_factory() -> Dict:
if dataset_definitions is None:
pytest.skip('The parameter "--dataset-definitions" is not set')
model_name = test_parameters['model_name']
dataset_name = test_parameters['dataset_name']
num_training_iters = test_parameters['num_training_iters']
batch_size = test_parameters['batch_size']
dataset_params = _get_dataset_params_from_dataset_definitions(dataset_definitions, dataset_name)
if model_name not in template_paths:
raise ValueError(f'Model {model_name} is absent in template_paths, '
f'template_paths.keys={list(template_paths.keys())}')
template_path = make_path_be_abs(template_paths[model_name], template_paths[ROOT_PATH_KEY])
logger.debug('training params factory: Before creating dataset and labels_schema')
dataset, labels_schema = _create_segmentation_dataset_and_labels_schema(dataset_params)
ckpt_path = None
if hasattr(dataset_params, 'pre_trained_model'):
ckpt_path = osp.join(dataset_params.pre_trained_model, model_name, "weights.pth")
logger.debug('training params factory: After creating dataset and labels_schema')
return {
'dataset': dataset,
'labels_schema': labels_schema,
'template_path': template_path,
'num_training_iters': num_training_iters,
'batch_size': batch_size,
'checkpoint': ckpt_path
}
params_factories_for_test_actions = {
'training': _training_params_factory,
}
logger.debug('params_factories_for_test_actions_fx: end')
return params_factories_for_test_actions
@pytest.fixture
def test_case_fx(self, current_test_parameters_fx, params_factories_for_test_actions_fx):
"""
This fixture returns the test case class OTEIntegrationTestCase that should be used for the current test.
Note that the cache in the test helper allows the instance of the class to be stored
between tests.
If the main parameters used for this test are the same as those used for the previous test,
the instance of the test case class is kept and re-used. This is helpful for tests that can
re-use the results of operations (model training, model optimization, etc.) made for previous tests,
when these operations are time-consuming.
If the main parameters used for this test differ from those of the previous test, a new instance of
the test case class is created.
"""
test_case = type(self).helper.get_test_case(current_test_parameters_fx,
params_factories_for_test_actions_fx)
return test_case
# TODO(lbeynens): move to common fixtures
@pytest.fixture
def data_collector_fx(self, request) -> DataCollector:
setup = deepcopy(request.node.callspec.params)
setup['environment_name'] = os.environ.get('TT_ENVIRONMENT_NAME', 'no-env')
setup['test_type'] = os.environ.get('TT_TEST_TYPE', 'no-test-type') # TODO: get from e2e test type
setup['scenario'] = 'api' # TODO(lbeynens): get from a fixture!
setup['test'] = request.node.name
setup['subject'] = 'segmentation-cls-incr'
setup['project'] = 'ote'
if 'test_parameters' in setup:
assert isinstance(setup['test_parameters'], dict)
if 'dataset_name' not in setup:
setup['dataset_name'] = setup['test_parameters'].get('dataset_name')
if 'model_name' not in setup:
setup['model_name'] = setup['test_parameters'].get('model_name')
if 'test_stage' not in setup:
setup['test_stage'] = setup['test_parameters'].get('test_stage')
if 'usecase' not in setup:
setup['usecase'] = setup['test_parameters'].get('usecase')
logger.info(f'creating DataCollector: setup=\n{pformat(setup, width=140)}')
data_collector = DataCollector(name='TestOTEIntegration',
setup=setup)
with data_collector:
logger.info('data_collector is created')
yield data_collector
logger.info('data_collector is released')
@e2e_pytest_performance
def test(self,
test_parameters,
test_case_fx, data_collector_fx,
cur_test_expected_metrics_callback_fx):
if "pot_evaluation" in test_parameters["test_stage"]:
pytest.xfail("Known issue CVS-84576")
test_case_fx.run_stage(test_parameters['test_stage'], data_collector_fx,
cur_test_expected_metrics_callback_fx)
| avg_line_length: 49.015652 | max_line_length: 114 | alphanum_fraction: 0.637028 | qsc_code_num_words: 3,011 | qsc_code_num_chars: 28,184 | qsc_code_num_lines: 574 | (remaining qsc_* quality-signal columns omitted) |
--- next row ---
hexsha: 5332afbb7f3b75083b77a5e57adf9b9350f6b50c | size: 15 | ext: py | lang: Python
max_stars: path Lib/test/test_compiler/testcorpus/01_expr_unary.py | repo diogommartins/cinder | head 79103e9119cbecef3b085ccf2878f00c26e1d175 | licenses ["CNRI-Python-GPL-Compatible"] | count 1,886 | events 2021-05-03T23:58:43.000Z .. 2022-03-31T19:15:58.000Z
max_issues: path Lib/test/test_compiler/testcorpus/01_expr_unary.py | repo diogommartins/cinder | head 79103e9119cbecef3b085ccf2878f00c26e1d175 | licenses ["CNRI-Python-GPL-Compatible"] | count 70 | events 2021-05-04T23:25:35.000Z .. 2022-03-31T18:42:08.000Z
max_forks: path Lib/test/test_compiler/testcorpus/01_expr_unary.py | repo diogommartins/cinder | head 79103e9119cbecef3b085ccf2878f00c26e1d175 | licenses ["CNRI-Python-GPL-Compatible"] | count 52 | events 2021-05-04T21:26:03.000Z .. 2022-03-08T18:02:56.000Z
content:
-a
~a
+a
not a
| avg_line_length: 3 | max_line_length: 5 | alphanum_fraction: 0.466667 | qsc_code_num_words: 5 | qsc_code_num_chars: 15 | qsc_code_num_lines: 4 | (remaining qsc_* quality-signal columns omitted) |
--- next row ---
hexsha: 536b526aa5014af2c7254df974b6035251f06c23 | size: 1,320 | ext: py | lang: Python
max_stars: path catalyst/exchange/exchange_execution.py | repo izokay/catalyst | head db312be6543cd00f7f4f3ff6dc9072d29f6e7d97 | licenses ["Apache-2.0"] | count 6 | events 2019-05-23T17:52:22.000Z .. 2022-01-30T08:13:19.000Z
max_issues: path catalyst/exchange/exchange_execution.py | repo Donstesh/catalyst | head 83e2e2b23c0266bde1c11e68a6acde7460c6eadf | licenses ["Apache-2.0"] | count null | events null .. null
max_forks: path catalyst/exchange/exchange_execution.py | repo Donstesh/catalyst | head 83e2e2b23c0266bde1c11e68a6acde7460c6eadf | licenses ["Apache-2.0"] | count 1 | events 2020-10-29T16:14:10.000Z .. 2020-10-29T16:14:10.000Z
content:
from catalyst.finance.execution import LimitOrder, StopOrder, StopLimitOrder
class ExchangeLimitOrder(LimitOrder):
def get_limit_price(self, is_buy):
"""
We may be trading Satoshis with 8 decimals, so we cannot round numbers.
Parameters
----------
is_buy: bool
Returns
-------
float
"""
return self.limit_price
class ExchangeStopOrder(StopOrder):
def get_stop_price(self, is_buy):
"""
We may be trading Satoshis with 8 decimals, so we cannot round numbers.
Parameters
----------
is_buy: bool
Returns
-------
float
"""
return self.stop_price
class ExchangeStopLimitOrder(StopLimitOrder):
def get_limit_price(self, is_buy):
"""
We may be trading Satoshis with 8 decimals, so we cannot round numbers.
Parameters
----------
is_buy: bool
Returns
-------
float
"""
return self.limit_price
def get_stop_price(self, is_buy):
"""
We may be trading Satoshis with 8 decimals, so we cannot round numbers.
Parameters
----------
is_buy: bool
Returns
-------
float
"""
return self.stop_price
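# Minimal usage sketch (assumes the zipline-style constructor signature; the
# price below is a placeholder): these exchange order types return the price
# verbatim, with no rounding, so all eight decimal places survive.
#
#   order = ExchangeLimitOrder(limit_price=0.00012345)
#   order.get_limit_price(is_buy=True)  # -> 0.00012345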
| avg_line_length: 19.411765 | max_line_length: 76 | alphanum_fraction: 0.539394 | qsc_code_num_words: 133 | qsc_code_num_chars: 1,320 | qsc_code_num_lines: 67 | (remaining qsc_* quality-signal columns omitted) |
--- next row ---
hexsha: 72580bfc56ab5d7fb3b95d1ec6cb33dc31ae28c8 | size: 20,919 | ext: py | lang: Python
max_stars: path tests/registries/test_matching_for_spawning.py | repo ztaylor54/kopf | head 214310c8f678fad5e267aacfbbc8acdadb557d9c | licenses ["MIT"] | count null | events null .. null
max_issues: path tests/registries/test_matching_for_spawning.py | repo ztaylor54/kopf | head 214310c8f678fad5e267aacfbbc8acdadb557d9c | licenses ["MIT"] | count null | events null .. null
max_forks: path tests/registries/test_matching_for_spawning.py | repo ztaylor54/kopf | head 214310c8f678fad5e267aacfbbc8acdadb557d9c | licenses ["MIT"] | count null | events null .. null
content:
import copy
import pytest
import kopf
from kopf.reactor.causation import ResourceSpawningCause
from kopf.structs.dicts import parse_field
from kopf.structs.filters import MetaFilterToken
from kopf.structs.handlers import ResourceDaemonHandler, \
ResourceSpawningHandler, ResourceTimerHandler
# Used in the tests. Must be global-scoped, or its qualname will be affected.
def some_fn(x=None):
pass
def _never(*_, **__):
return False
def _always(*_, **__):
return True
spawning_decorators = pytest.mark.parametrize('decorator', [
(kopf.timer),
(kopf.daemon),
])
@pytest.fixture()
def handler_factory(registry, resource):
def factory(**kwargs):
handler = ResourceSpawningHandler(**dict(dict(
fn=some_fn, id='a',
errors=None, timeout=None, retries=None, backoff=None, cooldown=None,
annotations=None, labels=None, when=None,
field=None, value=None,
requires_finalizer=None, initial_delay=None,
), **kwargs))
registry.resource_spawning_handlers[resource].append(handler)
return handler
return factory
@pytest.fixture(params=[
pytest.param(dict(body={}), id='no-field'),
])
def cause_no_field(request, cause_factory):
kwargs = copy.deepcopy(request.param)
kwargs['body'].update({'metadata': {'labels': {'somelabel': 'somevalue'},
'annotations': {'someannotation': 'somevalue'}}})
cause = cause_factory(cls=ResourceSpawningCause, **kwargs)
return cause
@pytest.fixture(params=[
pytest.param(dict(body={'some-field': 'new'}), id='with-field'),
])
def cause_with_field(request, cause_factory):
kwargs = copy.deepcopy(request.param)
kwargs['body'].update({'metadata': {'labels': {'somelabel': 'somevalue'},
'annotations': {'someannotation': 'somevalue'}}})
cause = cause_factory(cls=ResourceSpawningCause, **kwargs)
return cause
@pytest.fixture(params=[
# The original no-diff was equivalent to no-field until body/old/new were added to the check.
pytest.param(dict(body={}, diff=[]), id='no-field'),
pytest.param(dict(body={'some-field': 'new'}), id='with-field'),
])
def cause_any_field(request, cause_factory):
kwargs = copy.deepcopy(request.param)
kwargs['body'].update({'metadata': {'labels': {'somelabel': 'somevalue'},
'annotations': {'someannotation': 'somevalue'}}})
cause = cause_factory(cls=ResourceSpawningCause, **kwargs)
return cause
#
# "Catch-all" handlers are those with event == None.
#
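# (In the spawning registry there is no event filter as such; "catch-all"
# here corresponds to handlers registered with field=None and no other
# narrowing filters, as the first tests below exercise.)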
def test_catchall_handlers_without_field_found(
cause_any_field, registry, handler_factory):
cause = cause_any_field
handler_factory(field=None)
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
def test_catchall_handlers_with_field_found(
cause_with_field, registry, handler_factory):
cause = cause_with_field
handler_factory(field=parse_field('some-field'))
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
def test_catchall_handlers_with_field_ignored(
cause_no_field, registry, handler_factory):
cause = cause_no_field
handler_factory(field=parse_field('some-field'))
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@pytest.mark.parametrize('labels', [
pytest.param({'somelabel': 'somevalue'}, id='with-label'),
pytest.param({'somelabel': 'somevalue', 'otherlabel': 'othervalue'}, id='with-extra-label'),
])
def test_catchall_handlers_with_exact_labels_satisfied(
cause_factory, registry, handler_factory, resource, labels):
cause = cause_factory(body={'metadata': {'labels': labels}})
handler_factory(labels={'somelabel': 'somevalue'})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('labels', [
pytest.param({}, id='without-label'),
pytest.param({'somelabel': 'othervalue'}, id='with-other-value'),
pytest.param({'otherlabel': 'othervalue'}, id='with-other-label'),
])
def test_catchall_handlers_with_exact_labels_not_satisfied(
cause_factory, registry, handler_factory, resource, labels):
cause = cause_factory(body={'metadata': {'labels': labels}})
handler_factory(labels={'somelabel': 'somevalue'})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@pytest.mark.parametrize('labels', [
pytest.param({'somelabel': 'somevalue'}, id='with-label'),
pytest.param({'somelabel': 'othervalue'}, id='with-other-value'),
])
def test_catchall_handlers_with_desired_labels_present(
cause_factory, registry, handler_factory, resource, labels):
cause = cause_factory(body={'metadata': {'labels': labels}})
handler_factory(labels={'somelabel': MetaFilterToken.PRESENT})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('labels', [
pytest.param({}, id='without-label'),
pytest.param({'otherlabel': 'othervalue'}, id='with-other-label'),
])
def test_catchall_handlers_with_desired_labels_absent(
cause_factory, registry, handler_factory, resource, labels):
cause = cause_factory(body={'metadata': {'labels': labels}})
handler_factory(labels={'somelabel': MetaFilterToken.PRESENT})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@pytest.mark.parametrize('labels', [
pytest.param({'somelabel': 'somevalue'}, id='with-label'),
pytest.param({'somelabel': 'othervalue'}, id='with-other-value'),
])
def test_catchall_handlers_with_undesired_labels_present(
cause_factory, registry, handler_factory, resource, labels):
cause = cause_factory(body={'metadata': {'labels': labels}})
handler_factory(labels={'somelabel': MetaFilterToken.ABSENT})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@pytest.mark.parametrize('labels', [
pytest.param({}, id='without-label'),
pytest.param({'otherlabel': 'othervalue'}, id='with-other-label'),
])
def test_catchall_handlers_with_undesired_labels_absent(
cause_factory, registry, handler_factory, resource, labels):
cause = cause_factory(body={'metadata': {'labels': labels}})
handler_factory(labels={'somelabel': MetaFilterToken.ABSENT})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('labels', [
pytest.param({}, id='without-label'),
pytest.param({'somelabel': 'somevalue'}, id='with-label'),
pytest.param({'somelabel': 'othervalue'}, id='with-other-value'),
])
def test_catchall_handlers_with_labels_callback_says_true(
cause_factory, registry, handler_factory, resource, labels):
cause = cause_factory(body={'metadata': {'labels': labels}})
handler_factory(labels={'somelabel': _always})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('labels', [
pytest.param({}, id='without-label'),
pytest.param({'somelabel': 'somevalue'}, id='with-label'),
pytest.param({'somelabel': 'othervalue'}, id='with-other-value'),
])
def test_catchall_handlers_with_labels_callback_says_false(
cause_factory, registry, handler_factory, resource, labels):
cause = cause_factory(body={'metadata': {'labels': labels}})
handler_factory(labels={'somelabel': _never})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@pytest.mark.parametrize('labels', [
pytest.param({}, id='without-label'),
pytest.param({'somelabel': 'somevalue'}, id='with-label'),
pytest.param({'somelabel': 'othervalue'}, id='with-other-value'),
pytest.param({'otherlabel': 'othervalue'}, id='with-other-label'),
pytest.param({'somelabel': 'somevalue', 'otherlabel': 'othervalue'}, id='with-extra-label'),
])
def test_catchall_handlers_without_labels(
cause_factory, registry, handler_factory, resource, labels):
cause = cause_factory(body={'metadata': {'labels': labels}})
handler_factory(labels=None)
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('annotations', [
pytest.param({'someannotation': 'somevalue'}, id='with-annotation'),
pytest.param({'someannotation': 'somevalue', 'otherannotation': 'othervalue'}, id='with-extra-annotation'),
])
def test_catchall_handlers_with_exact_annotations_satisfied(
cause_factory, registry, handler_factory, resource, annotations):
cause = cause_factory(body={'metadata': {'annotations': annotations}})
handler_factory(annotations={'someannotation': 'somevalue'})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('annotations', [
pytest.param({}, id='without-annotation'),
pytest.param({'someannotation': 'othervalue'}, id='with-other-value'),
pytest.param({'otherannotation': 'othervalue'}, id='with-other-annotation'),
])
def test_catchall_handlers_with_exact_annotations_not_satisfied(
cause_factory, registry, handler_factory, resource, annotations):
cause = cause_factory(body={'metadata': {'annotations': annotations}})
handler_factory(annotations={'someannotation': 'somevalue'})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@pytest.mark.parametrize('annotations', [
pytest.param({'someannotation': 'somevalue'}, id='with-annotation'),
pytest.param({'someannotation': 'othervalue'}, id='with-other-value'),
])
def test_catchall_handlers_with_desired_annotations_present(
cause_factory, registry, handler_factory, resource, annotations):
cause = cause_factory(body={'metadata': {'annotations': annotations}})
handler_factory(annotations={'someannotation': MetaFilterToken.PRESENT})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('annotations', [
pytest.param({}, id='without-annotation'),
pytest.param({'otherannotation': 'othervalue'}, id='with-other-annotation'),
])
def test_catchall_handlers_with_desired_annotations_absent(
cause_factory, registry, handler_factory, resource, annotations):
cause = cause_factory(body={'metadata': {'annotations': annotations}})
handler_factory(annotations={'someannotation': MetaFilterToken.PRESENT})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@pytest.mark.parametrize('annotations', [
pytest.param({'someannotation': 'somevalue'}, id='with-annotation'),
pytest.param({'someannotation': 'othervalue'}, id='with-other-value'),
])
def test_catchall_handlers_with_undesired_annotations_present(
cause_factory, registry, handler_factory, resource, annotations):
cause = cause_factory(body={'metadata': {'annotations': annotations}})
handler_factory(annotations={'someannotation': MetaFilterToken.ABSENT})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@pytest.mark.parametrize('annotations', [
pytest.param({}, id='without-annotation'),
pytest.param({'otherannotation': 'othervalue'}, id='with-other-annotation'),
])
def test_catchall_handlers_with_undesired_annotations_absent(
cause_factory, registry, handler_factory, resource, annotations):
cause = cause_factory(body={'metadata': {'annotations': annotations}})
handler_factory(annotations={'someannotation': MetaFilterToken.ABSENT})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('annotations', [
pytest.param({}, id='without-annotation'),
pytest.param({'someannotation': 'somevalue'}, id='with-annotation'),
pytest.param({'someannotation': 'othervalue'}, id='with-other-value'),
])
def test_catchall_handlers_with_annotations_callback_says_true(
cause_factory, registry, handler_factory, resource, annotations):
cause = cause_factory(body={'metadata': {'annotations': annotations}})
handler_factory(annotations={'someannotation': _always})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('annotations', [
pytest.param({}, id='without-annotation'),
pytest.param({'someannotation': 'somevalue'}, id='with-annotation'),
pytest.param({'someannotation': 'othervalue'}, id='with-other-value'),
])
def test_catchall_handlers_with_annotations_callback_says_false(
cause_factory, registry, handler_factory, resource, annotations):
cause = cause_factory(body={'metadata': {'annotations': annotations}})
handler_factory(annotations={'someannotation': _never})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@pytest.mark.parametrize('annotations', [
pytest.param({}, id='without-annotation'),
pytest.param({'someannotation': 'somevalue'}, id='with-annotation'),
pytest.param({'someannotation': 'othervalue'}, id='with-other-value'),
pytest.param({'otherannotation': 'othervalue'}, id='with-other-annotation'),
pytest.param({'someannotation': 'somevalue', 'otherannotation': 'othervalue'}, id='with-extra-annotation'),
])
def test_catchall_handlers_without_annotations(
cause_factory, registry, handler_factory, resource, annotations):
cause = cause_factory(body={'metadata': {'annotations': annotations}})
handler_factory()
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('labels, annotations', [
pytest.param({'somelabel': 'somevalue'}, {'someannotation': 'somevalue'}, id='with-label-annotation'),
pytest.param({'somelabel': 'somevalue', 'otherlabel': 'othervalue'}, {'someannotation': 'somevalue'}, id='with-extra-label-annotation'),
pytest.param({'somelabel': 'somevalue'}, {'someannotation': 'somevalue', 'otherannotation': 'othervalue'}, id='with-label-extra-annotation'),
pytest.param({'somelabel': 'somevalue', 'otherlabel': 'othervalue'}, {'someannotation': 'somevalue', 'otherannotation': 'othervalue'}, id='with-extra-label-extra-annotation'),
])
def test_catchall_handlers_with_labels_and_annotations_satisfied(
cause_factory, registry, handler_factory, resource, labels, annotations):
cause = cause_factory(body={'metadata': {'labels': labels, 'annotations': annotations}})
handler_factory(labels={'somelabel': 'somevalue'}, annotations={'someannotation': 'somevalue'})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('labels', [
pytest.param({}, id='without-label'),
pytest.param({'somelabel': 'somevalue'}, id='with-label'),
pytest.param({'somelabel': 'othervalue'}, id='with-other-value'),
pytest.param({'otherlabel': 'othervalue'}, id='with-other-label'),
pytest.param({'somelabel': 'somevalue', 'otherlabel': 'othervalue'}, id='with-extra-label'),
])
def test_catchall_handlers_with_labels_and_annotations_not_satisfied(
cause_factory, registry, handler_factory, resource, labels):
cause = cause_factory(body={'metadata': {'labels': labels}})
handler_factory(labels={'somelabel': 'somevalue'}, annotations={'someannotation': 'somevalue'})
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@pytest.mark.parametrize('when', [
pytest.param(None, id='without-when'),
pytest.param(lambda body=None, **_: body['spec']['name'] == 'test', id='with-when'),
pytest.param(lambda **_: True, id='with-other-when'),
])
def test_catchall_handlers_with_when_callback_matching(
cause_factory, registry, handler_factory, resource, when):
cause = cause_factory(body={'spec': {'name': 'test'}})
handler_factory(when=when)
handlers = registry.resource_spawning_handlers[resource].get_handlers(cause)
assert handlers
@pytest.mark.parametrize('when', [
pytest.param(lambda body=None, **_: body['spec']['name'] != "test", id='with-when'),
pytest.param(lambda **_: False, id='with-other-when'),
])
def test_catchall_handlers_with_when_callback_mismatching(
cause_factory, registry, handler_factory, resource, when):
cause = cause_factory(body={'spec': {'name': 'test'}})
handler_factory(when=when)
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@spawning_decorators
def test_decorator_without_field_found(
cause_any_field, registry, resource, decorator):
@decorator(resource.group, resource.version, resource.plural, registry=registry,
field=None)
def some_fn(**_): ...
cause = cause_any_field
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@spawning_decorators
def test_decorator_with_field_found(
cause_with_field, registry, resource, decorator):
@decorator(resource.group, resource.version, resource.plural, registry=registry,
field='some-field')
def some_fn(**_): ...
cause = cause_with_field
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@spawning_decorators
def test_decorator_with_field_ignored(
cause_no_field, registry, resource, decorator):
@decorator(resource.group, resource.version, resource.plural, registry=registry,
field='some-field')
def some_fn(**_): ...
cause = cause_no_field
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@spawning_decorators
def test_decorator_with_labels_satisfied(
cause_any_field, registry, resource, decorator):
@decorator(resource.group, resource.version, resource.plural, registry=registry,
labels={'somelabel': MetaFilterToken.PRESENT})
def some_fn(**_): ...
cause = cause_any_field
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@spawning_decorators
def test_decorator_with_labels_not_satisfied(
cause_any_field, registry, resource, decorator):
@decorator(resource.group, resource.version, resource.plural, registry=registry,
labels={'otherlabel': MetaFilterToken.PRESENT})
def some_fn(**_): ...
cause = cause_any_field
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@spawning_decorators
def test_decorator_with_annotations_satisfied(
cause_any_field, registry, resource, decorator):
@decorator(resource.group, resource.version, resource.plural, registry=registry,
annotations={'someannotation': MetaFilterToken.PRESENT})
def some_fn(**_): ...
cause = cause_any_field
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@spawning_decorators
def test_decorator_with_annotations_not_satisfied(
cause_any_field, registry, resource, decorator):
@decorator(resource.group, resource.version, resource.plural, registry=registry,
annotations={'otherannotation': MetaFilterToken.PRESENT})
def some_fn(**_): ...
cause = cause_any_field
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
@spawning_decorators
def test_decorator_with_filter_satisfied(
cause_any_field, registry, resource, decorator):
@decorator(resource.group, resource.version, resource.plural, registry=registry,
when=_always)
def some_fn(**_): ...
cause = cause_any_field
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert handlers
@spawning_decorators
def test_decorator_with_filter_not_satisfied(
cause_any_field, registry, resource, decorator):
@decorator(resource.group, resource.version, resource.plural, registry=registry,
when=_never)
def some_fn(**_): ...
cause = cause_any_field
handlers = registry.resource_spawning_handlers[cause.resource].get_handlers(cause)
assert not handlers
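# Condensed illustration of the label/annotation filter semantics asserted by
# the tests above (hypothetical helper, not part of kopf): when a filter dict
# is declared, a handler matches only if every entry is satisfied by the
# object's metadata.
def _filters_match(spec, meta):
    for key, want in spec.items():
        if want is MetaFilterToken.PRESENT:
            ok = key in meta                # key must exist, any value
        elif want is MetaFilterToken.ABSENT:
            ok = key not in meta            # key must be missing
        elif callable(want):
            ok = want(meta.get(key))        # callback filters see the value
        else:
            ok = meta.get(key) == want      # exact-value filters
        if not ok:
            return False
    return True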
| avg_line_length: 41.260355 | max_line_length: 179 | alphanum_fraction: 0.725656 | qsc_code_num_words: 2,259 | qsc_code_num_chars: 20,919 | qsc_code_num_lines: 506 | (remaining qsc_* quality-signal columns omitted) |
--- next row ---
hexsha: 725e89cf08e313604d30246a177851cb1b47d417 | size: 137,474 | ext: py | lang: Python
max_stars: path closed/NVIDIA/configs/dlrm/Offline/__init__.py | repo ctuning/inference_results_v1.1 | head d9176eca28fcf6d7a05ccb97994362a76a1eb5ab | licenses ["Apache-2.0"] | count 12 | events 2021-09-23T08:05:57.000Z .. 2022-03-21T03:52:11.000Z
max_issues: path closed/NVIDIA/configs/dlrm/Offline/__init__.py | repo ctuning/inference_results_v1.1 | head d9176eca28fcf6d7a05ccb97994362a76a1eb5ab | licenses ["Apache-2.0"] | count 11 | events 2021-09-23T20:34:06.000Z .. 2022-01-22T07:58:02.000Z
max_forks: path closed/NVIDIA/configs/dlrm/Offline/__init__.py | repo ctuning/inference_results_v1.1 | head d9176eca28fcf6d7a05ccb97994362a76a1eb5ab | licenses ["Apache-2.0"] | count 16 | events 2021-09-23T20:26:38.000Z .. 2022-03-09T12:59:56.000Z
content:
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.getcwd())
from code.common.constants import Benchmark, Scenario
from code.common.system_list import System, Architecture, MIGConfiguration, MIGSlice
from configs.configuration import *
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GBx1(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 270000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GBx1_HighAccuracy(A100_PCIe_80GBx1):
pass
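# Note: each *_HighAccuracy class in this file subclasses its base config and
# adds nothing, so every field is inherited unchanged; only the registered
# AccuracyTarget (k_99 vs. k_99_9) differs.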
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GBx1_Triton(A100_PCIe_80GBx1):
system = System("A100-PCIe-80GB", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 270000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GBx1_HighAccuracy_Triton(A100_PCIe_80GBx1_Triton):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GBx8(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 2280000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
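    # numa_config appears to map GPUs to host CPU cores, one "gpus:cpus"
    # group per NUMA node joined by '&' (e.g. "3:0-15" pairs GPU 3 with
    # cores 0-15).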
numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GBx8_HighAccuracy(A100_PCIe_80GBx8):
pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GBx8_Triton(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 1600000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
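    # Triton-harness-specific overrides for this config.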
batch_triton_requests = True
buffer_manager_thread_count = 0
use_triton = True
gather_kernel_buffer_threshold = 64
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GBx8_HighAccuracy_Triton(A100_PCIe_80GBx8_Triton):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_PCIe_80GBx8_MaxQ(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 1690000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = False
numa_config = "3:0-7&2:8-15&1:16-23&0:24-31&7:32-39&6:40-47&5:48-55&4:56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
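    # MaxQ submissions cap per-GPU power (in watts) via power_limit.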
power_limit = 225
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_PCIe_80GBx8_HighAccuracy_MaxQ(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 1690000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = False
numa_config = "3:0-7&2:8-15&1:16-23&0:24-31&7:32-39&6:40-47&5:48-55&4:56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
power_limit = 225
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_PCIe_80GBx8_HighAccuracy_Triton_MaxQ(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 280000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-7&2:8-15&1:16-23&0:24-31&7:32-39&6:40-47&5:48-55&4:56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
power_limit = 225
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_PCIe_80GBx8_Triton_MaxQ(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 280000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-7&2:8-15&1:16-23&0:24-31&7:32-39&6:40-47&5:48-55&4:56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
power_limit = 225
use_triton = True
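
# aarch64 variants: the same tuning on ARM hosts. numa_config is still a
# TODO for the multi-GPU entries and is left as None below.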
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x1(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 1, cpu_arch=CPUArch.aarch64)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 270000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x1_HighAccuracy(A100_PCIe_80GB_aarch64x1):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x2(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 2, cpu_arch=CPUArch.aarch64)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 560000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
# TODO: set numa
numa_config = None
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x2_HighAccuracy(A100_PCIe_80GB_aarch64x2):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x4(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 4, cpu_arch=CPUArch.aarch64)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 1100000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
# TODO: set numa
numa_config = None
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x4_HighAccuracy(A100_PCIe_80GB_aarch64x4):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_PCIe_80GB_aarch64x4_MaxQ(BenchmarkConfiguration):
system = System("A100-PCIe-80GB", Architecture.Ampere, 4, cpu_arch=CPUArch.aarch64)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 800000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = False
# TODO: set numa
numa_config = None
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
power_limit = 225
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_PCIe_80GB_aarch64x4_HighAccuracy_MaxQ(A100_PCIe_80GB_aarch64x4_MaxQ):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_aarch64x1(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 1, cpu_arch=CPUArch.aarch64)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 270000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_aarch64x1_HighAccuracy(A100_PCIe_aarch64x1):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_aarch64x2(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 2, cpu_arch=CPUArch.aarch64)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 560000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
# TODO: set numa
numa_config = None
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_aarch64x2_HighAccuracy(A100_PCIe_aarch64x2):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_aarch64x4(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 4, cpu_arch=CPUArch.aarch64)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 1100000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
# TODO: set numa
numa_config = None
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_aarch64x4_HighAccuracy(A100_PCIe_aarch64x4):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_PCIe_aarch64x4_MaxQ(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 4, cpu_arch=CPUArch.aarch64)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 800000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = False
# TODO: set numa
numa_config = None
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
power_limit = 225
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_PCIe_aarch64x4_HighAccuracy_MaxQ(A100_PCIe_aarch64x4_MaxQ):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_MIG_1x1g5gb(BenchmarkConfiguration):
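    # A single 1g.5gb MIG instance: MIGSlice(num_gpcs, mem_gb) on GPU 0.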
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 5): 1}})
system = System("A100-PCIe", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
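    # Keep only ~30% of the embedding tables in the MIG slice's limited
    # memory; the remainder stays in host memory.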
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 36000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_MIG_1x1g5gb_HighAccuracy(A100_PCIe_MIG_1x1g5gb):
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_MIG_1x1g5gb_HighAccuracy_Triton(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 5): 1}})
system = System("A100-PCIe", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 36000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_MIG_1x1g5gb_Triton(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 5): 1}})
system = System("A100-PCIe", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 36000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIex1(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 270000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIex1_HighAccuracy(A100_PCIex1):
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIex1_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 270000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIex1_Triton(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 270000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIex8(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 2160000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIex8_HighAccuracy(A100_PCIex8):
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIex8_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 1600000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 0
use_triton = True
gather_kernel_buffer_threshold = 64
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIex8_Triton(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 1600000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 0
use_triton = True
gather_kernel_buffer_threshold = 64
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_PCIex8_MaxQ(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 1690000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = False
numa_config = "3:0-7&2:8-15&1:16-23&0:24-31&7:32-39&6:40-47&5:48-55&4:56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
power_limit = 225
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_PCIex8_HighAccuracy_MaxQ(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 1690000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = False
numa_config = "3:0-7&2:8-15&1:16-23&0:24-31&7:32-39&6:40-47&5:48-55&4:56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
power_limit = 225
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_PCIex8_HighAccuracy_Triton_MaxQ(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 280000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-7&2:8-15&1:16-23&0:24-31&7:32-39&6:40-47&5:48-55&4:56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
power_limit = 225
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_PCIex8_Triton_MaxQ(BenchmarkConfiguration):
system = System("A100-PCIe", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 315000
offline_expected_qps = 280000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-7&2:8-15&1:16-23&0:24-31&7:32-39&6:40-47&5:48-55&4:56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
power_limit = 225
use_triton = True
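
# A100-SXM-80GB configurations follow: MIG-sliced, single-GPU, and the
# 4-GPU DGX Station A100 entries.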
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 10): 1}})
system = System("A100-SXM-80GB", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 40000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
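# The HeteroMIG harness reuses the homogeneous MIG tuning unchanged.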
@ConfigRegistry.register(HarnessType.HeteroMIG, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb_Hetero(A100_SXM_80GB_MIG_1x1g10gb):
pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb_HighAccuracy(A100_SXM_80GB_MIG_1x1g10gb):
    pass
@ConfigRegistry.register(HarnessType.HeteroMIG, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb_Hetero_HighAccuracy(A100_SXM_80GB_MIG_1x1g10gb_HighAccuracy):
pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb_HighAccuracy_Triton(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 10): 1}})
system = System("A100-SXM-80GB", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 40000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb_Triton(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 10): 1}})
system = System("A100-SXM-80GB", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 40000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_56x1g10gb(BenchmarkConfiguration):
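    # 7 x 1g.10gb MIG slices on each of 8 GPUs: 56 instances in total.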
_mig_configuration = MIGConfiguration({
0: {MIGSlice(1, 10): 7},
1: {MIGSlice(1, 10): 7},
2: {MIGSlice(1, 10): 7},
3: {MIGSlice(1, 10): 7},
4: {MIGSlice(1, 10): 7},
5: {MIGSlice(1, 10): 7},
6: {MIGSlice(1, 10): 7},
7: {MIGSlice(1, 10): 7},
})
system = System("A100-SXM-80GB", Architecture.Ampere, 8, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 2240000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_56x1g10gb_HighAccuracy(A100_SXM_80GB_MIG_56x1g10gb):
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_56x1g10gb_HighAccuracy_Triton(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({
0: {MIGSlice(1, 10): 7},
1: {MIGSlice(1, 10): 7},
2: {MIGSlice(1, 10): 7},
3: {MIGSlice(1, 10): 7},
4: {MIGSlice(1, 10): 7},
5: {MIGSlice(1, 10): 7},
6: {MIGSlice(1, 10): 7},
7: {MIGSlice(1, 10): 7},
})
system = System("A100-SXM-80GB", Architecture.Ampere, 8, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 2240000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_56x1g10gb_Triton(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({
0: {MIGSlice(1, 10): 7},
1: {MIGSlice(1, 10): 7},
2: {MIGSlice(1, 10): 7},
3: {MIGSlice(1, 10): 7},
4: {MIGSlice(1, 10): 7},
5: {MIGSlice(1, 10): 7},
6: {MIGSlice(1, 10): 7},
7: {MIGSlice(1, 10): 7},
})
system = System("A100-SXM-80GB", Architecture.Ampere, 8, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 2240000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx1(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 310000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
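    # Input samples start in device memory on this system (the x4 DGX
    # Station configs below cannot use this; see their _notes).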
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx1_HighAccuracy(A100_SXM_80GBx1):
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx1_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 310000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 0
gather_kernel_buffer_threshold = 2
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx1_Triton(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 310000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 0
gather_kernel_buffer_threshold = 2
use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx4(BenchmarkConfiguration):
_system_alias = "DGX Station A100 - Red October"
_notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
system = System("A100-SXM-80GB", Architecture.Ampere, 4)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 1000000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-15,64-79&2:16-31,80-95&1:32-47,96-111&0:48-63,112-127"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx4_HighAccuracy(A100_SXM_80GBx4):
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx4_HighAccuracy_Triton(BenchmarkConfiguration):
_system_alias = "DGX Station A100 - Red October"
_notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
system = System("A100-SXM-80GB", Architecture.Ampere, 4)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 1000000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-15,64-79&2:16-31,80-95&1:32-47,96-111&0:48-63,112-127"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 0
gather_kernel_buffer_threshold = 2
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx4_Triton(BenchmarkConfiguration):
_system_alias = "DGX Station A100 - Red October"
_notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
system = System("A100-SXM-80GB", Architecture.Ampere, 4)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 1000000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-15,64-79&2:16-31,80-95&1:32-47,96-111&0:48-63,112-127"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 0
gather_kernel_buffer_threshold = 2
use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_SXM_80GBx4_MaxQ(BenchmarkConfiguration):
_system_alias = "DGX Station A100 - Red October"
_notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
system = System("A100-SXM-80GB", Architecture.Ampere, 4)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 1000000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-7,32-39&2:8-15,40-47&1:16-23,48-55&0:24-31,56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
power_limit = 250
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_SXM_80GBx4_HighAccuracy_MaxQ(BenchmarkConfiguration):
_system_alias = "DGX Station A100 - Red October"
_notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
system = System("A100-SXM-80GB", Architecture.Ampere, 4)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 1000000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-7,32-39&2:8-15,40-47&1:16-23,48-55&0:24-31,56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
power_limit = 250
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_SXM_80GBx4_HighAccuracy_Triton_MaxQ(BenchmarkConfiguration):
_system_alias = "DGX Station A100 - Red October"
_notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
system = System("A100-SXM-80GB", Architecture.Ampere, 4)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 1000000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-7,32-39&2:8-15,40-47&1:16-23,48-55&0:24-31,56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
buffer_manager_thread_count = 8
power_limit = 250
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_SXM_80GBx4_Triton_MaxQ(BenchmarkConfiguration):
_system_alias = "DGX Station A100 - Red October"
_notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
system = System("A100-SXM-80GB", Architecture.Ampere, 4)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 1000000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
use_jemalloc = True
numa_config = "3:0-7,32-39&2:8-15,40-47&1:16-23,48-55&0:24-31,56-63"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
buffer_manager_thread_count = 8
power_limit = 250
use_triton = True
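
# 8x A100-SXM-80GB (DGX-A100) configs. Unlike the 4-GPU "Red October" systems
# above, these set start_from_device = True, and they drop the per-GPU NUMA
# pinning and staging-thread knobs.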
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx8(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 2400000
max_pairs_per_staging_thread = 262100
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx8_HighAccuracy(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 2400000
max_pairs_per_staging_thread = 262100
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx8_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 2400000
max_pairs_per_staging_thread = 262100
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx8_Triton(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 2450000
max_pairs_per_staging_thread = 262100
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_SXM_80GBx8_MaxQ(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 2400000
max_pairs_per_staging_thread = 262100
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
power_limit = 275
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_SXM_80GBx8_HighAccuracy_MaxQ(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 2400000
max_pairs_per_staging_thread = 262100
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
power_limit = 275
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_SXM_80GBx8_HighAccuracy_Triton_MaxQ(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 2000000
max_pairs_per_staging_thread = 262100
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
power_limit = 275
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_SXM_80GBx8_Triton_MaxQ(BenchmarkConfiguration):
system = System("A100-SXM-80GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 334000
offline_expected_qps = 2000000
max_pairs_per_staging_thread = 262100
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
power_limit = 275
use_triton = True
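
# MIG configs: a single 1g.5gb slice of an A100-SXM4-40GB. The
# MIGConfiguration dict maps GPU index -> {MIGSlice(...): count};
# MIGSlice(1, 5) appears to encode one compute slice with 5 GB of memory.
# Batch size and QPS shrink accordingly, and only 30% of the embedding
# weights are kept on the GPU.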
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM4_40GB_MIG_1x1g5gb(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 5): 1}})
system = System("A100-SXM4-40GB", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 36000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM4_40GB_MIG_1x1g5gb_HighAccuracy(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 5): 1}})
system = System("A100-SXM4-40GB", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 36000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM4_40GB_MIG_1x1g5gb_HighAccuracy_Triton(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 5): 1}})
system = System("A100-SXM4-40GB", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 36000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM4_40GB_MIG_1x1g5gb_Triton(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 5): 1}})
system = System("A100-SXM4-40GB", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.3
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 51200
offline_expected_qps = 36000
max_pairs_per_staging_thread = 51200
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
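
# Full-GPU A100-SXM4-40GB configs, single-GPU first (start_from_device
# enabled, 310,000 QPS offline target).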
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM4_40GBx1(BenchmarkConfiguration):
system = System("A100-SXM4-40GB", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 310000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM4_40GBx1_HighAccuracy(BenchmarkConfiguration):
system = System("A100-SXM4-40GB", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 310000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM4_40GBx1_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("A100-SXM4-40GB", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 310000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 0
gather_kernel_buffer_threshold = 2
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM4_40GBx1_Triton(BenchmarkConfiguration):
system = System("A100-SXM4-40GB", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
deque_timeout_usec = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 310000
max_pairs_per_staging_thread = 262100
num_staging_batches = 8
num_staging_threads = 8
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 0
gather_kernel_buffer_threshold = 2
use_triton = True
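
# 8x A100-SXM4-40GB. Note that the Triton variants below target a much lower
# QPS (190,000 vs 2,120,000) and set start_from_device = False.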
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM4_40GBx8(BenchmarkConfiguration):
system = System("A100-SXM4-40GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 2120000
max_pairs_per_staging_thread = 262100
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM4_40GBx8_HighAccuracy(BenchmarkConfiguration):
system = System("A100-SXM4-40GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 2120000
max_pairs_per_staging_thread = 262100
start_from_device = True
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM4_40GBx8_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("A100-SXM4-40GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 190000
max_pairs_per_staging_thread = 262100
start_from_device = False
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM4_40GBx8_Triton(BenchmarkConfiguration):
system = System("A100-SXM4-40GB", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 190000
max_pairs_per_staging_thread = 262100
start_from_device = False
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
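
# A10 configs. The A10 parts keep 80% of the embedding weights on the GPU
# (embedding_weights_on_gpu_part = 0.8) and use a 204,000 batch size.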
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A10x1(BenchmarkConfiguration):
system = System("A10", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 204000
offline_expected_qps = 99000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A10x1_HighAccuracy(BenchmarkConfiguration):
system = System("A10", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 204000
offline_expected_qps = 99000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A10x1_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("A10", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 204000
offline_expected_qps = 99000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A10x1_Triton(BenchmarkConfiguration):
system = System("A10", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 204000
offline_expected_qps = 99000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
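
# 8x A10. The offline target scales 8x the single-GPU figure
# (99,000 -> 792,000), and numa_config binds GPUs 0-3 and 4-7 to what look
# like the two NUMA nodes of the host.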
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A10x8(BenchmarkConfiguration):
system = System("A10", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 204000
offline_expected_qps = 792000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
numa_config = "0-3:0-27,56-83&4-7:28-55,84-111"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A10x8_HighAccuracy(BenchmarkConfiguration):
system = System("A10", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 204000
offline_expected_qps = 792000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
numa_config = "0-3:0-27,56-83&4-7:28-55,84-111"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A10x8_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("A10", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 204000
offline_expected_qps = 792000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
numa_config = "0-3:0-27,56-83&4-7:28-55,84-111"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A10x8_Triton(BenchmarkConfiguration):
system = System("A10", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 204000
offline_expected_qps = 792000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
numa_config = "0-3:0-27,56-83&4-7:28-55,84-111"
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
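
# A30 MIG configs: one 1g.6gb slice. These disable the small-tile GEMM
# plugin, keep only 10% of the embedding weights on the GPU, and cap the
# workspace at 512 MiB (536870912 bytes).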
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30_MIG_1x1g6gb(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 6): 1}})
system = System("A30", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 1
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 12800
offline_expected_qps = 34000
max_pairs_per_staging_thread = 12800
num_staging_batches = 2
num_staging_threads = 2
use_jemalloc = True
workspace_size = 536870912
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30_MIG_1x1g6gb_HighAccuracy(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 6): 1}})
system = System("A30", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 1
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 12800
offline_expected_qps = 34000
max_pairs_per_staging_thread = 12800
num_staging_batches = 2
num_staging_threads = 2
use_jemalloc = True
workspace_size = 536870912
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
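
# The Hetero variants below reuse A30_MIG_1x1g6gb through subclassing,
# overriding only the QPS target; the high-accuracy Hetero config inherits
# everything unchanged.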
@ConfigRegistry.register(HarnessType.HeteroMIG, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30_MIG_1x1g6gb_Hetero(A30_MIG_1x1g6gb):
offline_expected_qps = 31117
@ConfigRegistry.register(HarnessType.HeteroMIG, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30_MIG_1x1g6gb_Hetero_HighAccuracy(A30_MIG_1x1g6gb_Hetero):
pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30_MIG_1x1g6gb_Triton(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({0: {MIGSlice(1, 6): 1}})
system = System("A30", Architecture.Ampere, 1, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 1
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 12800
offline_expected_qps = 34000
max_pairs_per_staging_thread = 12800
num_staging_batches = 2
num_staging_threads = 2
use_jemalloc = True
workspace_size = 536870912
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
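
# 32-way MIG: eight A30s, each split into four 1g.6gb slices
# (8 GPUs x 4 slices = 32 instances). Per-slice settings match the
# single-slice config, with the QPS target scaled to 1,088,000
# (= 32 x 34,000).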
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30_MIG_32x1g6gb(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({
0: {MIGSlice(1, 6): 4},
1: {MIGSlice(1, 6): 4},
2: {MIGSlice(1, 6): 4},
3: {MIGSlice(1, 6): 4},
4: {MIGSlice(1, 6): 4},
5: {MIGSlice(1, 6): 4},
6: {MIGSlice(1, 6): 4},
7: {MIGSlice(1, 6): 4},
})
system = System("A30", Architecture.Ampere, 8, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 1
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 12800
offline_expected_qps = 1088000
max_pairs_per_staging_thread = 12800
num_staging_batches = 2
num_staging_threads = 2
use_jemalloc = True
workspace_size = 536870912
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30_MIG_32x1g6gb_HighAccuracy(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({
0: {MIGSlice(1, 6): 4},
1: {MIGSlice(1, 6): 4},
2: {MIGSlice(1, 6): 4},
3: {MIGSlice(1, 6): 4},
4: {MIGSlice(1, 6): 4},
5: {MIGSlice(1, 6): 4},
6: {MIGSlice(1, 6): 4},
7: {MIGSlice(1, 6): 4},
})
system = System("A30", Architecture.Ampere, 8, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 1
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 12800
offline_expected_qps = 1088000
max_pairs_per_staging_thread = 12800
num_staging_batches = 2
num_staging_threads = 2
use_jemalloc = True
workspace_size = 536870912
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30_MIG_32x1g6gb_Triton(BenchmarkConfiguration):
_mig_configuration = MIGConfiguration({
0: {MIGSlice(1, 6): 4},
1: {MIGSlice(1, 6): 4},
2: {MIGSlice(1, 6): 4},
3: {MIGSlice(1, 6): 4},
4: {MIGSlice(1, 6): 4},
5: {MIGSlice(1, 6): 4},
6: {MIGSlice(1, 6): 4},
7: {MIGSlice(1, 6): 4},
})
system = System("A30", Architecture.Ampere, 8, mig_conf=_mig_configuration)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 1
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.1
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 12800
offline_expected_qps = 1088000
max_pairs_per_staging_thread = 12800
num_staging_batches = 2
num_staging_threads = 2
use_jemalloc = True
workspace_size = 536870912
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
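
# Full-GPU A30 configs. These re-enable the small-tile GEMM plugin and raise
# embedding_weights_on_gpu_part back to 0.8.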
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30x1(BenchmarkConfiguration):
system = System("A30", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 140000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30x1_HighAccuracy(BenchmarkConfiguration):
system = System("A30", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 140000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30x1_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("A30", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 140000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30x1_Triton(BenchmarkConfiguration):
system = System("A30", Architecture.Ampere, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 140000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
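
# 8x A30, with one NUMA node per GPU: "3:0-15&2:16-31&..." binds each GPU to
# its own 16-CPU range.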
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30x8(BenchmarkConfiguration):
system = System("A30", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 1120000
max_pairs_per_staging_thread = 262100
numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30x8_HighAccuracy(BenchmarkConfiguration):
system = System("A30", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 1120000
max_pairs_per_staging_thread = 262100
numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30x8_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("A30", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 1120000
max_pairs_per_staging_thread = 262100
numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30x8_Triton(BenchmarkConfiguration):
system = System("A30", Architecture.Ampere, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = False
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 128
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = True
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.8
gemm_plugin_fairshare_cache_size = 18
gpu_batch_size = 262100
offline_expected_qps = 1120000
max_pairs_per_staging_thread = 262100
numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
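
# T4 (Turing) configs. Relative to the Ampere parts above: the interleaved
# top-MLP is enabled, output_padding_granularity drops to 32, the small-tile
# GEMM plugin (and its fairshare cache) is dropped, and only half of the
# embedding weights are kept on the GPU.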
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x1(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 34000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x1_HighAccuracy(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 34000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x1_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 34000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x1_Triton(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 1)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 2
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 34000
max_pairs_per_staging_thread = 262100
num_staging_batches = 4
num_staging_threads = 4
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
buffer_manager_thread_count = 8
use_triton = True
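
# 20x T4: completion and staging are scaled up (complete_threads = 8,
# 80 staging threads, 64 staging batches) and jemalloc is disabled at this
# GPU count.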
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x20(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 20)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 8
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 680000
max_pairs_per_staging_thread = 262100
num_staging_batches = 64
num_staging_threads = 80
use_jemalloc = False
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x20_HighAccuracy(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 20)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 8
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 680000
max_pairs_per_staging_thread = 262100
num_staging_batches = 64
num_staging_threads = 80
use_jemalloc = False
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x20_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 20)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 8
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 360000
max_pairs_per_staging_thread = 262100
num_staging_batches = 64
num_staging_threads = 80
use_jemalloc = False
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x20_Triton(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 20)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 8
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 360000
max_pairs_per_staging_thread = 262100
num_staging_batches = 64
num_staging_threads = 80
use_jemalloc = False
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
batch_triton_requests = True
use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x8(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 8
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 272000
max_pairs_per_staging_thread = 262100
num_staging_batches = 16
num_staging_threads = 16
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x8_HighAccuracy(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 8
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 272000
max_pairs_per_staging_thread = 262100
num_staging_batches = 16
num_staging_threads = 16
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x8_HighAccuracy_Triton(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 8
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 254000
max_pairs_per_staging_thread = 262100
num_staging_batches = 16
num_staging_threads = 16
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x8_Triton(BenchmarkConfiguration):
system = System("T4", Architecture.Turing, 8)
check_contiguity = True
coalesced_tensor = True
enable_interleaved_top_mlp = True
gpu_copy_streams = 1
gpu_inference_streams = 1
gpu_num_bundles = 2
input_dtype = "int8"
input_format = "chw4"
output_padding_granularity = 32
precision = "int8"
sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
use_graphs = False
use_small_tile_gemm_plugin = False
complete_threads = 8
deque_timeout_usec = 1
embedding_weights_on_gpu_part = 0.5
gpu_batch_size = 262100
offline_expected_qps = 254000
max_pairs_per_staging_thread = 262100
num_staging_batches = 16
num_staging_threads = 16
use_jemalloc = True
scenario = Scenario.Offline
benchmark = Benchmark.DLRM
use_triton = True
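
# The ConfigRegistry/BenchmarkConfiguration machinery itself is not part of this
# excerpt. As a minimal, hedged sketch (names mirror the usage above, but the body
# is an assumption, not the actual MLPerf harness implementation), a decorator-based
# registry like the one these classes rely on can be as small as:
#
#     _REGISTRY = {}
#
#     def register(harness_type, accuracy_target, power_setting):
#         """Class decorator: file each config class under its registration key."""
#         def wrap(cls):
#             _REGISTRY[(cls.__name__, harness_type, accuracy_target, power_setting)] = cls
#             return cls
#         return wrap
#
# Registering a class files it under a (name, harness, accuracy, power) key so the
# harness can later look up the right configuration for a given run.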
| 38.123683
| 153
| 0.754354
| 17,910
| 137,474
| 5.392853
| 0.017253
| 0.052678
| 0.062556
| 0.08231
| 0.99033
| 0.989564
| 0.988104
| 0.98594
| 0.984532
| 0.979138
| 0
| 0.063116
| 0.169159
| 137,474
| 3,605
| 154
| 38.134258
| 0.782506
| 0.004895
| 0
| 0.959484
| 0
| 0.009004
| 0.189032
| 0.165798
| 0
| 0
| 0
| 0.000277
| 0
| 1
| 0
| false
| 0.004502
| 0.001501
| 0
| 0.939676
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
7295b77fafeb0a0dcde2ae2399f9538ec805ada2
| 14,079
|
py
|
Python
|
src/opendr/engine/data.py
|
CptPirx/opendr_internal
|
9204f254c4a32ce4298dd4b95cabaab8f60fd3c7
|
[
"Apache-2.0"
] | null | null | null |
src/opendr/engine/data.py
|
CptPirx/opendr_internal
|
9204f254c4a32ce4298dd4b95cabaab8f60fd3c7
|
[
"Apache-2.0"
] | null | null | null |
src/opendr/engine/data.py
|
CptPirx/opendr_internal
|
9204f254c4a32ce4298dd4b95cabaab8f60fd3c7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Aristotle University of Thessaloniki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC, abstractmethod
import numpy as np
import torch
from typing import Union
class Data(ABC):
    """
    Data abstract class allows for representing different types of data. This class serves as the basis for
    more complicated data types. For data classes, conversion from NumPy arrays (using the constructor) and to
    NumPy arrays (using the numpy() method) is supported to make the library compliant with the standard
    pipelines used by the computer vision and robotics communities.
    This class provides abstract methods for:
    - returning a NumPy compatible representation of data (numpy())
    """

    def __init__(self, data):
        # Subclasses are responsible for validating and storing the supplied data
        self._data = None

    @abstractmethod
    def numpy(self):
        """
        Returns a NumPy-compatible representation of data.
        :return: a NumPy-compatible representation of data
        :rtype: numpy.ndarray
        """
        pass

    @property
    def data(self):
        """
        Getter of data field.
        This returns the internal representation of the data (which might not be a NumPy array).
        :return: the actual data held by the object
        :rtype: Type of data
        """
        return self._data

    @data.setter
    def data(self, data):
        """
        Setter for data. This will perform the necessary type checking (if needed).
        :param: data to be stored in the object
        """
        self._data = data

    @abstractmethod
    def __str__(self):
        """
        Returns a human-friendly string-based representation of the data.
        :return: a human-friendly string-based representation of the data
        :rtype: str
        """
        pass
class Vector(Data):
    """
    A class used for representing multidimensional vector data.
    This class provides abstract methods for:
    - returning a NumPy compatible representation of data (numpy())
    """

    def __init__(self, data=None):
        super().__init__(data)
        if data is not None:
            self.data = data

    @property
    def data(self):
        """
        Getter of data. Vector class returns a float32 NumPy array.
        :return: the actual data held by the object
        :rtype: A float32 NumPy array
        """
        if self._data is None:
            raise ValueError("Vector is empty")
        return self._data

    @data.setter
    def data(self, data):
        """
        Setter for data.
        :param: data to be used for creating a vector
        """
        # Convert input data to a NumPy array
        # Note that this will also fail for non-numeric data (which is expected)
        data = np.asarray(data, dtype=np.float32)
        # Check if the supplied vector is 1D
        if len(data.shape) != 1:
            raise ValueError(
                "Only 1-D arrays are supported by Vector. Please supply a data object that can be cast "
                "into a 1-D NumPy array.")
        self._data = data

    def numpy(self):
        """
        Returns a NumPy-compatible representation of data.
        :return: a NumPy-compatible representation of data
        :rtype: numpy.ndarray
        """
        # Since this class stores the data as NumPy arrays, we can directly return the data
        return self.data

    def __str__(self):
        """
        Returns a human-friendly string-based representation of the data.
        :return: a human-friendly string-based representation of the data
        :rtype: str
        """
        return str(self.data)
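
# Example (illustrative values, not part of the original API docs):
#     v = Vector([1.0, 2.0, 3.0])   # any 1-D sequence castable to float32
#     v.numpy().dtype               # dtype('float32')
#     Vector([[1.0], [2.0]])        # raises ValueError: only 1-D data is accepted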
class Timeseries(Data):
    """
    A class used for representing multidimensional timeseries data.
    This class provides abstract methods for:
    - returning a NumPy compatible representation of data (numpy())
    """

    def __init__(self, data=None):
        super().__init__(data)
        if data is not None:
            self.data = data

    @property
    def data(self):
        """
        Getter of data. Timeseries class returns a float32 NumPy array.
        :return: the actual data held by the object
        :rtype: A float32 NumPy array
        """
        if self._data is None:
            raise ValueError("Timeseries is empty")
        return self._data

    @data.setter
    def data(self, data):
        """
        Setter for data.
        :param: data to be used for creating a timeseries
        """
        # Convert input data to a NumPy array
        # Note that this will also fail for non-numeric data (which is expected)
        data = np.asarray(data, dtype=np.float32)
        # Check if the supplied array is 2D
        if len(data.shape) != 2:
            raise ValueError(
                "Only 2-D arrays are supported by Timeseries. Please supply a data object that can be cast "
                "into a 2-D NumPy array. The first dimension corresponds to time and the second to the features.")
        self._data = data

    def numpy(self):
        """
        Returns a NumPy-compatible representation of data.
        :return: a NumPy-compatible representation of data
        :rtype: numpy.ndarray
        """
        # Since this class stores the data as NumPy arrays, we can directly return the data
        return self.data

    def __str__(self):
        """
        Returns a human-friendly string-based representation of the data.
        :return: a human-friendly string-based representation of the data
        :rtype: str
        """
        return str(self.data)
class Image(Data):
    """
    A class used for representing image data.
    This class provides abstract methods for:
    - returning a NumPy compatible representation of data (numpy())
    """

    def __init__(self, data=None, dtype=np.uint8):
        super().__init__(data)
        self.dtype = dtype
        if data is not None:
            self.data = data

    @property
    def data(self):
        """
        Getter of data. Image class returns a *dtype* NumPy array.
        :return: the actual data held by the object
        :rtype: A *dtype* NumPy array
        """
        if self._data is None:
            raise ValueError("Image is empty")
        return self._data

    @data.setter
    def data(self, data):
        """
        Setter for data.
        :param: data to be used for creating an image
        """
        # Convert input data to a NumPy array
        data = np.asarray(data, dtype=self.dtype)
        # Check if the supplied array is 3D, e.g. (width, height, channels)
        if len(data.shape) != 3:
            raise ValueError(
                "Only 3-D arrays are supported by Image. Please supply a data object that can be cast "
                "into a 3-D NumPy array.")
        self._data = data

    def numpy(self):
        """
        Returns a NumPy-compatible representation of data.
        :return: a NumPy-compatible representation of data
        :rtype: numpy.ndarray
        """
        # Since this class stores the data as NumPy arrays, we can directly return the data
        return self.data

    def __str__(self):
        """
        Returns a human-friendly string-based representation of the data.
        :return: a human-friendly string-based representation of the data
        :rtype: str
        """
        return str(self.data)
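
# Example (illustrative shape; stored as uint8 unless dtype is overridden):
#     img = Image(np.zeros((240, 320, 3)))              # 3-D (width, height, channels)
#     img.numpy().dtype                                 # dtype('uint8')
#     img_f = Image(np.zeros((240, 320, 3)), dtype=np.float32)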
class Video(Data):
    """
    A class used for representing video data.
    This class provides abstract methods for:
    - returning a NumPy compatible representation of data (numpy())
    """

    def __init__(self, data: Union[torch.Tensor, np.ndarray] = None):
        """Construct a new Video

        Args:
            data (Union[torch.Tensor, np.ndarray], optional):
                Video tensor of shape (channels, time_steps, height, width).
                Defaults to None.
        """
        super().__init__(data)
        if data is not None:
            self.data = data

    @property
    def data(self):
        """
        Getter of data. Video class returns a float32 NumPy array.
        :return: the actual data held by the object
        :rtype: A float32 NumPy array
        """
        if self._data is None:
            raise ValueError("Video is empty")
        return self._data

    @data.setter
    def data(self, data):
        """
        Setter for data.
        :param: data to be used for creating a video
        """
        # Convert input data to a NumPy array
        # Note that this will also fail for non-numeric data (which is expected)
        data = np.asarray(data, dtype=np.float32)
        # Check if the supplied array is 4D, e.g. (channels, time, height, width)
        if len(data.shape) != 4:
            raise ValueError(
                "Only 4-D arrays are supported by Video. Please supply a data object that can be cast "
                "into a 4-D NumPy array.")
        self._data = data

    def numpy(self):
        """
        Returns a NumPy-compatible representation of data.
        :return: a NumPy-compatible representation of data
        :rtype: numpy.ndarray
        """
        # Since this class stores the data as NumPy arrays, we can directly return the data
        return self.data

    def __str__(self):
        """
        Returns a human-friendly string-based representation of the data.
        :return: a human-friendly string-based representation of the data
        :rtype: str
        """
        return str(self.data)
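
# Example (illustrative shape):
#     vid = Video(np.zeros((3, 16, 112, 112)))   # (channels, time_steps, height, width)
#     vid.numpy().shape                          # (3, 16, 112, 112)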
class PointCloud(Data):
    """
    A class used for representing point cloud data.
    This class provides abstract methods for:
    - returning a NumPy compatible representation of data (numpy())
    """

    def __init__(self, data=None):
        super().__init__(data)
        if data is not None:
            self.data = data

    @property
    def data(self):
        """
        Getter of data. PointCloud class returns a float32 NumPy array.
        :return: the actual data held by the object
        :rtype: A float32 NumPy array in form [length x channels] where channels can be xyz[ref][rgb+]
        """
        if self._data is None:
            raise ValueError("Point Cloud is empty")
        return self._data

    @data.setter
    def data(self, data):
        """
        Setter for data.
        :param: data to be used for creating a point cloud
        """
        # Convert input data to a NumPy array
        # Note that this will also fail for non-numeric data (which is expected)
        data = np.asarray(data, dtype=np.float32)
        # Check if the supplied array is 2D, e.g. (length, channels)
        if len(data.shape) != 2:
            raise ValueError(
                "Only 2-D arrays are supported by PointCloud. Please supply a data object that can be cast "
                "into a 2-D NumPy array.")
        self._data = data

    def numpy(self):
        """
        Returns a NumPy-compatible representation of data.
        :return: a NumPy-compatible representation of data
        :rtype: numpy.ndarray
        """
        # Since this class stores the data as NumPy arrays, we can directly return the data
        return self.data

    def __str__(self):
        """
        Returns a human-friendly string-based representation of the data.
        :return: a human-friendly string-based representation of the data
        :rtype: str
        """
        return "Points: " + str(self.data)
class PointCloudWithCalibration(PointCloud):
    """
    A class used for representing point cloud data with camera-lidar calibration matrices.
    This class provides abstract methods for:
    - returning a NumPy compatible representation of data (numpy())
    """

    def __init__(self, data=None, calib=None, image_shape=None):
        super().__init__(data)
        if data is not None:
            self.data = data
        self.calib = calib
        self.image_shape = image_shape

    @property
    def data(self):
        """
        Getter of data. PointCloudWithCalibration class returns a float32 NumPy array representing a point cloud.
        :return: the actual data held by the object
        :rtype: A float32 NumPy array in form [length x channels] where channels can be xyz[ref][rgb+]
        """
        if self._data is None:
            raise ValueError("Point Cloud is empty")
        return self._data

    @data.setter
    def data(self, data):
        """
        Setter for data.
        :param: data to be used for creating a point cloud
        """
        # Convert input data to a NumPy array
        # Note that this will also fail for non-numeric data (which is expected)
        data = np.asarray(data, dtype=np.float32)
        # Check if the supplied array is 2D, e.g. (length, channels)
        if len(data.shape) != 2:
            raise ValueError(
                "Only 2-D arrays are supported by PointCloudWithCalibration. Please supply a data object "
                "that can be cast into a 2-D NumPy array.")
        self._data = data

    def numpy(self):
        """
        Returns a NumPy-compatible representation of data.
        :return: a NumPy-compatible representation of data
        :rtype: numpy.ndarray
        """
        # Since this class stores the data as NumPy arrays, we can directly return the data
        return self.data

    def __str__(self):
        """
        Returns a human-friendly string-based representation of the data.
        :return: a human-friendly string-based representation of the data
        :rtype: str
        """
        return "Points: " + str(self.data) + "\nCalib: " + str(self.calib)
| 29.64
| 114
| 0.610768
| 1,805
| 14,079
| 4.705817
| 0.122438
| 0.049918
| 0.039557
| 0.07417
| 0.797504
| 0.789852
| 0.769602
| 0.751707
| 0.742995
| 0.734048
| 0
| 0.006508
| 0.31238
| 14,079
| 474
| 115
| 29.702532
| 0.870881
| 0.507564
| 0
| 0.75974
| 0
| 0.006494
| 0.15513
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.227273
| false
| 0.012987
| 0.025974
| 0
| 0.422078
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
72db17878e806293980f92a1f33462f06fc90cbd
| 6,562
|
py
|
Python
|
tests/src/Diksha_TPD/TPD_Completion_percentage/check_with_collections.py
|
sreenivas8084/cQube
|
3352a13f41679d707979e287d1880f0723b27510
|
[
"MIT"
] | null | null | null |
tests/src/Diksha_TPD/TPD_Completion_percentage/check_with_collections.py
|
sreenivas8084/cQube
|
3352a13f41679d707979e287d1880f0723b27510
|
[
"MIT"
] | 2
|
2022-02-01T00:55:12.000Z
|
2022-03-29T22:29:09.000Z
|
tests/src/Diksha_TPD/TPD_Completion_percentage/check_with_collections.py
|
SreenivasNimmagadda/cQube
|
3352a13f41679d707979e287d1880f0723b27510
|
[
"MIT"
] | null | null | null |
import os
import time
from selenium.webdriver.support.select import Select
from Data.parameters import Data
from get_dir import pwd
from reuse_func import GetData


class collection_records:
    def __init__(self, driver):
        self.driver = driver

    def test_download_collection_options(self):
        self.data = GetData()
        count = 0
        self.p = pwd()
        self.driver.find_element_by_xpath(Data.hyper_link).click()
        self.data.page_loading(self.driver)
        colls = Select(self.driver.find_element_by_id(Data.coll_names))
        colcount = len(colls.options) - 1
        for i in range(1, len(colls.options)):
            colls.select_by_index(i)
            time.sleep(5)
            self.data.page_loading(self.driver)
            name = colls.options[i].text
            # self.driver.find_element_by_id(Data.Download).click()
            # time.sleep(3)
            # self.filename = self.p.get_download_dir() + "/completion_percentage_overall_undefined_" + self.data.get_current_date() + ".csv"
            # print(self.filename)
            # if not os.path.isfile(self.filename):
            #     print(colls.options[i].text, "csv file is not downloaded ")
            #     count += 1
            # self.data.page_loading(self.driver)
            # os.remove(self.filename)
        return colcount, count

    def test_districtwise_collections(self):
        self.data = GetData()
        count = 0
        self.p = pwd()
        self.driver.find_element_by_xpath(Data.hyper_link).click()
        self.data.page_loading(self.driver)
        district = Select(self.driver.find_element_by_id(Data.sar_district))
        colls = Select(self.driver.find_element_by_id(Data.coll_names))
        colcount = len(colls.options) - 1
        self.data.page_loading(self.driver)
        for j in range(len(district.options) - 3, len(district.options)):
            district.select_by_index(j)
            self.data.page_loading(self.driver)
            value = self.driver.find_element_by_id(Data.sar_district).get_attribute('value')
            value = value[4:]
            for i in range(1, len(colls.options)):
                colls.select_by_index(i)
                self.data.page_loading(self.driver)
                self.driver.find_element_by_id(Data.Download).click()
                time.sleep(3)
                self.filename = self.p.get_download_dir() + "/" + "completion_percentage_overall_" + value.strip() + '_' + self.data.get_current_date() + ".csv"
                print(self.filename)
                if not os.path.isfile(self.filename):
                    print(colls.options[i].text, "csv file is not downloaded ")
                    count += 1
                self.data.page_loading(self.driver)
                os.remove(self.filename)
        return colcount, count

    def test_blockwise_collections(self):
        self.data = GetData()
        count = 0
        self.p = pwd()
        self.driver.implicitly_wait(100)
        self.driver.find_element_by_xpath(Data.hyper_link).click()
        self.data.page_loading(self.driver)
        district = Select(self.driver.find_element_by_id(Data.sar_district))
        block = Select(self.driver.find_element_by_id(Data.sar_block))
        colls = Select(self.driver.find_element_by_id(Data.coll_names))
        colcount = len(colls.options) - 1
        self.data.page_loading(self.driver)
        for j in range(1, len(district.options) - 32):
            district.select_by_index(j)
            for k in range(1, len(block.options) - 2):
                block.select_by_index(k)
                self.data.page_loading(self.driver)
                value = self.driver.find_element_by_id(Data.sar_block).get_attribute('value')
                value = value[5:] + '_'
                for i in range(1, len(colls.options)):
                    colls.select_by_index(i)
                    self.data.page_loading(self.driver)
                    self.driver.find_element_by_id(Data.Download).click()
                    time.sleep(3)
                    self.filename = self.p.get_download_dir() + "/" + "completion_percentage_overall_" + value.strip() + '_' + self.data.get_current_date() + ".csv"
                    print(self.filename)
                    if not os.path.isfile(self.filename):
                        print(colls.options[i].text, "csv file is not downloaded ")
                        count += 1
                    self.data.page_loading(self.driver)
                    os.remove(self.filename)
        return colcount, count

    def test_clusterwise_collections(self):
        self.data = GetData()
        count = 0
        self.p = pwd()
        self.driver.implicitly_wait(100)
        self.driver.find_element_by_xpath(Data.hyper_link).click()
        self.data.page_loading(self.driver)
        district = Select(self.driver.find_element_by_id(Data.sar_district))
        block = Select(self.driver.find_element_by_id(Data.sar_block))
        cluster = Select(self.driver.find_element_by_id(Data.sar_cluster))
        colls = Select(self.driver.find_element_by_id(Data.coll_names))
        colcount = len(colls.options) - 1
        self.data.page_loading(self.driver)
        for j in range(1, len(district.options) - 32):
            district.select_by_index(j)
            for k in range(1, len(block.options) - 3):
                block.select_by_index(k)
                for m in range(1, len(cluster.options)):
                    cluster.select_by_index(m)
                    self.data.page_loading(self.driver)
                    value = self.driver.find_element_by_id(Data.sar_cluster).get_attribute('value')
                    value = value[5:] + '_'
                    for i in range(1, len(colls.options)):
                        colls.select_by_index(i)
                        self.data.page_loading(self.driver)
                        self.driver.find_element_by_id(Data.Download).click()
                        time.sleep(3)
                        self.filename = self.p.get_download_dir() + "/" + "completion_percentage_overall_" + value.strip() + '_' + self.data.get_current_date() + ".csv"
                        print(self.filename)
                        if not os.path.isfile(self.filename):
                            print(colls.options[i].text, "csv file is not downloaded ")
                            count += 1
                        self.data.page_loading(self.driver)
                        os.remove(self.filename)
        return colcount, count
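
    # Hedged refactoring sketch: the download-and-verify block above is repeated
    # verbatim in the three drill-down methods; it could be factored into a helper
    # like this (the method name is invented; the body mirrors the existing loops).
    def _verify_collection_downloads(self, colls, value):
        missing = 0
        for i in range(1, len(colls.options)):
            colls.select_by_index(i)
            self.data.page_loading(self.driver)
            self.driver.find_element_by_id(Data.Download).click()
            time.sleep(3)
            filename = self.p.get_download_dir() + "/" + "completion_percentage_overall_" + value.strip() + '_' + self.data.get_current_date() + ".csv"
            if not os.path.isfile(filename):
                print(colls.options[i].text, "csv file is not downloaded ")
                missing += 1
            self.data.page_loading(self.driver)
            os.remove(filename)
        return missing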
| 48.607407
| 168
| 0.590064
| 809
| 6,562
| 4.572312
| 0.116193
| 0.116248
| 0.079481
| 0.119221
| 0.893755
| 0.870235
| 0.862395
| 0.862395
| 0.862395
| 0.844823
| 0
| 0.009136
| 0.299451
| 6,562
| 135
| 169
| 48.607407
| 0.795519
| 0.061414
| 0
| 0.788136
| 0
| 0
| 0.033496
| 0.014634
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042373
| false
| 0
| 0.050847
| 0
| 0.135593
| 0.050847
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f459eecee1ea2867fc9688bf1e9ec4eebc1dcef6
| 169
|
py
|
Python
|
blackbox_mpc/trajectory_evaluators/__init__.py
|
wangsd01/blackbox_mpc
|
7876dee1bd85bde310e88741f5c63e3f7bd93916
|
[
"MIT"
] | 29
|
2020-10-20T08:14:45.000Z
|
2022-02-01T13:43:13.000Z
|
blackbox_mpc/trajectory_evaluators/__init__.py
|
wangsd01/blackbox_mpc
|
7876dee1bd85bde310e88741f5c63e3f7bd93916
|
[
"MIT"
] | 3
|
2020-11-27T13:25:08.000Z
|
2021-12-12T04:30:41.000Z
|
blackbox_mpc/trajectory_evaluators/__init__.py
|
wangsd01/blackbox_mpc
|
7876dee1bd85bde310e88741f5c63e3f7bd93916
|
[
"MIT"
] | 3
|
2021-04-15T14:23:41.000Z
|
2022-03-28T05:43:29.000Z
|
from blackbox_mpc.trajectory_evaluators.deterministic import DeterministicTrajectoryEvaluator
from blackbox_mpc.trajectory_evaluators.evaluator_base import EvaluatorBase
| 84.5
| 93
| 0.934911
| 17
| 169
| 9
| 0.647059
| 0.156863
| 0.196078
| 0.326797
| 0.457516
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04142
| 169
| 2
| 94
| 84.5
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f45aaaa2ef8eb2d82919b9ad0886885b2f1847c9
| 1,068
|
py
|
Python
|
src/app/mod_vote/models.py
|
tcouch360/WarwickQnA
|
904502ccc99a7f20692ea1a942ca2df34404c625
|
[
"MIT"
] | null | null | null |
src/app/mod_vote/models.py
|
tcouch360/WarwickQnA
|
904502ccc99a7f20692ea1a942ca2df34404c625
|
[
"MIT"
] | null | null | null |
src/app/mod_vote/models.py
|
tcouch360/WarwickQnA
|
904502ccc99a7f20692ea1a942ca2df34404c625
|
[
"MIT"
] | null | null | null |
from app import db


class Upvote(db.Model):
    """This is the model for the Upvote class."""
    __tablename__ = "upvotes"
    vote_id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.user_id'))
    question_id = db.Column(db.Integer, db.ForeignKey('questions.question_id'))
    answer_id = db.Column(db.Integer, db.ForeignKey('answers.answer_id'))
    comment_id = db.Column(db.Integer, db.ForeignKey('comments.comment_id'))

    def __repr__(self):
        return '<Upvote %r>' % self.vote_id


class Downvote(db.Model):
    """This is the model for the Downvote class."""
    __tablename__ = "downvotes"
    vote_id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.user_id'))
    question_id = db.Column(db.Integer, db.ForeignKey('questions.question_id'))
    answer_id = db.Column(db.Integer, db.ForeignKey('answers.answer_id'))
    comment_id = db.Column(db.Integer, db.ForeignKey('comments.comment_id'))

    def __repr__(self):
        return '<Downvote %r>' % self.vote_id
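
# Hedged usage sketch (requires a Flask application context; the IDs are illustrative):
#     vote = Upvote(user_id=1, question_id=42)
#     db.session.add(vote)
#     db.session.commit()
#     repr(vote)   # '<Upvote 1>' once vote_id is assigned by the primary key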
| 34.451613
| 79
| 0.701311
| 154
| 1,068
| 4.616883
| 0.233766
| 0.056259
| 0.140647
| 0.168776
| 0.793249
| 0.745429
| 0.745429
| 0.745429
| 0.745429
| 0.745429
| 0
| 0
| 0.152622
| 1,068
| 30
| 80
| 35.6
| 0.785635
| 0
| 0
| 0.631579
| 0
| 0
| 0.182556
| 0.042596
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.052632
| 0.105263
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
f463dd927c5920f1676a4bf69b50d4ad030442d7
| 9,119
|
py
|
Python
|
tests/test_timespan.py
|
aracnid/aracnid-utils
|
48c34f2e7320a5ad261a92d027c8b29814fd55e0
|
[
"MIT"
] | null | null | null |
tests/test_timespan.py
|
aracnid/aracnid-utils
|
48c34f2e7320a5ad261a92d027c8b29814fd55e0
|
[
"MIT"
] | null | null | null |
tests/test_timespan.py
|
aracnid/aracnid-utils
|
48c34f2e7320a5ad261a92d027c8b29814fd55e0
|
[
"MIT"
] | null | null | null |
"""Test functions for datetime_utils.py.
"""
from datetime import datetime, timedelta
from dateutil import tz
from aracnid_utils.datetime_utils import timespan
# initialize module variables
REF_BEGIN_STR = '2020-06-01T00:00:00-04:00'
REF_THRU_STR = '2020-06-08T00:00:00-04:00'
REF_BEGIN_ISO = '2020-W10'
REF_THRU_ISO = '2020-W25'
REF_WEEK_STR = '2020-W23'
REF_BEGIN_DATE_ONLY_STR = '2020-06-07'
REF_BEGIN_DATETIME_STR = '2020-06-07T00:00:00-04:00'
REF_THRU_DATE_ONLY_STR = '2020-06-07'
REF_THRU_DATETIME_STR = '2020-06-08T00:00:00-04:00'
def test_timespan_args_begin_str_and_thru_str():
"""Tests timespan arguments: begin_str, thru_str.
"""
start, end = timespan(begin_str=REF_BEGIN_STR, thru_str=REF_THRU_STR)
assert isinstance(start, datetime)
assert start.isoformat() == REF_BEGIN_STR
assert isinstance(end, datetime)
assert end.isoformat() == REF_THRU_STR
def test_timespan_args_begin_str_and_thru_str_none():
"""Tests timespan arguments: begin_str, thru_str=None.
"""
start, end = timespan(begin_str=REF_BEGIN_STR, thru_str=None)
end_now = datetime.now(tz.tzlocal())
assert isinstance(start, datetime)
assert start.isoformat() == REF_BEGIN_STR
assert isinstance(end, datetime)
assert end.isoformat()[0:18] == end_now.isoformat()[0:18]
def test_timespan_args_begin_str_and_thru_str_missing():
"""Tests timespan arguments: begin_str, thru_str missing.
"""
start, end = timespan(begin_str=REF_BEGIN_STR)
end_now = datetime.now(tz.tzlocal())
assert isinstance(start, datetime)
assert start.isoformat() == REF_BEGIN_STR
assert isinstance(end, datetime)
assert end.isoformat()[0:18] == end_now.isoformat()[0:18]
def test_timespan_args_begin_str_none_and_thru_str():
"""Tests timespan arguments: begin_str=None, thru_str.
"""
start, end = timespan(begin_str=None, thru_str=REF_THRU_STR)
start_first = datetime(2000, 1, 1, 0, 0).astimezone()
assert isinstance(start, datetime)
assert start.isoformat() == start_first.isoformat()
assert isinstance(end, datetime)
assert end.isoformat() == REF_THRU_STR
def test_timespan_args_begin_str_missing_and_thru_str():
"""Tests timespan arguments, begin_str missing, thru_str.
"""
start, end = timespan(thru_str=REF_THRU_STR)
start_first = datetime(2000, 1, 1, 0, 0).astimezone()
assert isinstance(start, datetime)
assert start.isoformat() == start_first.isoformat()
assert isinstance(end, datetime)
assert end.isoformat() == REF_THRU_STR
def test_timespan_args_begin_and_thru():
"""Tests timespan arguments: begin, thru.
"""
begin = datetime.fromisoformat(REF_BEGIN_STR)
thru = datetime.fromisoformat(REF_THRU_STR)
start, end = timespan(begin=begin, thru=thru)
assert isinstance(start, datetime)
assert start == begin
assert isinstance(end, datetime)
assert end == thru
def test_timespan_args_begin_and_thru_none():
"""Tests timespan arguments: begin, thru=None.
"""
begin = datetime.fromisoformat(REF_BEGIN_STR)
start, end = timespan(begin=begin, thru=None)
thru_now = datetime.now(tz.tzlocal())
assert isinstance(start, datetime)
assert start == begin
assert isinstance(end, datetime)
assert end.isoformat()[0:18] == thru_now.isoformat()[0:18]
def test_timespan_args_begin_and_thru_missing():
"""Tests timespan arguments: begin, thru missing.
"""
begin = datetime.fromisoformat(REF_BEGIN_STR)
start, end = timespan(begin=begin)
thru_now = datetime.now(tz.tzlocal())
assert isinstance(start, datetime)
assert start == begin
assert isinstance(end, datetime)
assert end.isoformat()[0:18] == thru_now.isoformat()[0:18]
def test_timespan_args_begin_none_and_thru():
"""Tests timespan arguments: begin=None, thru.
"""
thru = datetime.fromisoformat(REF_THRU_STR)
start, end = timespan(begin=None, thru=thru)
start_first = datetime(2000, 1, 1, 0, 0).astimezone()
assert isinstance(start, datetime)
assert start == start_first
assert isinstance(end, datetime)
assert end == thru
def test_timespan_args_begin_missing_and_thru():
"""Tests timespan arguments: begin missing, thru.
"""
thru = datetime.fromisoformat(REF_THRU_STR)
start, end = timespan(thru=thru)
start_first = datetime(2000, 1, 1, 0, 0).astimezone()
assert isinstance(start, datetime)
assert start == start_first
assert isinstance(end, datetime)
assert end == thru
def test_timespan_args_begin_iso_week_and_thru_iso_week():
"""Tests timespan arguments: begin iso week, thru iso week.
"""
start, end = timespan(begin_str=REF_BEGIN_ISO, thru_str=REF_THRU_ISO)
begin = datetime.fromisocalendar(2020, 10, 1).astimezone()
thru = datetime.fromisocalendar(2020, 25, 1).astimezone() + timedelta(days=7)
assert isinstance(start, datetime)
assert start == begin
assert isinstance(end, datetime)
assert end == thru
def test_timespan_args_begin_iso_week_and_thru_str_none():
"""Tests timespan arguments: begin iso week, thru_str=None.
"""
start, end = timespan(begin_str=REF_BEGIN_ISO, thru_str=None)
begin = datetime.fromisocalendar(2020, 10, 1).astimezone()
end_now = datetime.now(tz.tzlocal())
assert isinstance(start, datetime)
assert start == begin
assert isinstance(end, datetime)
assert end.isoformat()[0:18] == end_now.isoformat()[0:18]
def test_timespan_args_begin_iso_week_and_thru_str_missing():
"""Tests timespan arguments: begin iso week, thru_str missing.
"""
start, end = timespan(begin_str=REF_BEGIN_ISO)
begin = datetime.fromisocalendar(2020, 10, 1).astimezone()
end_now = datetime.now(tz.tzlocal())
assert isinstance(start, datetime)
assert start == begin
assert isinstance(end, datetime)
assert end.isoformat()[0:18] == end_now.isoformat()[0:18]
def test_timespan_args_begin_str_none_and_thru_iso_week():
"""Tests timespan arguments: begin_str=None, thru iso week.
"""
start, end = timespan(begin_str=None, thru_str=REF_THRU_ISO)
start_first = datetime(2000, 1, 1, 0, 0).astimezone()
thru = datetime.fromisocalendar(2020, 25, 1).astimezone() + timedelta(days=7)
assert isinstance(start, datetime)
assert start.isoformat() == start_first.isoformat()
assert isinstance(end, datetime)
assert end == thru
def test_timespan_args_begin_str_missing_and_thru_iso_week():
"""Tests timespan arguments: begin_str missing, thru iso week.
"""
start, end = timespan(thru_str=REF_THRU_ISO)
start_first = datetime(2000, 1, 1, 0, 0).astimezone()
thru = datetime.fromisocalendar(2020, 25, 1).astimezone() + timedelta(days=7)
assert isinstance(start, datetime)
assert start.isoformat() == start_first.isoformat()
assert isinstance(end, datetime)
assert end == thru
def test_timespan_args_week_str():
"""Tests timespan argument: week_str.
"""
start, end = timespan(week_str=REF_WEEK_STR)
assert isinstance(start, datetime)
assert start.isoformat() == REF_BEGIN_STR
assert isinstance(end, datetime)
assert end.isoformat() == REF_THRU_STR
def test_timespan_args_week_str_and_begin_str():
"""Tests timespan arguments: week_str, begin_str.
"""
start, end = timespan(week_str=REF_THRU_ISO, begin_str=REF_BEGIN_STR)
thru = datetime.fromisocalendar(2020, 25, 1).astimezone() + timedelta(days=7)
assert isinstance(start, datetime)
assert start.isoformat() == REF_BEGIN_STR
assert isinstance(end, datetime)
assert end.isoformat() == thru.isoformat()
def test_timespan_args_week_str_and_thru_str():
"""Tests timespan arguments: week_str, thru_str.
"""
start, end = timespan(week_str=REF_BEGIN_ISO, thru_str=REF_THRU_STR)
begin = datetime.fromisocalendar(2020, 10, 1).astimezone()
assert isinstance(start, datetime)
assert start.isoformat() == begin.isoformat()
assert isinstance(end, datetime)
assert end.isoformat() == REF_THRU_STR
def test_timespan_args_week_str_and_begin_str_and_thru_str():
"""Tests timespan arguments: week_str, begin_str, thru_str.
The week string is ignored in this case.
"""
start, end = timespan(week_str=REF_WEEK_STR, begin_str=REF_BEGIN_STR, thru_str=REF_THRU_STR)
assert isinstance(start, datetime)
assert start.isoformat() == REF_BEGIN_STR
assert isinstance(end, datetime)
assert end.isoformat() == REF_THRU_STR
def test_timespan_args_begin_date_only_str_and_thru_str():
"""Tests timespan arguments: begin_str date only, thru_str.
"""
start, _ = timespan(begin_str=REF_BEGIN_DATE_ONLY_STR)
assert isinstance(start, datetime)
assert start.isoformat() == REF_BEGIN_DATETIME_STR
def test_timespan_args_begin_str_and_thru_date_only_str():
"""Tests timespan arguments: begin_str date only, thru_str.
"""
_, end = timespan(
begin_str=REF_BEGIN_STR,
thru_str=REF_THRU_DATE_ONLY_STR)
assert isinstance(end, datetime)
assert end.isoformat() == REF_THRU_DATETIME_STR
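

# Taken together, the tests above pin down the timespan() contract; a condensed
# usage sketch (the result comments restate the assertions, nothing new):
if __name__ == '__main__':
    start, end = timespan(begin_str=REF_BEGIN_STR, thru_str=REF_THRU_STR)
    # start/end are timezone-aware datetimes equal to the inputs

    start, end = timespan(begin_str=REF_BEGIN_STR)
    # an omitted/None thru defaults to "now" in the local timezone

    start, end = timespan(thru_str=REF_THRU_ISO)
    # an omitted/None begin defaults to 2000-01-01 local time; an ISO week
    # string spans the whole week, so end is Monday of 2020-W25 plus 7 days
    print(start.isoformat(), end.isoformat())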
| 33.16
| 96
| 0.72157
| 1,252
| 9,119
| 4.970447
| 0.055112
| 0.062992
| 0.050619
| 0.064117
| 0.930098
| 0.907279
| 0.874819
| 0.83095
| 0.741604
| 0.690182
| 0
| 0.032873
| 0.166027
| 9,119
| 274
| 97
| 33.281022
| 0.785404
| 0.140366
| 0
| 0.62963
| 0
| 0
| 0.018658
| 0.012957
| 0
| 0
| 0
| 0
| 0.493827
| 1
| 0.12963
| false
| 0
| 0.018519
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f47bed46e176154fa36b996250084ec69a99f63b
| 454
|
py
|
Python
|
networkx_mod/algorithms/approximation/__init__.py
|
movingpictures83/MATria
|
d3dbd0d15e00dbc26db39ace0663868180fdc471
|
[
"BSD-3-Clause",
"MIT"
] | null | null | null |
networkx_mod/algorithms/approximation/__init__.py
|
movingpictures83/MATria
|
d3dbd0d15e00dbc26db39ace0663868180fdc471
|
[
"BSD-3-Clause",
"MIT"
] | null | null | null |
networkx_mod/algorithms/approximation/__init__.py
|
movingpictures83/MATria
|
d3dbd0d15e00dbc26db39ace0663868180fdc471
|
[
"BSD-3-Clause",
"MIT"
] | null | null | null |
from networkx_mod.algorithms.approximation.clustering_coefficient import *
from networkx_mod.algorithms.approximation.clique import *
from networkx_mod.algorithms.approximation.dominating_set import *
from networkx_mod.algorithms.approximation.independent_set import *
from networkx_mod.algorithms.approximation.matching import *
from networkx_mod.algorithms.approximation.ramsey import *
from networkx_mod.algorithms.approximation.vertex_cover import *
| 56.75
| 74
| 0.876652
| 53
| 454
| 7.301887
| 0.301887
| 0.217054
| 0.271318
| 0.452196
| 0.795866
| 0.697674
| 0.242894
| 0
| 0
| 0
| 0
| 0
| 0.061674
| 454
| 7
| 75
| 64.857143
| 0.908451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
be78ec4d982d331c3a8da0d2b2ca2d480c4d4f1e
| 370
|
py
|
Python
|
pyautofinance/common/analyzers/__init__.py
|
webclinic017/PyAutoFinance
|
532cb1c5418dd9eeb07f2f08646170cde1fe0303
|
[
"MIT"
] | null | null | null |
pyautofinance/common/analyzers/__init__.py
|
webclinic017/PyAutoFinance
|
532cb1c5418dd9eeb07f2f08646170cde1fe0303
|
[
"MIT"
] | null | null | null |
pyautofinance/common/analyzers/__init__.py
|
webclinic017/PyAutoFinance
|
532cb1c5418dd9eeb07f2f08646170cde1fe0303
|
[
"MIT"
] | 1
|
2022-02-24T09:18:13.000Z
|
2022-02-24T09:18:13.000Z
|
from pyautofinance.common.analyzers.analyzer import Analyzer
from pyautofinance.common.analyzers.ratios import *
from pyautofinance.common.analyzers.trade_list import TradeList
from pyautofinance.common.analyzers.returns import *
from pyautofinance.common.analyzers.metrics_pack import MetricsPack
from pyautofinance.common.analyzers.trade_analyzer import TradeAnalyzer
| 52.857143
| 71
| 0.881081
| 43
| 370
| 7.511628
| 0.348837
| 0.315789
| 0.427245
| 0.594427
| 0.365325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064865
| 370
| 6
| 72
| 61.666667
| 0.933526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
be8910201cd3edf8d70cd67636d71082af13ffcb
| 100
|
py
|
Python
|
brainlit/algorithms/__init__.py
|
vikramc1/brainlit
|
8ad30b34658c434f2b92434c118c76402c27e1d0
|
[
"Apache-2.0"
] | null | null | null |
brainlit/algorithms/__init__.py
|
vikramc1/brainlit
|
8ad30b34658c434f2b92434c118c76402c27e1d0
|
[
"Apache-2.0"
] | null | null | null |
brainlit/algorithms/__init__.py
|
vikramc1/brainlit
|
8ad30b34658c434f2b92434c118c76402c27e1d0
|
[
"Apache-2.0"
] | null | null | null |
import brainlit.algorithms.generate_fragments
from brainlit.algorithms.generate_fragments import *
| 25
| 52
| 0.88
| 11
| 100
| 7.818182
| 0.545455
| 0.418605
| 0.604651
| 0.813953
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07
| 100
| 3
| 53
| 33.333333
| 0.924731
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
22943310a3b2d239280280b7f98193f71cfd3952
| 17,189
|
py
|
Python
|
zerver/webhooks/github/tests.py
|
vabs22/zulip
|
470d0a481c0a990a81b2facc6dac63364791178a
|
[
"Apache-2.0"
] | null | null | null |
zerver/webhooks/github/tests.py
|
vabs22/zulip
|
470d0a481c0a990a81b2facc6dac63364791178a
|
[
"Apache-2.0"
] | 11
|
2020-06-05T18:43:08.000Z
|
2022-03-02T14:55:12.000Z
|
zerver/webhooks/github/tests.py
|
vabs22/zulip
|
470d0a481c0a990a81b2facc6dac63364791178a
|
[
"Apache-2.0"
] | null | null | null |
import ujson
from typing import Dict, Optional, Text

from zerver.models import Message
from zerver.lib.webhooks.git import COMMITS_LIMIT
from zerver.lib.test_classes import WebhookTestCase


class GithubV1HookTests(WebhookTestCase):
    STREAM_NAME = None  # type: Optional[Text]
    URL_TEMPLATE = u"/api/v1/external/github"
    FIXTURE_DIR_NAME = 'github'
    SEND_STREAM = False
    BRANCHES = None  # type: Optional[Text]

    push_content = u"""zbenjamin [pushed](https://github.com/zbenjamin/zulip-test/compare/4f9adc4777d5...b95449196980) 3 commits to branch master.
* Add baz ([48c329a](https://github.com/zbenjamin/zulip-test/commit/48c329a0b68a9a379ff195ee3f1c1f4ab0b2a89e))
* Baz needs to be longer ([06ebe5f](https://github.com/zbenjamin/zulip-test/commit/06ebe5f472a32f6f31fd2a665f0c7442b69cce72))
* Final edit to baz, I swear ([b954491](https://github.com/zbenjamin/zulip-test/commit/b95449196980507f08209bdfdc4f1d611689b7a8))"""

    def test_spam_branch_is_ignored(self):
        # type: () -> None
        self.SEND_STREAM = True
        self.STREAM_NAME = 'commits'
        self.BRANCHES = 'dev,staging'
        data = self.get_body('push')
        # We subscribe to the stream in this test, even though
        # it won't get written, to avoid failing for the wrong
        # reason.
        self.subscribe_to_stream(self.TEST_USER_EMAIL, self.STREAM_NAME)
        prior_count = Message.objects.count()
        result = self.client_post(self.URL_TEMPLATE, data)
        self.assert_json_success(result)
        after_count = Message.objects.count()
        self.assertEqual(prior_count, after_count)

    def get_body(self, fixture_name):
        # type: (Text) -> Dict[str, Text]
        api_key = self.get_api_key(self.TEST_USER_EMAIL)
        data = ujson.loads(self.fixture_data(self.FIXTURE_DIR_NAME, 'v1_' + fixture_name))
        data.update({'email': self.TEST_USER_EMAIL,
                     'api-key': api_key,
                     'payload': ujson.dumps(data['payload'])})
        if self.SEND_STREAM:
            data['stream'] = self.STREAM_NAME
        if self.BRANCHES is not None:
            data['branches'] = self.BRANCHES
        return data

    def basic_test(self, fixture_name, stream_name, expected_subject, expected_content, send_stream=False, branches=None):
        # type: (Text, Text, Text, Text, bool, Optional[Text]) -> None
        self.STREAM_NAME = stream_name
        self.SEND_STREAM = send_stream
        self.BRANCHES = branches
        self.send_and_test_stream_message(fixture_name, expected_subject, expected_content, content_type=None)

    def test_user_specified_branches(self):
        # type: () -> None
        self.basic_test('push', 'my_commits', 'zulip-test / master', self.push_content,
                        send_stream=True, branches="master,staging")

    def test_user_specified_stream(self):
        # type: () -> None
        """Around May 2013 the github webhook started to specify the stream.
        Before then, the stream was hard coded to "commits"."""
        self.basic_test('push', 'my_commits', 'zulip-test / master', self.push_content,
                        send_stream=True)

    def test_legacy_hook(self):
        # type: () -> None
        self.basic_test('push', 'commits', 'zulip-test / master', self.push_content)

    def test_push_multiple_commits(self):
        # type: () -> None
        commit_info = "* Add baz ([48c329a](https://github.com/zbenjamin/zulip-test/commit/48c329a0b68a9a379ff195ee3f1c1f4ab0b2a89e))\n"
        expected_content = "zbenjamin [pushed](https://github.com/zbenjamin/zulip-test/compare/4f9adc4777d5...b95449196980) 50 commits to branch master.\n\n{}[and {} more commit(s)]".format(
            commit_info * COMMITS_LIMIT,
            50 - COMMITS_LIMIT,
        )
        self.basic_test('push_commits_more_than_limit', 'commits', 'zulip-test / master', expected_content)

    def test_issues_opened(self):
        # type: () -> None
        self.basic_test('issues_opened', 'issues',
                        "zulip-test / Issue #5 The frobnicator doesn't work",
                        "zbenjamin opened [Issue #5](https://github.com/zbenjamin/zulip-test/issues/5)\n\n~~~ quote\nI tried changing the widgets, but I got:\r\n\r\nPermission denied: widgets are immutable\n~~~")

    def test_issue_comment(self):
        # type: () -> None
        self.basic_test('issue_comment', 'issues',
                        "zulip-test / Issue #5 The frobnicator doesn't work",
                        "zbenjamin [commented](https://github.com/zbenjamin/zulip-test/issues/5#issuecomment-23374280) on [Issue #5](https://github.com/zbenjamin/zulip-test/issues/5)\n\n~~~ quote\nWhoops, I did something wrong.\r\n\r\nI'm sorry.\n~~~")

    def test_issues_closed(self):
        # type: () -> None
        self.basic_test('issues_closed', 'issues',
                        "zulip-test / Issue #5 The frobnicator doesn't work",
                        "zbenjamin closed [Issue #5](https://github.com/zbenjamin/zulip-test/issues/5)")

    def test_pull_request_opened(self):
        # type: () -> None
        self.basic_test('pull_request_opened', 'commits',
                        "zulip-test / PR #7 Counting is hard.",
                        "lfaraone opened [PR #7](https://github.com/zbenjamin/zulip-test/pull/7)(assigned to lfaraone)\nfrom `patch-2` to `master`\n\n~~~ quote\nOmitted something I think?\n~~~")

    def test_pull_request_closed(self):
        # type: () -> None
        self.basic_test('pull_request_closed', 'commits',
                        "zulip-test / PR #7 Counting is hard.",
                        "zbenjamin closed [PR #7](https://github.com/zbenjamin/zulip-test/pull/7)")

    def test_pull_request_synchronize(self):
        # type: () -> None
        self.basic_test('pull_request_synchronize', 'commits',
                        "zulip-test / PR #13 Even more cowbell.",
                        "zbenjamin synchronized [PR #13](https://github.com/zbenjamin/zulip-test/pull/13)")

    def test_pull_request_comment(self):
        # type: () -> None
        self.basic_test('pull_request_comment', 'commits',
                        "zulip-test / PR #9 Less cowbell.",
                        "zbenjamin [commented](https://github.com/zbenjamin/zulip-test/pull/9#issuecomment-24771110) on [PR #9](https://github.com/zbenjamin/zulip-test/pull/9)\n\n~~~ quote\nYeah, who really needs more cowbell than we already have?\n~~~")

    def test_pull_request_comment_user_specified_stream(self):
        # type: () -> None
        self.basic_test('pull_request_comment', 'my_commits',
                        "zulip-test / PR #9 Less cowbell.",
                        "zbenjamin [commented](https://github.com/zbenjamin/zulip-test/pull/9#issuecomment-24771110) on [PR #9](https://github.com/zbenjamin/zulip-test/pull/9)\n\n~~~ quote\nYeah, who really needs more cowbell than we already have?\n~~~",
                        send_stream=True)

    def test_commit_comment(self):
        # type: () -> None
        self.basic_test('commit_comment', 'commits',
                        "zulip-test",
                        "zbenjamin [commented](https://github.com/zbenjamin/zulip-test/commit/7c994678d2f98797d299abed852d3ff9d0834533#commitcomment-4252302) on [7c99467](https://github.com/zbenjamin/zulip-test/commit/7c994678d2f98797d299abed852d3ff9d0834533)\n~~~ quote\nAre we sure this is enough cowbell?\n~~~")

    def test_commit_comment_line(self):
        # type: () -> None
        self.basic_test('commit_comment_line', 'commits',
                        "zulip-test",
                        "zbenjamin [commented](https://github.com/zbenjamin/zulip-test/commit/7c994678d2f98797d299abed852d3ff9d0834533#commitcomment-4252307) on [7c99467](https://github.com/zbenjamin/zulip-test/commit/7c994678d2f98797d299abed852d3ff9d0834533)\n~~~ quote\nThis line adds /unlucky/ cowbell (because of its line number). We should remove it.\n~~~")


class GithubV2HookTests(WebhookTestCase):
    STREAM_NAME = None  # type: Optional[Text]
    URL_TEMPLATE = u"/api/v1/external/github"
    FIXTURE_DIR_NAME = 'github'
    SEND_STREAM = False
    BRANCHES = None  # type: Optional[Text]

    push_content = """zbenjamin [pushed](https://github.com/zbenjamin/zulip-test/compare/4f9adc4777d5...b95449196980) 3 commits to branch master.
* Add baz ([48c329a](https://github.com/zbenjamin/zulip-test/commit/48c329a0b68a9a379ff195ee3f1c1f4ab0b2a89e))
* Baz needs to be longer ([06ebe5f](https://github.com/zbenjamin/zulip-test/commit/06ebe5f472a32f6f31fd2a665f0c7442b69cce72))
* Final edit to baz, I swear ([b954491](https://github.com/zbenjamin/zulip-test/commit/b95449196980507f08209bdfdc4f1d611689b7a8))"""

    def test_spam_branch_is_ignored(self):
        # type: () -> None
        self.SEND_STREAM = True
        self.STREAM_NAME = 'commits'
        self.BRANCHES = 'dev,staging'
        data = self.get_body('push')
        # We subscribe to the stream in this test, even though
        # it won't get written, to avoid failing for the wrong
        # reason.
        self.subscribe_to_stream(self.TEST_USER_EMAIL, self.STREAM_NAME)
        prior_count = Message.objects.count()
        result = self.client_post(self.URL_TEMPLATE, data)
        self.assert_json_success(result)
        after_count = Message.objects.count()
        self.assertEqual(prior_count, after_count)

    def get_body(self, fixture_name):
        # type: (Text) -> Dict[str, Text]
        api_key = self.get_api_key(self.TEST_USER_EMAIL)
        data = ujson.loads(self.fixture_data(self.FIXTURE_DIR_NAME, 'v2_' + fixture_name))
        data.update({'email': self.TEST_USER_EMAIL,
                     'api-key': api_key,
                     'payload': ujson.dumps(data['payload'])})
        if self.SEND_STREAM:
            data['stream'] = self.STREAM_NAME
        if self.BRANCHES is not None:
            data['branches'] = self.BRANCHES
        return data

    def basic_test(self, fixture_name, stream_name, expected_subject, expected_content, send_stream=False, branches=None):
        # type: (Text, Text, Text, Text, bool, Optional[Text]) -> None
        self.STREAM_NAME = stream_name
        self.SEND_STREAM = send_stream
        self.BRANCHES = branches
        self.send_and_test_stream_message(fixture_name, expected_subject, expected_content, content_type=None)

    def test_user_specified_branches(self):
        # type: () -> None
        self.basic_test('push', 'my_commits', 'zulip-test / master', self.push_content,
                        send_stream=True, branches="master,staging")

    def test_user_specified_stream(self):
        # type: () -> None
        """Around May 2013 the github webhook started to specify the stream.
        Before then, the stream was hard coded to "commits"."""
        self.basic_test('push', 'my_commits', 'zulip-test / master', self.push_content,
                        send_stream=True)

    def test_push_multiple_commits(self):
        # type: () -> None
        commit_info = "* Add baz ([48c329a](https://github.com/zbenjamin/zulip-test/commit/48c329a0b68a9a379ff195ee3f1c1f4ab0b2a89e))\n"
        expected_content = "zbenjamin [pushed](https://github.com/zbenjamin/zulip-test/compare/4f9adc4777d5...b95449196980) 50 commits to branch master.\n\n{}[and {} more commit(s)]".format(
            commit_info * COMMITS_LIMIT,
            50 - COMMITS_LIMIT,
        )
        self.basic_test('push_commits_more_than_limit', 'commits', 'zulip-test / master', expected_content)

    def test_push_multiple_committers(self):
        # type: () -> None
        commit_info = "* Add baz ([48c329a](https://github.com/zbenjamin/zulip-test/commit/48c329a0b68a9a379ff195ee3f1c1f4ab0b2a89e))\n"
        expected_content = "zbenjamin [pushed](https://github.com/zbenjamin/zulip-test/compare/4f9adc4777d5...b95449196980) 6 commits to branch master. Commits by tomasz (3), baxthehacker (2) and zbenjamin (1).\n\n{}* Add baz ([48c329a](https://github.com/zbenjamin/zulip-test/commit/48c329a0b68a9a379ff195ee3f1c1f4ab0b2a89e))".format(commit_info * 5)
        self.basic_test('push_multiple_committers', 'commits', 'zulip-test / master', expected_content)

    def test_push_multiple_committers_with_others(self):
        # type: () -> None
        commit_info = "* Final edit to baz, I swear ([b954491](https://github.com/zbenjamin/zulip-test/commit/b95449196980507f08209bdfdc4f1d611689b7a8))\n"
        expected_content = "zbenjamin [pushed](https://github.com/zbenjamin/zulip-test/compare/4f9adc4777d5...b95449196980) 10 commits to branch master. Commits by baxthehacker (4), James (3), Tomasz (2) and others (1).\n\n{}* Final edit to baz, I swear ([b954491](https://github.com/zbenjamin/zulip-test/commit/b95449196980507f08209bdfdc4f1d611689b7a8))".format(commit_info * 9)
        self.basic_test('push_multiple_committers_with_others', 'commits', 'zulip-test / master', expected_content)

    def test_legacy_hook(self):
        # type: () -> None
        self.basic_test('push', 'commits', 'zulip-test / master', self.push_content)

    def test_issues_opened(self):
        # type: () -> None
        self.basic_test('issues_opened', 'issues',
                        "zulip-test / Issue #5 The frobnicator doesn't work",
                        "zbenjamin opened [Issue #5](https://github.com/zbenjamin/zulip-test/issues/5)\n\n~~~ quote\nI tried changing the widgets, but I got:\r\n\r\nPermission denied: widgets are immutable\n~~~")

    def test_issue_comment(self):
        # type: () -> None
        self.basic_test('issue_comment', 'issues',
                        "zulip-test / Issue #5 The frobnicator doesn't work",
                        "zbenjamin [commented](https://github.com/zbenjamin/zulip-test/issues/5#issuecomment-23374280) on [Issue #5](https://github.com/zbenjamin/zulip-test/issues/5)\n\n~~~ quote\nWhoops, I did something wrong.\r\n\r\nI'm sorry.\n~~~")

    def test_issues_closed(self):
        # type: () -> None
        self.basic_test('issues_closed', 'issues',
                        "zulip-test / Issue #5 The frobnicator doesn't work",
                        "zbenjamin closed [Issue #5](https://github.com/zbenjamin/zulip-test/issues/5)")

    def test_pull_request_opened(self):
        # type: () -> None
        self.basic_test('pull_request_opened', 'commits',
                        "zulip-test / PR #7 Counting is hard.",
                        "lfaraone opened [PR #7](https://github.com/zbenjamin/zulip-test/pull/7)(assigned to lfaraone)\nfrom `patch-2` to `master`\n\n~~~ quote\nOmitted something I think?\n~~~")

    def test_pull_request_closed(self):
        # type: () -> None
        self.basic_test('pull_request_closed', 'commits',
                        "zulip-test / PR #7 Counting is hard.",
                        "zbenjamin closed [PR #7](https://github.com/zbenjamin/zulip-test/pull/7)")

    def test_pull_request_synchronize(self):
        # type: () -> None
        self.basic_test('pull_request_synchronize', 'commits',
                        "zulip-test / PR #13 Even more cowbell.",
                        "zbenjamin synchronized [PR #13](https://github.com/zbenjamin/zulip-test/pull/13)")

    def test_pull_request_comment(self):
        # type: () -> None
        self.basic_test('pull_request_comment', 'commits',
                        "zulip-test / PR #9 Less cowbell.",
                        "zbenjamin [commented](https://github.com/zbenjamin/zulip-test/pull/9#issuecomment-24771110) on [PR #9](https://github.com/zbenjamin/zulip-test/pull/9)\n\n~~~ quote\nYeah, who really needs more cowbell than we already have?\n~~~")

    def test_pull_request_comment_user_specified_stream(self):
        # type: () -> None
        self.basic_test('pull_request_comment', 'my_commits',
                        "zulip-test / PR #9 Less cowbell.",
                        "zbenjamin [commented](https://github.com/zbenjamin/zulip-test/pull/9#issuecomment-24771110) on [PR #9](https://github.com/zbenjamin/zulip-test/pull/9)\n\n~~~ quote\nYeah, who really needs more cowbell than we already have?\n~~~",
                        send_stream=True)

    def test_commit_comment(self):
        # type: () -> None
        self.basic_test('commit_comment', 'commits',
                        "zulip-test",
                        "zbenjamin [commented](https://github.com/zbenjamin/zulip-test/commit/7c994678d2f98797d299abed852d3ff9d0834533#commitcomment-4252302) on [7c99467](https://github.com/zbenjamin/zulip-test/commit/7c994678d2f98797d299abed852d3ff9d0834533)\n~~~ quote\nAre we sure this is enough cowbell?\n~~~")

    def test_commit_comment_line(self):
        # type: () -> None
        self.basic_test('commit_comment_line', 'commits',
                        "zulip-test",
                        "zbenjamin [commented](https://github.com/zbenjamin/zulip-test/commit/7c994678d2f98797d299abed852d3ff9d0834533#commitcomment-4252307) on [7c99467](https://github.com/zbenjamin/zulip-test/commit/7c994678d2f98797d299abed852d3ff9d0834533)\n~~~ quote\nThis line adds /unlucky/ cowbell (because of its line number). We should remove it.\n~~~")
| 57.296667
| 379
| 0.651929
| 2,091
| 17,189
| 5.203252
| 0.109039
| 0.064522
| 0.061765
| 0.101471
| 0.970772
| 0.96875
| 0.954963
| 0.954963
| 0.951563
| 0.951563
| 0
| 0.066315
| 0.217465
| 17,189
| 299
| 380
| 57.488294
| 0.742547
| 0.074466
| 0
| 0.899497
| 0
| 0.180905
| 0.501926
| 0.016038
| 0
| 0
| 0
| 0
| 0.020101
| 1
| 0.180905
| false
| 0
| 0.025126
| 0
| 0.286432
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22c337c4a8ee1717c495bb83d5071b986d2f1cc0
| 6,742
|
py
|
Python
|
data_list.py
|
tntek/PSAT-GDA
|
89647ee66692da02359be7ca240b96b5cdbab19f
|
[
"MIT"
] | 1
|
2022-02-22T07:33:57.000Z
|
2022-02-22T07:33:57.000Z
|
data_list.py
|
tntek/PSAT-GDA
|
89647ee66692da02359be7ca240b96b5cdbab19f
|
[
"MIT"
] | null | null | null |
data_list.py
|
tntek/PSAT-GDA
|
89647ee66692da02359be7ca240b96b5cdbab19f
|
[
"MIT"
] | null | null | null |
import torch
import numpy as np
import random
from PIL import Image
from torch.utils.data import Dataset
import os
import os.path
import cv2
import torchvision
from randaugment import RandAugment
import torchsample as ts
import copy
from torchvision import transforms
def make_dataset(image_list, labels):
if labels:
len_ = len(image_list)
images = [(image_list[i].strip(), labels[i, :]) for i in range(len_)]
else:
if len(image_list[0].split()) > 2:
images = [(val.split()[0], np.array([int(la) for la in val.split()[1:]])) for val in image_list]
else:
images = [(val.split()[0], int(val.split()[1])) for val in image_list]
return images
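make_dataset accepts either a pre-split labels array or whitespace-separated '<path> <label(s)>' lines; note that the multi-label branch is chosen by inspecting only the first line. A quick illustration with made-up entries:
single = ["a.jpg 3", "b.jpg 7"]
print(make_dataset(single, None))   # [('a.jpg', 3), ('b.jpg', 7)]
multi = ["c.jpg 0 1 0"]
print(make_dataset(multi, None))    # [('c.jpg', array([0, 1, 0]))]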
def rgb_loader(path):
with open(path, 'rb') as f:
with Image.open(f) as img:
return img.convert('RGB')
def l_loader(path):
with open(path, 'rb') as f:
with Image.open(f) as img:
return img.convert('L')
class ImageList(Dataset):
def __init__(self, image_list, labels=None, transform=None, target_transform=None, mode='RGB'):
imgs = make_dataset(image_list, labels)
if len(imgs) == 0:
raise(RuntimeError("Found 0 images in subfolders of: " + root + "\n"
"Supported image extensions are: " + ",".join(IMG_EXTENSIONS)))
self.imgs = imgs
self.transform = transform
self.target_transform = target_transform
if mode == 'RGB':
self.loader = rgb_loader
elif mode == 'L':
self.loader = l_loader
def __getitem__(self, index):
path, target = self.imgs[index]
img = self.loader(path)
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
def __len__(self):
return len(self.imgs)
class ImageList_idx(Dataset):
def __init__(self, image_list, labels=None, transform=None, target_transform=None, mode='RGB'):
imgs = make_dataset(image_list, labels)
if len(imgs) == 0:
raise(RuntimeError("Found 0 images in subfolders of: " + root + "\n"
"Supported image extensions are: " + ",".join(IMG_EXTENSIONS)))
self.imgs = imgs
self.transform = transform
self.target_transform = target_transform
if mode == 'RGB':
self.loader = rgb_loader
elif mode == 'L':
self.loader = l_loader
def __getitem__(self, index):
path, target = self.imgs[index]
img = self.loader(path)
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target, index
def __len__(self):
return len(self.imgs)
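Both classes plug into a standard DataLoader; ImageList_idx additionally returns the sample index, which is what lets callers maintain per-sample state (e.g. a pseudo-label memory bank). A minimal usage sketch, with the list file and transform as placeholders:
from torch.utils.data import DataLoader
txt_lines = open('image_list.txt').readlines()  # placeholder list of '<path> <label>' lines
tfm = transforms.Compose([transforms.Resize((224, 224)), transforms.ToTensor()])
loader = DataLoader(ImageList_idx(txt_lines, transform=tfm), batch_size=32, shuffle=True)
for img, target, idx in loader:
    pass  # idx indexes back into the original list for per-sample bookkeeping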
#================================================================
# 2021.05.09: original augmentation pipeline
#================================================================
class ImageList_idx_aug(Dataset):
def __init__(self, image_list, labels=None, transform=None, target_transform=None, mode='RGB'):
self.ra_obj = RandAugment()
self.committee_size = 1
resize_size = 256
crop_size = 224
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
self.transform_aug = copy.deepcopy(transform)
self.transform_aug.transforms.insert(0, self.ra_obj)
RandomRotate_1 = ts.transforms.RandomRotate(0.5)
self.rf_1 = transforms.Compose([
transforms.Resize((resize_size, resize_size)),
transforms.RandomCrop(crop_size),
transforms.ToTensor(),
RandomRotate_1,
normalize
])
imgs = make_dataset(image_list, labels)
if len(imgs) == 0:
raise(RuntimeError("Found 0 images in subfolders of: " + root + "\n"
"Supported image extensions are: " + ",".join(IMG_EXTENSIONS)))
self.imgs = imgs
self.transform = transform
self.target_transform = target_transform
if mode == 'RGB':
self.loader = rgb_loader
elif mode == 'L':
self.loader = l_loader
    def __getitem__(self, index):
        path, target = self.imgs[index]
        img = self.loader(path)
        # base (weakly augmented) view; fall back to the raw image if no transform was given
        data = self.transform(img) if self.transform is not None else img
        if self.target_transform is not None:
            target = self.target_transform(target)
        # committee of RandAugment-ed strong views
        rand_aug_lst = [self.transform_aug(img) for _ in range(self.committee_size)]
        return (data, rand_aug_lst), target, index
def __len__(self):
return len(self.imgs)
class ImageList_idx_fix(Dataset):
def __init__(self, image_list, labels=None, transform=None, target_transform=None, mode='RGB'):
self.ra_obj = RandAugment()
self.committee_size = 1
resize_size = 256
crop_size = 224
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
self.transform_aug = copy.deepcopy(transform)
self.transform_aug.transforms.insert(0, self.ra_obj)
RandomRotate_1 = ts.transforms.RandomRotate(0.5)
self.rf_1 = transforms.Compose([
transforms.Resize((resize_size, resize_size)),
transforms.RandomCrop(crop_size),
transforms.ToTensor(),
RandomRotate_1,
normalize
])
imgs = make_dataset(image_list, labels)
if len(imgs) == 0:
raise(RuntimeError("Found 0 images in subfolders of: " + root + "\n"
"Supported image extensions are: " + ",".join(IMG_EXTENSIONS)))
self.imgs = imgs
self.transform = transform
self.target_transform = target_transform
if mode == 'RGB':
self.loader = rgb_loader
elif mode == 'L':
self.loader = l_loader
    def __getitem__(self, index):
        path, target = self.imgs[index]
        img = self.loader(path)
        # base (weakly augmented) view; fall back to the raw image if no transform was given
        data = self.transform(img) if self.transform is not None else img
        if self.target_transform is not None:
            target = self.target_transform(target)
        # fixed resize/crop/rotate view returned alongside the base view
        img_1 = self.rf_1(img)
        re_ls = [img_1]
        return (data, re_ls), target, index
def __len__(self):
return len(self.imgs)
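The two augmented variants differ only in the extra view they return alongside the weak view: ImageList_idx_aug yields the RandAugment committee, ImageList_idx_fix the fixed resize/crop/rotate view. A sketch of unpacking one sample (dataset construction as above):
(weak, extra), target, idx = ImageList_idx_fix(txt_lines, transform=tfm)[0]
# extra == [fixed_view] here; for ImageList_idx_aug it holds committee_size RandAugment views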
| 33.04902
| 104
| 0.58054
| 814
| 6,742
| 4.617936
| 0.138821
| 0.079808
| 0.060654
| 0.053206
| 0.874435
| 0.874435
| 0.866986
| 0.866986
| 0.853152
| 0.831604
| 0
| 0.021707
| 0.296203
| 6,742
| 203
| 105
| 33.211823
| 0.770495
| 0.022842
| 0
| 0.792453
| 0
| 0
| 0.046794
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.09434
| false
| 0
| 0.081761
| 0.025157
| 0.27044
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22e5ee2cbf99a5259791e890ee671528cf73193a
| 50
|
py
|
Python
|
instance/config.py
|
angelakarenzi5/News-Highlight
|
3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62
|
[
"Unlicense"
] | null | null | null |
instance/config.py
|
angelakarenzi5/News-Highlight
|
3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62
|
[
"Unlicense"
] | null | null | null |
instance/config.py
|
angelakarenzi5/News-Highlight
|
3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62
|
[
"Unlicense"
] | null | null | null |
NEWS_API_KEY = '65f23e20a185406a962fb29e07fbf789'
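Shipping a literal key in instance/config.py is risky if the file is ever committed; a common alternative, assuming the key is exported in the environment, is:
import os
NEWS_API_KEY = os.environ.get('NEWS_API_KEY')  # read the secret from the environment instead of hard-coding it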
| 25
| 49
| 0.88
| 4
| 50
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.468085
| 0.06
| 50
| 1
| 50
| 50
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0.64
| 0.64
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe0251b15d7570adbf9585bef54eb6ec70777bfc
| 20,197
|
py
|
Python
|
model/prefix.py
|
jkx19/SQuAD_v2
|
97cd8d9fab0af5f46512018ee58f35cb3425fc6b
|
[
"MIT"
] | null | null | null |
model/prefix.py
|
jkx19/SQuAD_v2
|
97cd8d9fab0af5f46512018ee58f35cb3425fc6b
|
[
"MIT"
] | null | null | null |
model/prefix.py
|
jkx19/SQuAD_v2
|
97cd8d9fab0af5f46512018ee58f35cb3425fc6b
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn
from torch.nn import CrossEntropyLoss
from transformers import BertPreTrainedModel, BertModel
from transformers.modeling_outputs import QuestionAnsweringModelOutput
from transformers import RobertaModel, RobertaPreTrainedModel
from model.deberta import DebertaModel, DebertaPreTrainedModel
class BertForQuestionAnswering(BertPreTrainedModel):
_keys_to_ignore_on_load_unexpected = [r"pooler"]
def __init__(self, config):
super().__init__(config)
self.num_labels = config.num_labels
self.bert = BertModel(config, add_pooling_layer=False)
self.qa_outputs = torch.nn.Linear(config.hidden_size, config.num_labels)
for param in self.bert.parameters():
param.requires_grad = False
self.init_weights()
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
start_positions=None,
end_positions=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
        start_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for position (index) of the start of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (:obj:`sequence_length`). Positions outside of the
            sequence are not taken into account for computing the loss.
        end_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for position (index) of the end of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (:obj:`sequence_length`). Positions outside of the
            sequence are not taken into account for computing the loss.
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
outputs = self.bert(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = outputs[0]
logits = self.qa_outputs(sequence_output)
start_logits, end_logits = logits.split(1, dim=-1)
start_logits = start_logits.squeeze(-1).contiguous()
end_logits = end_logits.squeeze(-1).contiguous()
total_loss = None
if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, split adds a dimension
if len(start_positions.size()) > 1:
start_positions = start_positions.squeeze(-1)
if len(end_positions.size()) > 1:
end_positions = end_positions.squeeze(-1)
            # Sometimes the start/end positions are outside our model inputs; we ignore these terms
ignored_index = start_logits.size(1)
start_positions = start_positions.clamp(0, ignored_index)
end_positions = end_positions.clamp(0, ignored_index)
loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
if not return_dict:
output = (start_logits, end_logits) + outputs[2:]
return ((total_loss,) + output) if total_loss is not None else output
return QuestionAnsweringModelOutput(
loss=total_loss,
start_logits=start_logits,
end_logits=end_logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
class PrefixEncoder(torch.nn.Module):
def __init__(self, config):
super().__init__()
self.embedding = torch.nn.Embedding(config.pre_seq_len, config.num_hidden_layers * 2 * config.hidden_size)
# self.trans = torch.nn.Sequential(
# torch.nn.Linear(config.hidden_size, config.mid_dim),
# torch.nn.Tanh(),
# torch.nn.Linear(config.mid_dim, config.num_hidden_layers * 2 * config.hidden_size)
# )
def forward(self, prefix: torch.Tensor):
# prefix_tokens = self.embedding(prefix)
# past_key_values = self.trans(prefix_tokens)
past_key_values = self.embedding(prefix)
return past_key_values
class BertPrefixModel(BertPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.num_labels = config.num_labels
self.pre_seq_len = config.pre_seq_len
self.mid_dim = config.mid_dim
self.n_layer = config.num_hidden_layers
self.n_head = config.num_attention_heads
self.n_embd = config.hidden_size // config.num_attention_heads
self.bert = BertModel(config, add_pooling_layer=False)
self.qa_outputs = torch.nn.Linear(config.hidden_size, config.num_labels)
self.dropout = torch.nn.Dropout(config.dropout)
self.prefix_encoder = PrefixEncoder(config)
self.prefix_tokens = torch.arange(self.pre_seq_len).long()
for param in self.bert.parameters():
param.requires_grad = False
self.init_weights()
def get_prompt(self, batch_size):
prefix_tokens = self.prefix_tokens.unsqueeze(0).expand(batch_size, -1).to(self.bert.device)
past_key_values = self.prefix_encoder(prefix_tokens)
bsz, seqlen, _ = past_key_values.shape
past_key_values = past_key_values.view(
bsz,
seqlen,
self.n_layer * 2,
self.n_head,
self.n_embd
)
past_key_values = self.dropout(past_key_values)
past_key_values = past_key_values.permute([2, 0, 3, 1, 4]).split(2)
return past_key_values
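get_prompt turns the flat prefix embedding into the past_key_values layout the backbone expects: after view the tensor is (batch, pre_seq_len, 2 * n_layer, n_head, n_embd); permute([2, 0, 3, 1, 4]) moves the layer axis to the front, and split(2) yields one (2, batch, n_head, pre_seq_len, n_embd) key/value pair per layer. A standalone shape check with made-up sizes:
import torch
bsz, plen, n_layer, n_head, n_embd = 4, 8, 12, 12, 64
pkv = torch.zeros(bsz, plen, n_layer * 2, n_head, n_embd)
pkv = pkv.permute([2, 0, 3, 1, 4]).split(2)
print(len(pkv), pkv[0].shape)  # 12 torch.Size([2, 4, 12, 8, 64])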
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
start_positions=None,
end_positions=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
        start_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for position (index) of the start of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (:obj:`sequence_length`). Positions outside of the
            sequence are not taken into account for computing the loss.
        end_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for position (index) of the end of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (:obj:`sequence_length`). Positions outside of the
            sequence are not taken into account for computing the loss.
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
batch_size = input_ids.shape[0]
past_key_values = self.get_prompt(batch_size=batch_size)
prefix_attention_mask = torch.ones(batch_size, self.pre_seq_len).to(self.bert.device)
attention_mask = torch.cat((prefix_attention_mask, attention_mask), dim=1)
outputs = self.bert(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
past_key_values=past_key_values,
)
sequence_output = outputs[0]
logits = self.qa_outputs(sequence_output)
start_logits, end_logits = logits.split(1, dim=-1)
start_logits = start_logits.squeeze(-1).contiguous()
end_logits = end_logits.squeeze(-1).contiguous()
total_loss = None
if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, split adds a dimension
if len(start_positions.size()) > 1:
start_positions = start_positions.squeeze(-1)
if len(end_positions.size()) > 1:
end_positions = end_positions.squeeze(-1)
            # Sometimes the start/end positions are outside our model inputs; we ignore these terms
ignored_index = start_logits.size(1)
start_positions = start_positions.clamp(0, ignored_index)
end_positions = end_positions.clamp(0, ignored_index)
loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
if not return_dict:
output = (start_logits, end_logits) + outputs[2:]
return ((total_loss,) + output) if total_loss is not None else output
return QuestionAnsweringModelOutput(
loss=total_loss,
start_logits=start_logits,
end_logits=end_logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
class RobertaPrefixModel(RobertaPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.num_labels = config.num_labels
self.pre_seq_len = config.pre_seq_len
self.n_layer = config.num_hidden_layers
self.n_head = config.num_attention_heads
self.n_embd = config.hidden_size // config.num_attention_heads
self.roberta = RobertaModel(config, add_pooling_layer=False)
self.qa_outputs = torch.nn.Linear(config.hidden_size, config.num_labels)
self.init_weights()
self.dropout = torch.nn.Dropout(config.dropout)
self.prefix_encoder = PrefixEncoder(config)
self.prefix_tokens = torch.arange(self.pre_seq_len).long()
for param in self.roberta.parameters():
param.requires_grad = False
def get_prompt(self, batch_size):
prefix_tokens = self.prefix_tokens.unsqueeze(0).expand(batch_size, -1).to(self.roberta.device)
past_key_values = self.prefix_encoder(prefix_tokens)
bsz, seqlen, _ = past_key_values.shape
past_key_values = past_key_values.view(
bsz,
seqlen,
self.n_layer * 2,
self.n_head,
self.n_embd
)
past_key_values = self.dropout(past_key_values)
past_key_values = past_key_values.permute([2, 0, 3, 1, 4]).split(2)
return past_key_values
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
start_positions=None,
end_positions=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
        start_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for position (index) of the start of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (:obj:`sequence_length`). Positions outside of the
            sequence are not taken into account for computing the loss.
        end_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for position (index) of the end of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (:obj:`sequence_length`). Positions outside of the
            sequence are not taken into account for computing the loss.
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
batch_size = input_ids.shape[0]
past_key_values = self.get_prompt(batch_size=batch_size)
prefix_attention_mask = torch.ones(batch_size, self.pre_seq_len).to(self.roberta.device)
attention_mask = torch.cat((prefix_attention_mask, attention_mask), dim=1)
outputs = self.roberta(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
past_key_values=past_key_values,
)
sequence_output = outputs[0]
logits = self.qa_outputs(sequence_output)
start_logits, end_logits = logits.split(1, dim=-1)
start_logits = start_logits.squeeze(-1).contiguous()
end_logits = end_logits.squeeze(-1).contiguous()
total_loss = None
if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, split adds a dimension
if len(start_positions.size()) > 1:
start_positions = start_positions.squeeze(-1)
if len(end_positions.size()) > 1:
end_positions = end_positions.squeeze(-1)
            # Sometimes the start/end positions are outside our model inputs; we ignore these terms
ignored_index = start_logits.size(1)
start_positions = start_positions.clamp(0, ignored_index)
end_positions = end_positions.clamp(0, ignored_index)
loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
if not return_dict:
output = (start_logits, end_logits) + outputs[2:]
return ((total_loss,) + output) if total_loss is not None else output
return QuestionAnsweringModelOutput(
loss=total_loss,
start_logits=start_logits,
end_logits=end_logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
class DebertaPrefixModel(DebertaPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.num_labels = config.num_labels
self.deberta = DebertaModel(config)
self.dropout = torch.nn.Dropout(config.hidden_dropout_prob)
self.qa_outputs = torch.nn.Linear(config.hidden_size, config.num_labels)
self.init_weights()
for param in self.deberta.parameters():
param.requires_grad = False
self.pre_seq_len = config.pre_seq_len
self.mid_dim = config.mid_dim
self.n_layer = config.num_hidden_layers
self.n_head = config.num_attention_heads
self.n_embd = config.hidden_size // config.num_attention_heads
# Use a two layered MLP to encode the prefix
self.prefix_tokens = torch.arange(self.pre_seq_len).long()
self.prefix_encoder = PrefixEncoder(config)
deberta_param = 0
for name, param in self.deberta.named_parameters():
deberta_param += param.numel()
all_param = 0
for name, param in self.named_parameters():
all_param += param.numel()
total_param = all_param - deberta_param
print('total param is {}'.format(total_param)) # 9860105
def get_prompt(self, batch_size):
prefix_tokens = self.prefix_tokens.unsqueeze(0).expand(batch_size, -1).to(self.deberta.device)
past_key_values = self.prefix_encoder(prefix_tokens)
# bsz, seqlen, _ = past_key_values.shape
past_key_values = past_key_values.view(
batch_size,
self.pre_seq_len,
self.n_layer * 2,
self.n_head,
self.n_embd
)
past_key_values = self.dropout(past_key_values)
past_key_values = past_key_values.permute([2, 0, 3, 1, 4]).split(2)
return past_key_values
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
# head_mask=None,
inputs_embeds=None,
start_positions=None,
end_positions=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
        start_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for position (index) of the start of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (:obj:`sequence_length`). Positions outside of the
            sequence are not taken into account for computing the loss.
        end_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for position (index) of the end of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (:obj:`sequence_length`). Positions outside of the
            sequence are not taken into account for computing the loss.
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
batch_size = input_ids.shape[0]
past_key_values = self.get_prompt(batch_size=batch_size)
prefix_attention_mask = torch.ones(batch_size, self.pre_seq_len).to(self.deberta.device)
attention_mask = torch.cat((prefix_attention_mask, attention_mask), dim=1)
outputs = self.deberta(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
past_key_values=past_key_values,
)
sequence_output = outputs[0]
logits = self.qa_outputs(sequence_output)
start_logits, end_logits = logits.split(1, dim=-1)
start_logits = start_logits.squeeze(-1).contiguous()
end_logits = end_logits.squeeze(-1).contiguous()
total_loss = None
if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, split adds a dimension
if len(start_positions.size()) > 1:
start_positions = start_positions.squeeze(-1)
if len(end_positions.size()) > 1:
end_positions = end_positions.squeeze(-1)
            # Sometimes the start/end positions are outside our model inputs; we ignore these terms
ignored_index = start_logits.size(1)
start_positions = start_positions.clamp(0, ignored_index)
end_positions = end_positions.clamp(0, ignored_index)
loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
if not return_dict:
output = (start_logits, end_logits) + outputs[2:]
return ((total_loss,) + output) if total_loss is not None else output
return QuestionAnsweringModelOutput(
loss=total_loss,
start_logits=start_logits,
end_logits=end_logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
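With the embedding-only PrefixEncoder, the trainable-parameter total printed above decomposes as pre_seq_len * (2 * num_hidden_layers * hidden_size) for the prefix table plus hidden_size * 2 + 2 for qa_outputs (the inline # 9860105 reflects whatever configuration the authors ran). A sketch of the arithmetic with placeholder sizes:
pre_seq_len, n_layer, hidden = 16, 24, 1024  # placeholders, not read from any config
prefix_params = pre_seq_len * (2 * n_layer * hidden)
head_params = hidden * 2 + 2
print(prefix_params + head_params)  # 788482 for these sizes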
| 42.341719
| 118
| 0.653661
| 2,515
| 20,197
| 4.968588
| 0.067992
| 0.038412
| 0.040573
| 0.019206
| 0.911892
| 0.90645
| 0.897567
| 0.890925
| 0.884843
| 0.884843
| 0
| 0.006753
| 0.266772
| 20,197
| 477
| 119
| 42.341719
| 0.837059
| 0.189632
| 0
| 0.845506
| 0
| 0
| 0.001435
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036517
| false
| 0
| 0.019663
| 0
| 0.106742
| 0.002809
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3f1976f721f41575e9c2ba0e00ee84e903a9605
| 327,645
|
py
|
Python
|
ibm_cloud_networking_services/zones_settings_v1.py
|
IBM/networking-services-python-sdk
|
a19e47db6a5971562a502982d69a5868997245f3
|
[
"Apache-2.0"
] | 1
|
2022-03-15T02:13:25.000Z
|
2022-03-15T02:13:25.000Z
|
ibm_cloud_networking_services/zones_settings_v1.py
|
IBM/networking-services-python-sdk
|
a19e47db6a5971562a502982d69a5868997245f3
|
[
"Apache-2.0"
] | 57
|
2020-06-24T06:58:01.000Z
|
2022-03-28T14:52:33.000Z
|
ibm_cloud_networking_services/zones_settings_v1.py
|
IBM/networking-services-python-sdk
|
a19e47db6a5971562a502982d69a5868997245f3
|
[
"Apache-2.0"
] | 10
|
2020-06-23T04:09:28.000Z
|
2022-03-26T18:20:35.000Z
|
# coding: utf-8
# (C) Copyright IBM Corp. 2021.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# IBM OpenAPI SDK Code Generator Version: 3.29.1-b338fb38-20210313-010605
"""
CIS Zones Settings
"""
from datetime import datetime
from enum import Enum
from typing import Dict, List
import json
from ibm_cloud_sdk_core import BaseService, DetailedResponse
from ibm_cloud_sdk_core.authenticators.authenticator import Authenticator
from ibm_cloud_sdk_core.get_authenticator import get_authenticator_from_environment
from ibm_cloud_sdk_core.utils import convert_model, datetime_to_string, string_to_datetime
from .common import get_sdk_headers
##############################################################################
# Service
##############################################################################
class ZonesSettingsV1(BaseService):
"""The Zones Settings V1 service."""
DEFAULT_SERVICE_URL = 'https://api.cis.cloud.ibm.com'
DEFAULT_SERVICE_NAME = 'zones_settings'
@classmethod
def new_instance(cls,
crn: str,
zone_identifier: str,
service_name: str = DEFAULT_SERVICE_NAME,
) -> 'ZonesSettingsV1':
"""
Return a new client for the Zones Settings service using the specified
parameters and external configuration.
:param str crn: Full url-encoded cloud resource name (CRN) of resource
instance.
:param str zone_identifier: Zone identifier.
"""
if crn is None:
raise ValueError('crn must be provided')
if zone_identifier is None:
raise ValueError('zone_identifier must be provided')
authenticator = get_authenticator_from_environment(service_name)
service = cls(
crn,
zone_identifier,
authenticator
)
service.configure_service(service_name)
return service
def __init__(self,
crn: str,
zone_identifier: str,
authenticator: Authenticator = None,
) -> None:
"""
Construct a new client for the Zones Settings service.
:param str crn: Full url-encoded cloud resource name (CRN) of resource
instance.
:param str zone_identifier: Zone identifier.
:param Authenticator authenticator: The authenticator specifies the authentication mechanism.
Get up to date information from https://github.com/IBM/python-sdk-core/blob/master/README.md
about initializing the authenticator of your choice.
"""
if crn is None:
raise ValueError('crn must be provided')
if zone_identifier is None:
raise ValueError('zone_identifier must be provided')
BaseService.__init__(self,
service_url=self.DEFAULT_SERVICE_URL,
authenticator=authenticator)
self.crn = crn
self.zone_identifier = zone_identifier
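Constructing the client directly mirrors new_instance minus the environment lookup; a minimal sketch, assuming an IAM API key (IAMAuthenticator ships with ibm_cloud_sdk_core):
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
zs = ZonesSettingsV1(crn='<crn>',                  # placeholder CRN
                     zone_identifier='<zone-id>',  # placeholder zone
                     authenticator=IAMAuthenticator('<iam-apikey>'))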
#########################
# Zones Settings
#########################
def get_zone_dnssec(self,
**kwargs
) -> DetailedResponse:
"""
Get zone DNSSEC.
Get DNSSEC setting for a given zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ZonesDnssecResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_zone_dnssec')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/dnssec'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_zone_dnssec(self,
*,
status: str = None,
**kwargs
) -> DetailedResponse:
"""
Update zone DNSSEC.
        Update DNSSEC setting for a given zone.
:param str status: (optional) Status.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ZonesDnssecResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_zone_dnssec')
headers.update(sdk_headers)
data = {
'status': status
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/dnssec'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
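Every settings pair below follows the GET/PATCH shape of the two DNSSEC methods; a usage sketch continuing from a constructed client:
resp = zs.get_zone_dnssec()
print(resp.get_status_code(), resp.get_result())  # DetailedResponse: HTTP status plus parsed JSON body
zs.update_zone_dnssec(status='active')            # PATCH with body {"status": "active"}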
def get_zone_cname_flattening(self,
**kwargs
) -> DetailedResponse:
"""
Get zone CNAME flattening.
Get CNAME flattening setting for a given zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ZonesCnameFlatteningResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_zone_cname_flattening')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/cname_flattening'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_zone_cname_flattening(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update zone CNAME flattening.
        Update CNAME flattening setting for a given zone.
:param str value: (optional) Valid values are "flatten_at_root",
"flatten_all". "flatten_at_root" - Flatten CNAME at root domain. This is
the default value. "flatten_all" - Flatten all CNAME records under your
domain.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ZonesCnameFlatteningResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_zone_cname_flattening')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/cname_flattening'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_opportunistic_encryption(self,
**kwargs
) -> DetailedResponse:
"""
Get opportunistic encryption setting.
Get opportunistic encryption setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `OpportunisticEncryptionResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_opportunistic_encryption')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/opportunistic_encryption'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_opportunistic_encryption(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update opportunistic encryption setting.
Update opportunistic encryption setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `OpportunisticEncryptionResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_opportunistic_encryption')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/opportunistic_encryption'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_challenge_ttl(self,
**kwargs
) -> DetailedResponse:
"""
Get challenge TTL setting.
Get challenge TTL setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ChallengeTtlResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_challenge_ttl')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/challenge_ttl'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_challenge_ttl(self,
*,
value: int = None,
**kwargs
) -> DetailedResponse:
"""
Update challenge TTL setting.
Update challenge TTL setting for a zone.
:param int value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ChallengeTtlResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_challenge_ttl')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/challenge_ttl'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_automatic_https_rewrites(self,
**kwargs
) -> DetailedResponse:
"""
Get automatic https rewrites setting.
Get automatic https rewrites setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `AutomaticHttpsRewritesResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_automatic_https_rewrites')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/automatic_https_rewrites'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_automatic_https_rewrites(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update automatic https rewrites setting.
Update automatic https rewrites setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `AutomaticHttpsRewritesResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_automatic_https_rewrites')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/automatic_https_rewrites'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_true_client_ip(self,
**kwargs
) -> DetailedResponse:
"""
Get true client IP setting.
Get true client IP setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `TrueClientIpResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_true_client_ip')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/true_client_ip_header'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_true_client_ip(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update true client IP setting.
Update true client IP setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `TrueClientIpResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_true_client_ip')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/true_client_ip_header'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_always_use_https(self,
**kwargs
) -> DetailedResponse:
"""
Get always use https setting.
Get always use https setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `AlwaysUseHttpsResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_always_use_https')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/always_use_https'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_always_use_https(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update always use https setting.
Update always use https setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `AlwaysUseHttpsResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_always_use_https')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/always_use_https'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_image_size_optimization(self,
**kwargs
) -> DetailedResponse:
"""
Get image size optimization setting.
Get image size optimization setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ImageSizeOptimizationResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_image_size_optimization')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/image_size_optimization'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_image_size_optimization(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update image size optimization setting.
Update image size optimization setting for a zone.
:param str value: (optional) Valid values are "lossy", "off", "lossless".
"lossy" - The file size of JPEG images is reduced using lossy compression,
which may reduce visual quality. "off" - Disable Image Size Optimization.
"lossless" - Reduce the size of image files without impacting visual
quality.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ImageSizeOptimizationResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_image_size_optimization')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/image_size_optimization'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_script_load_optimization(self,
**kwargs
) -> DetailedResponse:
"""
Get script load optimization setting.
Get script load optimization setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ScriptLoadOptimizationResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_script_load_optimization')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/script_load_optimization'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_script_load_optimization(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update script load optimization setting.
Update script load optimization setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ScriptLoadOptimizationResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_script_load_optimization')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/script_load_optimization'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_image_load_optimization(self,
**kwargs
) -> DetailedResponse:
"""
        Get image load optimization setting.
        Get image load optimization setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ImageLoadOptimizationResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_image_load_optimization')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/image_load_optimization'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_image_load_optimization(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
        Update image load optimization setting.
        Update image load optimization setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ImageLoadOptimizationResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_image_load_optimization')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/image_load_optimization'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_minify(self,
**kwargs
) -> DetailedResponse:
"""
Get minify setting.
Get minify setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MinifyResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_minify')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/minify'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_minify(self,
*,
value: 'MinifySettingValue' = None,
**kwargs
) -> DetailedResponse:
"""
Update minify setting.
Update minify setting for a zone.
:param MinifySettingValue value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MinifyResp` object
"""
if value is not None:
value = convert_model(value)
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_minify')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/minify'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_min_tls_version(self,
**kwargs
) -> DetailedResponse:
"""
Get minimum TLS version setting.
Get minimum TLS version setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MinTlsVersionResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_min_tls_version')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/min_tls_version'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_min_tls_version(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update minimum TLS version setting.
Update minimum TLS version setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MinTlsVersionResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_min_tls_version')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/min_tls_version'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
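# Usage sketch (illustrative): raising the minimum TLS version. The accepted
# string values ('1.0' through '1.3') are an assumption based on common TLS
# version identifiers, not confirmed by this file.
#
#   resp = zones_settings.update_min_tls_version(value='1.2')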
def get_ip_geolocation(self,
**kwargs
) -> DetailedResponse:
"""
Get IP geolocation setting.
Get IP geolocation setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing an `IpGeolocationResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_ip_geolocation')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/ip_geolocation'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_ip_geolocation(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update IP geolocation setting.
Update IP geolocation setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing an `IpGeolocationResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_ip_geolocation')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/ip_geolocation'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
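# Usage sketch (illustrative): read-modify-write on the IP geolocation setting.
# The 'on'/'off' values are assumed; `zones_settings` is a hypothetical client.
#
#   current = zones_settings.get_ip_geolocation().get_result()['result']['value']
#   if current == 'off':
#       zones_settings.update_ip_geolocation(value='on')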
def get_server_side_exclude(self,
**kwargs
) -> DetailedResponse:
"""
Get server side exclude setting.
Get server side exclude setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ServerSideExcludeResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_server_side_exclude')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/server_side_exclude'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_server_side_exclude(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update server side exclude setting.
Update server side exclude setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ServerSideExcludeResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_server_side_exclude')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/server_side_exclude'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_security_header(self,
**kwargs
) -> DetailedResponse:
"""
Get HTTP strict transport security setting.
Get HTTP strict transport security setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `SecurityHeaderResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_security_header')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/security_header'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_security_header(self,
*,
value: 'SecurityHeaderSettingValue' = None,
**kwargs
) -> DetailedResponse:
"""
Update HTTP strict transport security setting.
Update HTTP strict transport security setting for a zone.
:param SecurityHeaderSettingValue value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `SecurityHeaderResp` object
"""
if value is not None:
value = convert_model(value)
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_security_header')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/security_header'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
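# Usage sketch (illustrative): every operation accepts extra request headers
# through the documented `headers` keyword, which is merged into the SDK
# headers built above. The header name shown is arbitrary.
#
#   resp = zones_settings.get_security_header(headers={'X-Correlation-Id': 'abc-123'})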
def get_mobile_redirect(self,
**kwargs
) -> DetailedResponse:
"""
Get mobile redirect setting.
Get mobile redirect setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MobileRedirectResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_mobile_redirect')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/mobile_redirect'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_mobile_redirect(self,
*,
value: 'MobileRedirecSettingValue' = None,
**kwargs
) -> DetailedResponse:
"""
Update mobile redirect setting.
Update mobile redirect setting for a zone.
:param MobileRedirecSettingValue value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MobileRedirectResp` object
"""
if value is not None:
value = convert_model(value)
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_mobile_redirect')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/mobile_redirect'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
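# Usage sketch (illustrative): the value model for this operation is named
# MobileRedirecSettingValue in this SDK (as spelled in the type hint above);
# its status/mobile_subdomain/strip_uri fields are assumptions.
#
#   value = MobileRedirecSettingValue(status='on', mobile_subdomain='m', strip_uri=False)
#   zones_settings.update_mobile_redirect(value=value)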
def get_prefetch_preload(self,
**kwargs
) -> DetailedResponse:
"""
Get prefetch URLs from header setting.
Get prefetch URLs from header setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `PrefetchPreloadResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_prefetch_preload')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/prefetch_preload'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_prefetch_preload(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update prefetch URLs from header setting.
Update prefetch URLs from header setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `PrefetchPreloadResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_prefetch_preload')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/prefetch_preload'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_http2(self,
**kwargs
) -> DetailedResponse:
"""
Get HTTP/2 setting.
Get HTTP/2 setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing an `Http2Resp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_http2')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/http2'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_http2(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update HTTP/2 setting.
Update HTTP/2 setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing an `Http2Resp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_http2')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/http2'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
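# Usage sketch (illustrative): error handling. Clients built on
# ibm_cloud_sdk_core typically raise ApiException for non-2xx responses.
#
#   from ibm_cloud_sdk_core import ApiException
#   try:
#       zones_settings.update_http2(value='on')
#   except ApiException as e:
#       print('update_http2 failed:', e.code, e.message)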
def get_http3(self,
**kwargs
) -> DetailedResponse:
"""
Get HTTP/3 setting.
Get HTTP/3 setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing an `Http3Resp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_http3')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/http3'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_http3(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update HTTP/3 setting.
Update HTTP/3 setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing an `Http3Resp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_http3')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/http3'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_ipv6(self,
**kwargs
) -> DetailedResponse:
"""
Get IPv6 compatibility setting.
Get IPv6 compatibility setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing an `Ipv6Resp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_ipv6')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/ipv6'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_ipv6(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update IPv6 compatibility setting.
Update IPv6 compatibility setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing an `Ipv6Resp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_ipv6')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/ipv6'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
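# Usage sketch (illustrative): the HTTP status code travels on the returned
# DetailedResponse.
#
#   resp = zones_settings.update_ipv6(value='on')
#   print(resp.get_status_code())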
def get_web_sockets(self,
**kwargs
) -> DetailedResponse:
"""
Get web sockets setting.
Get web sockets setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `WebsocketsResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_web_sockets')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/websockets'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_web_sockets(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update web sockets setting.
Update web sockets setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `WebsocketsResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_web_sockets')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/websockets'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_pseudo_ipv4(self,
**kwargs
) -> DetailedResponse:
"""
Get pseudo IPv4 setting.
Get pseudo IPv4 setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `PseudoIpv4Resp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_pseudo_ipv4')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/pseudo_ipv4'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_pseudo_ipv4(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update pseudo IPv4 setting.
Update pseudo IPv4 setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `PseudoIpv4Resp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_pseudo_ipv4')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/pseudo_ipv4'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
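# Usage sketch (illustrative): the pseudo IPv4 values 'off', 'add_header' and
# 'overwrite_header' are an assumption from the upstream service behavior and
# are not confirmed by this file.
#
#   zones_settings.update_pseudo_ipv4(value='add_header')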
def get_response_buffering(self,
**kwargs
) -> DetailedResponse:
"""
Get response buffering setting.
Get response buffering setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ResponseBufferingResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_response_buffering')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/response_buffering'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_response_buffering(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update response buffering setting.
Update response buffering setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ResponseBufferingResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_response_buffering')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/response_buffering'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_hotlink_protection(self,
**kwargs
) -> DetailedResponse:
"""
Get hotlink protection setting.
Get hotlink protection setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `HotlinkProtectionResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_hotlink_protection')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/hotlink_protection'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_hotlink_protection(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update hotlink protection setting.
Update hotlink protection setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `HotlinkProtectionResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_hotlink_protection')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/hotlink_protection'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_max_upload(self,
**kwargs
) -> DetailedResponse:
"""
Get maximum upload size setting.
Get maximum upload size setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MaxUploadResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_max_upload')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/max_upload'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_max_upload(self,
*,
value: int = None,
**kwargs
) -> DetailedResponse:
"""
Update maximum upload size setting.
Update maximum upload size setting for a zone.
:param int value: (optional) Valid values(in MB) for "max_upload" are 100,
125, 150, 175, 200, 225, 250, 275, 300, 325, 350, 375, 400, 425, 450, 475,
500. Values 225, 250, 275, 300, 325, 350, 375, 400, 425, 450, 475, 500 are
only for Enterprise Plan.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MaxUploadResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_max_upload')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/max_upload'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
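# Usage sketch (illustrative): 200 MB is within the valid values listed in the
# docstring above; values of 225 MB and up require the Enterprise Plan.
#
#   zones_settings.update_max_upload(value=200)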
def get_tls_client_auth(self,
**kwargs
) -> DetailedResponse:
"""
Get TLS Client Auth setting.
Get TLS Client Auth setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `TlsClientAuthResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_tls_client_auth')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/tls_client_auth'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_tls_client_auth(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update TLS Client Auth setting.
Update TLS Client Auth setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `TlsClientAuthResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_tls_client_auth')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/tls_client_auth'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_browser_check(self,
**kwargs
) -> DetailedResponse:
"""
Get browser check setting.
Get browser check setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `BrowserCheckResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_browser_check')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/browser_check'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_browser_check(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update browser check setting.
Update browser check setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `BrowserCheckResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_browser_check')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/browser_check'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_enable_error_pages_on(self,
**kwargs
) -> DetailedResponse:
"""
Get enable error pages on setting.
Get the "enable error pages on" (origin error page pass-through) setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing an `OriginErrorPagePassThruResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_enable_error_pages_on')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/origin_error_page_pass_thru'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_enable_error_pages_on(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update enable error pages on setting.
Update the "enable error pages on" (origin error page pass-through) setting for a zone.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing an `OriginErrorPagePassThruResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_enable_error_pages_on')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/origin_error_page_pass_thru'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_web_application_firewall(self,
**kwargs
) -> DetailedResponse:
"""
Get web application firewall setting.
Get web application firewall setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `WafResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_web_application_firewall')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/waf'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_web_application_firewall(self,
*,
value: str = None,
**kwargs
) -> DetailedResponse:
"""
Update web application firewall setting.
A Web Application Firewall (WAF) blocks requests that contain malicious content.
:param str value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `WafResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_web_application_firewall')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/waf'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
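# Usage sketch (illustrative): toggling the WAF. The 'on'/'off' values are
# assumed, not confirmed by this file.
#
#   zones_settings.update_web_application_firewall(value='on')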
def get_ciphers(self,
**kwargs
) -> DetailedResponse:
"""
Get ciphers setting.
Get ciphers setting for a zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `CiphersResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_ciphers')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/ciphers'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_ciphers(self,
*,
value: List[str] = None,
**kwargs
) -> DetailedResponse:
"""
Update ciphers setting.
Update ciphers setting for a zone.
:param List[str] value: (optional) Value.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `CiphersResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_ciphers')
headers.update(sdk_headers)
data = {
'value': value
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['crn', 'zone_identifier']
path_param_values = self.encode_path_vars(self.crn, self.zone_identifier)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/{crn}/zones/{zone_identifier}/settings/ciphers'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
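# Usage sketch (illustrative): ciphers takes a list of cipher suite names; the
# suite shown is only an example string.
#
#   zones_settings.update_ciphers(value=['ECDHE-ECDSA-AES128-GCM-SHA256'])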
##############################################################################
# Models
##############################################################################
class AlwaysUseHttpsRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize an AlwaysUseHttpsRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'AlwaysUseHttpsRespResult':
"""Initialize a AlwaysUseHttpsRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in AlwaysUseHttpsRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in AlwaysUseHttpsRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in AlwaysUseHttpsRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in AlwaysUseHttpsRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a AlwaysUseHttpsRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this AlwaysUseHttpsRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'AlwaysUseHttpsRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'AlwaysUseHttpsRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
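# Round-trip sketch (illustrative) for the response models in this section:
# from_dict validates required keys and parses timestamps, and to_dict
# serializes them back. The field values shown are made up.
#
#   raw = {'id': 'always_use_https', 'value': 'on', 'editable': True,
#          'modified_on': '2021-04-01T12:00:00Z'}
#   model = AlwaysUseHttpsRespResult.from_dict(raw)
#   assert model.to_dict()['id'] == 'always_use_https'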
class AutomaticHttpsRewritesRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize an AutomaticHttpsRewritesRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'AutomaticHttpsRewritesRespResult':
"""Initialize a AutomaticHttpsRewritesRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in AutomaticHttpsRewritesRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in AutomaticHttpsRewritesRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in AutomaticHttpsRewritesRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in AutomaticHttpsRewritesRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a AutomaticHttpsRewritesRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this AutomaticHttpsRewritesRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'AutomaticHttpsRewritesRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'AutomaticHttpsRewritesRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class BrowserCheckRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a BrowserCheckRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'BrowserCheckRespResult':
"""Initialize a BrowserCheckRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in BrowserCheckRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in BrowserCheckRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in BrowserCheckRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in BrowserCheckRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a BrowserCheckRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this BrowserCheckRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'BrowserCheckRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'BrowserCheckRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ChallengeTtlRespResult():
"""
Container for response information.
:attr str id: ID.
:attr int value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: int,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a ChallengeTtlRespResult object.
:param str id: ID.
:param int value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'ChallengeTtlRespResult':
"""Initialize a ChallengeTtlRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in ChallengeTtlRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in ChallengeTtlRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in ChallengeTtlRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in ChallengeTtlRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ChallengeTtlRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ChallengeTtlRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ChallengeTtlRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ChallengeTtlRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class CiphersRespResult():
"""
Container for response information.
:attr str id: ID.
:attr List[str] value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: List[str],
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a CiphersRespResult object.
:param str id: ID.
:param List[str] value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'CiphersRespResult':
"""Initialize a CiphersRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in CiphersRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in CiphersRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in CiphersRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in CiphersRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a CiphersRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this CiphersRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'CiphersRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'CiphersRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class HotlinkProtectionRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a HotlinkProtectionRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'HotlinkProtectionRespResult':
"""Initialize a HotlinkProtectionRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in HotlinkProtectionRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in HotlinkProtectionRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in HotlinkProtectionRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in HotlinkProtectionRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a HotlinkProtectionRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this HotlinkProtectionRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'HotlinkProtectionRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'HotlinkProtectionRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class Http2RespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize an Http2RespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'Http2RespResult':
"""Initialize a Http2RespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in Http2RespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in Http2RespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in Http2RespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in Http2RespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Http2RespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Http2RespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Http2RespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Http2RespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class Http3RespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize an Http3RespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'Http3RespResult':
"""Initialize a Http3RespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in Http3RespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in Http3RespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in Http3RespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in Http3RespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Http3RespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Http3RespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Http3RespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Http3RespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ImageLoadOptimizationRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize an ImageLoadOptimizationRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'ImageLoadOptimizationRespResult':
"""Initialize a ImageLoadOptimizationRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in ImageLoadOptimizationRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in ImageLoadOptimizationRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in ImageLoadOptimizationRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in ImageLoadOptimizationRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ImageLoadOptimizationRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ImageLoadOptimizationRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ImageLoadOptimizationRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ImageLoadOptimizationRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ImageSizeOptimizationRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize an ImageSizeOptimizationRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'ImageSizeOptimizationRespResult':
"""Initialize a ImageSizeOptimizationRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in ImageSizeOptimizationRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in ImageSizeOptimizationRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in ImageSizeOptimizationRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in ImageSizeOptimizationRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ImageSizeOptimizationRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ImageSizeOptimizationRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ImageSizeOptimizationRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ImageSizeOptimizationRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class IpGeolocationRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize an IpGeolocationRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'IpGeolocationRespResult':
"""Initialize a IpGeolocationRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in IpGeolocationRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in IpGeolocationRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in IpGeolocationRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in IpGeolocationRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a IpGeolocationRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this IpGeolocationRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'IpGeolocationRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'IpGeolocationRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class Ipv6RespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize an Ipv6RespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'Ipv6RespResult':
"""Initialize a Ipv6RespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in Ipv6RespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in Ipv6RespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in Ipv6RespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in Ipv6RespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Ipv6RespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Ipv6RespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Ipv6RespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Ipv6RespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class MaxUploadRespResult():
"""
Container for response information.
:attr str id: ID.
:attr int value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: int,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a MaxUploadRespResult object.
:param str id: ID.
:param int value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'MaxUploadRespResult':
"""Initialize a MaxUploadRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in MaxUploadRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in MaxUploadRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in MaxUploadRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in MaxUploadRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MaxUploadRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MaxUploadRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MaxUploadRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MaxUploadRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
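# Illustrative sketch (hypothetical values): equality on these models is
# structural. __eq__ compares __dict__, so two MaxUploadRespResult instances
# built from the same arguments compare equal, and __ne__ is simply its negation.
#
#     ts = string_to_datetime('2021-04-01T12:00:00.000000Z')
#     a = MaxUploadRespResult(id='max_upload', value=100, editable=True, modified_on=ts)
#     b = MaxUploadRespResult(id='max_upload', value=100, editable=True, modified_on=ts)
#     assert a == b and not (a != b)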
class MinTlsVersionRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a MinTlsVersionRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'MinTlsVersionRespResult':
"""Initialize a MinTlsVersionRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in MinTlsVersionRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in MinTlsVersionRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in MinTlsVersionRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in MinTlsVersionRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MinTlsVersionRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MinTlsVersionRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MinTlsVersionRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MinTlsVersionRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class MinifyRespResult():
"""
Container for response information.
:attr str id: ID.
:attr MinifyRespResultValue value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: 'MinifyRespResultValue',
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a MinifyRespResult object.
:param str id: ID.
:param MinifyRespResultValue value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'MinifyRespResult':
"""Initialize a MinifyRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in MinifyRespResult JSON')
if 'value' in _dict:
args['value'] = MinifyRespResultValue.from_dict(_dict.get('value'))
else:
raise ValueError('Required property \'value\' not present in MinifyRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in MinifyRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in MinifyRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MinifyRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value.to_dict()
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MinifyRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MinifyRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MinifyRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class MinifyRespResultValue():
"""
Value.
:attr str css: css.
:attr str html: html.
:attr str js: js.
"""
def __init__(self,
css: str,
html: str,
js: str) -> None:
"""
Initialize a MinifyRespResultValue object.
:param str css: css.
:param str html: html.
:param str js: js.
"""
self.css = css
self.html = html
self.js = js
@classmethod
def from_dict(cls, _dict: Dict) -> 'MinifyRespResultValue':
"""Initialize a MinifyRespResultValue object from a json dictionary."""
args = {}
if 'css' in _dict:
args['css'] = _dict.get('css')
else:
raise ValueError('Required property \'css\' not present in MinifyRespResultValue JSON')
if 'html' in _dict:
args['html'] = _dict.get('html')
else:
raise ValueError('Required property \'html\' not present in MinifyRespResultValue JSON')
if 'js' in _dict:
args['js'] = _dict.get('js')
else:
raise ValueError('Required property \'js\' not present in MinifyRespResultValue JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MinifyRespResultValue object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'css') and self.css is not None:
_dict['css'] = self.css
if hasattr(self, 'html') and self.html is not None:
_dict['html'] = self.html
if hasattr(self, 'js') and self.js is not None:
_dict['js'] = self.js
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MinifyRespResultValue object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MinifyRespResultValue') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MinifyRespResultValue') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
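# Illustrative sketch (hypothetical payload): MinifyRespResult.from_dict parses
# the nested 'value' object into a MinifyRespResultValue, and to_dict
# re-serializes it back into a plain dict.
#
#     result = MinifyRespResult.from_dict({
#         'id': 'minify',
#         'value': {'css': 'on', 'html': 'off', 'js': 'on'},
#         'editable': True,
#         'modified_on': '2021-04-01T12:00:00.000000Z',
#     })
#     assert isinstance(result.value, MinifyRespResultValue)
#     assert result.to_dict()['value'] == {'css': 'on', 'html': 'off', 'js': 'on'}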
class MinifySettingValue():
"""
Value.
:attr str css: Automatically minify all CSS for your website.
:attr str html: Automatically minify all HTML for your website.
:attr str js: Automatically minify all JavaScript for your website.
"""
def __init__(self,
css: str,
html: str,
js: str) -> None:
"""
Initialize a MinifySettingValue object.
:param str css: Automatically minify all CSS for your website.
:param str html: Automatically minify all HTML for your website.
:param str js: Automatically minify all JavaScript for your website.
"""
self.css = css
self.html = html
self.js = js
@classmethod
def from_dict(cls, _dict: Dict) -> 'MinifySettingValue':
"""Initialize a MinifySettingValue object from a json dictionary."""
args = {}
if 'css' in _dict:
args['css'] = _dict.get('css')
else:
raise ValueError('Required property \'css\' not present in MinifySettingValue JSON')
if 'html' in _dict:
args['html'] = _dict.get('html')
else:
raise ValueError('Required property \'html\' not present in MinifySettingValue JSON')
if 'js' in _dict:
args['js'] = _dict.get('js')
else:
raise ValueError('Required property \'js\' not present in MinifySettingValue JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MinifySettingValue object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'css') and self.css is not None:
_dict['css'] = self.css
if hasattr(self, 'html') and self.html is not None:
_dict['html'] = self.html
if hasattr(self, 'js') and self.js is not None:
_dict['js'] = self.js
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MinifySettingValue object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MinifySettingValue') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MinifySettingValue') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class CssEnum(str, Enum):
"""
Automatically minify all CSS for your website.
"""
ON = 'on'
OFF = 'off'
class HtmlEnum(str, Enum):
"""
Automatically minify all HTML for your website.
"""
ON = 'on'
OFF = 'off'
class JsEnum(str, Enum):
"""
Automatically minify all JavaScript for your website.
"""
ON = 'on'
OFF = 'off'
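# Illustrative sketch (hypothetical values): CssEnum, HtmlEnum and JsEnum
# subclass str, so their members serialize as plain 'on'/'off' strings. The
# qualified names below assume the enums are nested in MinifySettingValue,
# following this module's convention; use the bare names if they are module-level.
#
#     setting = MinifySettingValue(
#         css=MinifySettingValue.CssEnum.ON.value,
#         html=MinifySettingValue.HtmlEnum.OFF.value,
#         js=MinifySettingValue.JsEnum.ON.value)
#     assert setting.to_dict() == {'css': 'on', 'html': 'off', 'js': 'on'}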
class MobileRedirecSettingValue():
"""
Value.
:attr str status: Whether or not the mobile redirection is enabled.
:attr str mobile_subdomain: The subdomain prefix to which visitors on mobile
devices are redirected.
:attr bool strip_uri: Whether to drop the current page path and redirect to the
mobile subdomain URL root or to keep the path and redirect to the same page on
the mobile subdomain.
"""
def __init__(self,
status: str,
mobile_subdomain: str,
strip_uri: bool) -> None:
"""
Initialize a MobileRedirecSettingValue object.
:param str status: Whether or not the mobile redirection is enabled.
:param str mobile_subdomain: The subdomain prefix to which visitors on
mobile devices are redirected.
:param bool strip_uri: Whether to drop the current page path and redirect
to the mobile subdomain URL root or to keep the path and redirect to the
same page on the mobile subdomain.
"""
self.status = status
self.mobile_subdomain = mobile_subdomain
self.strip_uri = strip_uri
@classmethod
def from_dict(cls, _dict: Dict) -> 'MobileRedirecSettingValue':
"""Initialize a MobileRedirecSettingValue object from a json dictionary."""
args = {}
if 'status' in _dict:
args['status'] = _dict.get('status')
else:
raise ValueError('Required property \'status\' not present in MobileRedirecSettingValue JSON')
if 'mobile_subdomain' in _dict:
args['mobile_subdomain'] = _dict.get('mobile_subdomain')
else:
raise ValueError('Required property \'mobile_subdomain\' not present in MobileRedirecSettingValue JSON')
if 'strip_uri' in _dict:
args['strip_uri'] = _dict.get('strip_uri')
else:
raise ValueError('Required property \'strip_uri\' not present in MobileRedirecSettingValue JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MobileRedirecSettingValue object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'status') and self.status is not None:
_dict['status'] = self.status
if hasattr(self, 'mobile_subdomain') and self.mobile_subdomain is not None:
_dict['mobile_subdomain'] = self.mobile_subdomain
if hasattr(self, 'strip_uri') and self.strip_uri is not None:
_dict['strip_uri'] = self.strip_uri
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MobileRedirecSettingValue object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MobileRedirecSettingValue') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MobileRedirecSettingValue') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class StatusEnum(str, Enum):
"""
Whether or not the mobile redirection is enabled.
"""
ON = 'on'
OFF = 'off'
class MobileRedirectRespResult():
"""
Container for response information.
:attr str id: ID.
:attr MobileRedirectRespResultValue value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: 'MobileRedirectRespResultValue',
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a MobileRedirectRespResult object.
:param str id: ID.
:param MobileRedirectRespResultValue value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'MobileRedirectRespResult':
"""Initialize a MobileRedirectRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in MobileRedirectRespResult JSON')
if 'value' in _dict:
args['value'] = MobileRedirectRespResultValue.from_dict(_dict.get('value'))
else:
raise ValueError('Required property \'value\' not present in MobileRedirectRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in MobileRedirectRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in MobileRedirectRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MobileRedirectRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value.to_dict()
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MobileRedirectRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MobileRedirectRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MobileRedirectRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class MobileRedirectRespResultValue():
"""
Value.
:attr str status: Whether or not the mobile redirection is enabled.
:attr str mobile_subdomain: The subdomain prefix to which visitors on mobile
devices are redirected.
:attr bool strip_uri: Whether to drop the current page path and redirect to the
mobile subdomain URL root or to keep the path and redirect to the same page on
the mobile subdomain.
"""
def __init__(self,
status: str,
mobile_subdomain: str,
strip_uri: bool) -> None:
"""
Initialize a MobileRedirectRespResultValue object.
:param str status: Whether or not the mobile redirection is enabled.
:param str mobile_subdomain: The subdomain prefix to which visitors on
mobile devices are redirected.
:param bool strip_uri: Whether to drop the current page path and redirect
to the mobile subdomain URL root or to keep the path and redirect to the
same page on the mobile subdomain.
"""
self.status = status
self.mobile_subdomain = mobile_subdomain
self.strip_uri = strip_uri
@classmethod
def from_dict(cls, _dict: Dict) -> 'MobileRedirectRespResultValue':
"""Initialize a MobileRedirectRespResultValue object from a json dictionary."""
args = {}
if 'status' in _dict:
args['status'] = _dict.get('status')
else:
raise ValueError('Required property \'status\' not present in MobileRedirectRespResultValue JSON')
if 'mobile_subdomain' in _dict:
args['mobile_subdomain'] = _dict.get('mobile_subdomain')
else:
raise ValueError('Required property \'mobile_subdomain\' not present in MobileRedirectRespResultValue JSON')
if 'strip_uri' in _dict:
args['strip_uri'] = _dict.get('strip_uri')
else:
raise ValueError('Required property \'strip_uri\' not present in MobileRedirectRespResultValue JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MobileRedirectRespResultValue object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'status') and self.status is not None:
_dict['status'] = self.status
if hasattr(self, 'mobile_subdomain') and self.mobile_subdomain is not None:
_dict['mobile_subdomain'] = self.mobile_subdomain
if hasattr(self, 'strip_uri') and self.strip_uri is not None:
_dict['strip_uri'] = self.strip_uri
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MobileRedirectRespResultValue object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MobileRedirectRespResultValue') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MobileRedirectRespResultValue') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class OpportunisticEncryptionRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize an OpportunisticEncryptionRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'OpportunisticEncryptionRespResult':
"""Initialize a OpportunisticEncryptionRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in OpportunisticEncryptionRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in OpportunisticEncryptionRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in OpportunisticEncryptionRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in OpportunisticEncryptionRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a OpportunisticEncryptionRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this OpportunisticEncryptionRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'OpportunisticEncryptionRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'OpportunisticEncryptionRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class OriginErrorPagePassThruRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize an OriginErrorPagePassThruRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'OriginErrorPagePassThruRespResult':
"""Initialize a OriginErrorPagePassThruRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in OriginErrorPagePassThruRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in OriginErrorPagePassThruRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in OriginErrorPagePassThruRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in OriginErrorPagePassThruRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a OriginErrorPagePassThruRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this OriginErrorPagePassThruRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'OriginErrorPagePassThruRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'OriginErrorPagePassThruRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class PrefetchPreloadRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a PrefetchPreloadRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'PrefetchPreloadRespResult':
"""Initialize a PrefetchPreloadRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in PrefetchPreloadRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in PrefetchPreloadRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in PrefetchPreloadRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in PrefetchPreloadRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a PrefetchPreloadRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this PrefetchPreloadRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'PrefetchPreloadRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'PrefetchPreloadRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class PseudoIpv4RespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a PseudoIpv4RespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'PseudoIpv4RespResult':
"""Initialize a PseudoIpv4RespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in PseudoIpv4RespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in PseudoIpv4RespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in PseudoIpv4RespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in PseudoIpv4RespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a PseudoIpv4RespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this PseudoIpv4RespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'PseudoIpv4RespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'PseudoIpv4RespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ResponseBufferingRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a ResponseBufferingRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'ResponseBufferingRespResult':
"""Initialize a ResponseBufferingRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in ResponseBufferingRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in ResponseBufferingRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in ResponseBufferingRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in ResponseBufferingRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ResponseBufferingRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ResponseBufferingRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ResponseBufferingRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ResponseBufferingRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ScriptLoadOptimizationRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a ScriptLoadOptimizationRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'ScriptLoadOptimizationRespResult':
"""Initialize a ScriptLoadOptimizationRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in ScriptLoadOptimizationRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in ScriptLoadOptimizationRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in ScriptLoadOptimizationRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in ScriptLoadOptimizationRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ScriptLoadOptimizationRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ScriptLoadOptimizationRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ScriptLoadOptimizationRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ScriptLoadOptimizationRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class SecurityHeaderRespResult():
"""
Container for response information.
:attr str id: ID.
:attr SecurityHeaderRespResultValue value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: 'SecurityHeaderRespResultValue',
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a SecurityHeaderRespResult object.
:param str id: ID.
:param SecurityHeaderRespResultValue value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'SecurityHeaderRespResult':
"""Initialize a SecurityHeaderRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in SecurityHeaderRespResult JSON')
if 'value' in _dict:
args['value'] = SecurityHeaderRespResultValue.from_dict(_dict.get('value'))
else:
raise ValueError('Required property \'value\' not present in SecurityHeaderRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in SecurityHeaderRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in SecurityHeaderRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a SecurityHeaderRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value.to_dict()
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this SecurityHeaderRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'SecurityHeaderRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'SecurityHeaderRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class SecurityHeaderRespResultValue():
"""
Value.
:attr SecurityHeaderRespResultValueStrictTransportSecurity
strict_transport_security: Strict transport security.
"""
def __init__(self,
strict_transport_security: 'SecurityHeaderRespResultValueStrictTransportSecurity') -> None:
"""
Initialize a SecurityHeaderRespResultValue object.
:param SecurityHeaderRespResultValueStrictTransportSecurity
strict_transport_security: Strict transport security.
"""
self.strict_transport_security = strict_transport_security
@classmethod
def from_dict(cls, _dict: Dict) -> 'SecurityHeaderRespResultValue':
"""Initialize a SecurityHeaderRespResultValue object from a json dictionary."""
args = {}
if 'strict_transport_security' in _dict:
args['strict_transport_security'] = SecurityHeaderRespResultValueStrictTransportSecurity.from_dict(_dict.get('strict_transport_security'))
else:
raise ValueError('Required property \'strict_transport_security\' not present in SecurityHeaderRespResultValue JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a SecurityHeaderRespResultValue object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'strict_transport_security') and self.strict_transport_security is not None:
_dict['strict_transport_security'] = self.strict_transport_security.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this SecurityHeaderRespResultValue object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'SecurityHeaderRespResultValue') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'SecurityHeaderRespResultValue') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class SecurityHeaderRespResultValueStrictTransportSecurity():
"""
Strict transport security.
:attr bool enabled: Whether or not the security header is enabled.
:attr int max_age: Max age in seconds.
:attr bool include_subdomains: Include all subdomains.
:attr bool nosniff: Whether or not to include the
'X-Content-Type-Options:nosniff' header.
"""
def __init__(self,
enabled: bool,
max_age: int,
include_subdomains: bool,
nosniff: bool) -> None:
"""
Initialize a SecurityHeaderRespResultValueStrictTransportSecurity object.
:param bool enabled: Whether or not the security header is enabled.
:param int max_age: Max age in seconds.
:param bool include_subdomains: Include all subdomains.
:param bool nosniff: Whether or not to include the
'X-Content-Type-Options:nosniff' header.
"""
self.enabled = enabled
self.max_age = max_age
self.include_subdomains = include_subdomains
self.nosniff = nosniff
@classmethod
def from_dict(cls, _dict: Dict) -> 'SecurityHeaderRespResultValueStrictTransportSecurity':
"""Initialize a SecurityHeaderRespResultValueStrictTransportSecurity object from a json dictionary."""
args = {}
if 'enabled' in _dict:
args['enabled'] = _dict.get('enabled')
else:
raise ValueError('Required property \'enabled\' not present in SecurityHeaderRespResultValueStrictTransportSecurity JSON')
if 'max_age' in _dict:
args['max_age'] = _dict.get('max_age')
else:
raise ValueError('Required property \'max_age\' not present in SecurityHeaderRespResultValueStrictTransportSecurity JSON')
if 'include_subdomains' in _dict:
args['include_subdomains'] = _dict.get('include_subdomains')
else:
raise ValueError('Required property \'include_subdomains\' not present in SecurityHeaderRespResultValueStrictTransportSecurity JSON')
if 'nosniff' in _dict:
args['nosniff'] = _dict.get('nosniff')
else:
raise ValueError('Required property \'nosniff\' not present in SecurityHeaderRespResultValueStrictTransportSecurity JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a SecurityHeaderRespResultValueStrictTransportSecurity object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'enabled') and self.enabled is not None:
_dict['enabled'] = self.enabled
if hasattr(self, 'max_age') and self.max_age is not None:
_dict['max_age'] = self.max_age
if hasattr(self, 'include_subdomains') and self.include_subdomains is not None:
_dict['include_subdomains'] = self.include_subdomains
if hasattr(self, 'nosniff') and self.nosniff is not None:
_dict['nosniff'] = self.nosniff
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this SecurityHeaderRespResultValueStrictTransportSecurity object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'SecurityHeaderRespResultValueStrictTransportSecurity') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'SecurityHeaderRespResultValueStrictTransportSecurity') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
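# A minimal sketch tying the three classes above together (hypothetical
# values, not taken from a live API response): from_dict walks the nested
# JSON and builds the value models recursively, and to_dict reverses it.
#
#     resp = SecurityHeaderRespResult.from_dict({
#         'id': 'security_header',
#         'value': {
#             'strict_transport_security': {
#                 'enabled': True,
#                 'max_age': 86400,
#                 'include_subdomains': True,
#                 'nosniff': True,
#             }
#         },
#         'editable': True,
#         'modified_on': '2021-01-01T12:00:00.000000Z',
#     })
#     assert resp.value.strict_transport_security.max_age == 86400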
class SecurityHeaderSettingValue():
"""
Value.
:attr SecurityHeaderSettingValueStrictTransportSecurity
strict_transport_security: Strict transport security.
"""
def __init__(self,
strict_transport_security: 'SecurityHeaderSettingValueStrictTransportSecurity') -> None:
"""
Initialize a SecurityHeaderSettingValue object.
:param SecurityHeaderSettingValueStrictTransportSecurity
strict_transport_security: Strict transport security.
"""
self.strict_transport_security = strict_transport_security
@classmethod
def from_dict(cls, _dict: Dict) -> 'SecurityHeaderSettingValue':
"""Initialize a SecurityHeaderSettingValue object from a json dictionary."""
args = {}
if 'strict_transport_security' in _dict:
args['strict_transport_security'] = SecurityHeaderSettingValueStrictTransportSecurity.from_dict(_dict.get('strict_transport_security'))
else:
raise ValueError('Required property \'strict_transport_security\' not present in SecurityHeaderSettingValue JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a SecurityHeaderSettingValue object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'strict_transport_security') and self.strict_transport_security is not None:
_dict['strict_transport_security'] = self.strict_transport_security.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this SecurityHeaderSettingValue object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'SecurityHeaderSettingValue') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'SecurityHeaderSettingValue') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class SecurityHeaderSettingValueStrictTransportSecurity():
"""
Strict transport security.
:attr bool enabled: Whether or not the security header is enabled.
:attr int max_age: Max age in seconds.
:attr bool include_subdomains: Include all subdomains.
:attr bool nosniff: Whether or not to include the
'X-Content-Type-Options:nosniff' header.
"""
def __init__(self,
enabled: bool,
max_age: int,
include_subdomains: bool,
nosniff: bool) -> None:
"""
Initialize a SecurityHeaderSettingValueStrictTransportSecurity object.
:param bool enabled: Whether or not the security header is enabled.
:param int max_age: Max age in seconds.
:param bool include_subdomains: Include all subdomains.
:param bool nosniff: Whether or not to include the
'X-Content-Type-Options:nosniff' header.
"""
self.enabled = enabled
self.max_age = max_age
self.include_subdomains = include_subdomains
self.nosniff = nosniff
@classmethod
def from_dict(cls, _dict: Dict) -> 'SecurityHeaderSettingValueStrictTransportSecurity':
"""Initialize a SecurityHeaderSettingValueStrictTransportSecurity object from a json dictionary."""
args = {}
if 'enabled' in _dict:
args['enabled'] = _dict.get('enabled')
else:
raise ValueError('Required property \'enabled\' not present in SecurityHeaderSettingValueStrictTransportSecurity JSON')
if 'max_age' in _dict:
args['max_age'] = _dict.get('max_age')
else:
raise ValueError('Required property \'max_age\' not present in SecurityHeaderSettingValueStrictTransportSecurity JSON')
if 'include_subdomains' in _dict:
args['include_subdomains'] = _dict.get('include_subdomains')
else:
raise ValueError('Required property \'include_subdomains\' not present in SecurityHeaderSettingValueStrictTransportSecurity JSON')
if 'nosniff' in _dict:
args['nosniff'] = _dict.get('nosniff')
else:
raise ValueError('Required property \'nosniff\' not present in SecurityHeaderSettingValueStrictTransportSecurity JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a SecurityHeaderSettingValueStrictTransportSecurity object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'enabled') and self.enabled is not None:
_dict['enabled'] = self.enabled
if hasattr(self, 'max_age') and self.max_age is not None:
_dict['max_age'] = self.max_age
if hasattr(self, 'include_subdomains') and self.include_subdomains is not None:
_dict['include_subdomains'] = self.include_subdomains
if hasattr(self, 'nosniff') and self.nosniff is not None:
_dict['nosniff'] = self.nosniff
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this SecurityHeaderSettingValueStrictTransportSecurity object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'SecurityHeaderSettingValueStrictTransportSecurity') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'SecurityHeaderSettingValueStrictTransportSecurity') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
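# The *SettingValue* pair above mirrors the *RespResult* shapes; a sketch of
# assembling such a value for an update payload (all field values
# hypothetical, and the surrounding service call is not shown):
#
#     payload = SecurityHeaderSettingValue(
#         strict_transport_security=SecurityHeaderSettingValueStrictTransportSecurity(
#             enabled=True,
#             max_age=31536000,  # one year, in seconds
#             include_subdomains=True,
#             nosniff=True,
#         )
#     ).to_dict()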
class ServerSideExcludeRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a ServerSideExcludeRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'ServerSideExcludeRespResult':
"""Initialize a ServerSideExcludeRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in ServerSideExcludeRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in ServerSideExcludeRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in ServerSideExcludeRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in ServerSideExcludeRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ServerSideExcludeRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ServerSideExcludeRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ServerSideExcludeRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ServerSideExcludeRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
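# `modified_on` is converted to a datetime on the way in (string_to_datetime)
# and back to a string on the way out (datetime_to_string), so a
# from_dict/to_dict round trip preserves the timestamp. Sketch with a
# hypothetical response body:
#
#     sse = ServerSideExcludeRespResult.from_dict({
#         'id': 'server_side_exclude',
#         'value': 'on',
#         'editable': True,
#         'modified_on': '2021-01-01T12:00:00.000000Z',
#     })
#     assert isinstance(sse.modified_on, datetime)
#     doc = sse.to_dict()  # 'modified_on' is a string again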
class TlsClientAuthRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a TlsClientAuthRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'TlsClientAuthRespResult':
"""Initialize a TlsClientAuthRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in TlsClientAuthRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in TlsClientAuthRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in TlsClientAuthRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in TlsClientAuthRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a TlsClientAuthRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this TlsClientAuthRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'TlsClientAuthRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'TlsClientAuthRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class TrueClientIpRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a TrueClientIpRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'TrueClientIpRespResult':
"""Initialize a TrueClientIpRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in TrueClientIpRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in TrueClientIpRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in TrueClientIpRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in TrueClientIpRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a TrueClientIpRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this TrueClientIpRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'TrueClientIpRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'TrueClientIpRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
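# from_dict validates every required property and raises ValueError instead of
# returning a half-built model, so a defensive caller can wrap the parse.
# Sketch with a deliberately incomplete (hypothetical) document:
#
#     try:
#         TrueClientIpRespResult.from_dict({'id': 'true_client_ip_header'})
#     except ValueError as err:
#         print(err)  # "Required property 'value' not present in ..."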
class WafRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a WafRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'WafRespResult':
"""Initialize a WafRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in WafRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in WafRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in WafRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in WafRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a WafRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this WafRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'WafRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'WafRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
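# __eq__ compares instances field by field via __dict__, so two models parsed
# from identical JSON compare equal, while any differing field (including the
# parsed timestamp) breaks equality. Sketch (hypothetical values):
#
#     doc = {'id': 'waf', 'value': 'on', 'editable': True,
#            'modified_on': '2021-01-01T12:00:00.000000Z'}
#     assert WafRespResult.from_dict(doc) == WafRespResult.from_dict(doc)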
class WebsocketsRespResult():
"""
Container for response information.
:attr str id: ID.
:attr str value: Value.
:attr bool editable: Editable.
:attr datetime modified_on: Modified date.
"""
def __init__(self,
id: str,
value: str,
editable: bool,
modified_on: datetime) -> None:
"""
Initialize a WebsocketsRespResult object.
:param str id: ID.
:param str value: Value.
:param bool editable: Editable.
:param datetime modified_on: Modified date.
"""
self.id = id
self.value = value
self.editable = editable
self.modified_on = modified_on
@classmethod
def from_dict(cls, _dict: Dict) -> 'WebsocketsRespResult':
"""Initialize a WebsocketsRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in WebsocketsRespResult JSON')
if 'value' in _dict:
args['value'] = _dict.get('value')
else:
raise ValueError('Required property \'value\' not present in WebsocketsRespResult JSON')
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
else:
raise ValueError('Required property \'editable\' not present in WebsocketsRespResult JSON')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
else:
raise ValueError('Required property \'modified_on\' not present in WebsocketsRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a WebsocketsRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this WebsocketsRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'WebsocketsRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'WebsocketsRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ZonesDnssecRespResult():
"""
Container for response information.
:attr str status: (optional) Status.
:attr int flags: (optional) Flags.
:attr str algorithm: (optional) Algorithm.
:attr str key_type: (optional) Key type.
:attr str digest_type: (optional) Digest type.
:attr str digest_algorithm: (optional) Digest algorithm.
:attr str digest: (optional) Digest.
:attr str ds: (optional) DS.
:attr int key_tag: (optional) Key tag.
:attr str public_key: (optional) Public key.
"""
def __init__(self,
*,
status: str = None,
flags: int = None,
algorithm: str = None,
key_type: str = None,
digest_type: str = None,
digest_algorithm: str = None,
digest: str = None,
ds: str = None,
key_tag: int = None,
public_key: str = None) -> None:
"""
Initialize a ZonesDnssecRespResult object.
:param str status: (optional) Status.
:param int flags: (optional) Flags.
:param str algorithm: (optional) Algorithm.
:param str key_type: (optional) Key type.
:param str digest_type: (optional) Digest type.
:param str digest_algorithm: (optional) Digest algorithm.
:param str digest: (optional) Digest.
:param str ds: (optional) DS.
:param int key_tag: (optional) Key tag.
:param str public_key: (optional) Public key.
"""
self.status = status
self.flags = flags
self.algorithm = algorithm
self.key_type = key_type
self.digest_type = digest_type
self.digest_algorithm = digest_algorithm
self.digest = digest
self.ds = ds
self.key_tag = key_tag
self.public_key = public_key
@classmethod
def from_dict(cls, _dict: Dict) -> 'ZonesDnssecRespResult':
"""Initialize a ZonesDnssecRespResult object from a json dictionary."""
args = {}
if 'status' in _dict:
args['status'] = _dict.get('status')
if 'flags' in _dict:
args['flags'] = _dict.get('flags')
if 'algorithm' in _dict:
args['algorithm'] = _dict.get('algorithm')
if 'key_type' in _dict:
args['key_type'] = _dict.get('key_type')
if 'digest_type' in _dict:
args['digest_type'] = _dict.get('digest_type')
if 'digest_algorithm' in _dict:
args['digest_algorithm'] = _dict.get('digest_algorithm')
if 'digest' in _dict:
args['digest'] = _dict.get('digest')
if 'ds' in _dict:
args['ds'] = _dict.get('ds')
if 'key_tag' in _dict:
args['key_tag'] = _dict.get('key_tag')
if 'public_key' in _dict:
args['public_key'] = _dict.get('public_key')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ZonesDnssecRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'status') and self.status is not None:
_dict['status'] = self.status
if hasattr(self, 'flags') and self.flags is not None:
_dict['flags'] = self.flags
if hasattr(self, 'algorithm') and self.algorithm is not None:
_dict['algorithm'] = self.algorithm
if hasattr(self, 'key_type') and self.key_type is not None:
_dict['key_type'] = self.key_type
if hasattr(self, 'digest_type') and self.digest_type is not None:
_dict['digest_type'] = self.digest_type
if hasattr(self, 'digest_algorithm') and self.digest_algorithm is not None:
_dict['digest_algorithm'] = self.digest_algorithm
if hasattr(self, 'digest') and self.digest is not None:
_dict['digest'] = self.digest
if hasattr(self, 'ds') and self.ds is not None:
_dict['ds'] = self.ds
if hasattr(self, 'key_tag') and self.key_tag is not None:
_dict['key_tag'] = self.key_tag
if hasattr(self, 'public_key') and self.public_key is not None:
_dict['public_key'] = self.public_key
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ZonesDnssecRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ZonesDnssecRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ZonesDnssecRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class StatusEnum(str, Enum):
"""
Status.
"""
ACTIVE = 'active'
DISABLED = 'disabled'
PENDING = 'pending'
PENDING_DISABLED = 'pending-disabled'
ERROR = 'error'
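# StatusEnum lists the values the optional `status` field can take; because it
# subclasses str, its members compare equal to the raw strings returned by the
# API. Sketch (hypothetical fragment, referencing the enum as defined above):
#
#     dnssec = ZonesDnssecRespResult.from_dict({'status': 'pending'})
#     assert dnssec.status == StatusEnum.PENDING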
class AlwaysUseHttpsResp():
"""
Always Use HTTPS response.
:attr AlwaysUseHttpsRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'AlwaysUseHttpsRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize an AlwaysUseHttpsResp object.
:param AlwaysUseHttpsRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'AlwaysUseHttpsResp':
"""Initialize a AlwaysUseHttpsResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = AlwaysUseHttpsRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in AlwaysUseHttpsResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in AlwaysUseHttpsResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in AlwaysUseHttpsResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in AlwaysUseHttpsResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a AlwaysUseHttpsResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this AlwaysUseHttpsResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'AlwaysUseHttpsResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'AlwaysUseHttpsResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
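# AlwaysUseHttpsResp follows the response envelope shared by the *Resp classes
# below: a typed `result`, a `success` flag, and parallel `errors`/`messages`
# arrays. Parsing sketch, assuming the usual id/value/editable/modified_on
# result shape (all values hypothetical):
#
#     resp = AlwaysUseHttpsResp.from_dict({
#         'result': {'id': 'always_use_https', 'value': 'on', 'editable': True,
#                    'modified_on': '2021-01-01T12:00:00.000000Z'},
#         'success': True,
#         'errors': [],
#         'messages': [],
#     })
#     if resp.success:
#         print(resp.result.value)  # 'on'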
class AutomaticHttpsRewritesResp():
"""
Automatic HTTPS Rewrites response.
:attr AutomaticHttpsRewritesRespResult result: Container for response
information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'AutomaticHttpsRewritesRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize an AutomaticHttpsRewritesResp object.
:param AutomaticHttpsRewritesRespResult result: Container for response
information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'AutomaticHttpsRewritesResp':
"""Initialize a AutomaticHttpsRewritesResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = AutomaticHttpsRewritesRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in AutomaticHttpsRewritesResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in AutomaticHttpsRewritesResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in AutomaticHttpsRewritesResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in AutomaticHttpsRewritesResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a AutomaticHttpsRewritesResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this AutomaticHttpsRewritesResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'AutomaticHttpsRewritesResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'AutomaticHttpsRewritesResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class BrowserCheckResp():
"""
Browser Check response.
:attr BrowserCheckRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'BrowserCheckRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a BrowserCheckResp object.
:param BrowserCheckRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'BrowserCheckResp':
"""Initialize a BrowserCheckResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = BrowserCheckRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in BrowserCheckResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in BrowserCheckResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in BrowserCheckResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in BrowserCheckResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a BrowserCheckResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this BrowserCheckResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'BrowserCheckResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'BrowserCheckResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ChallengeTtlResp():
"""
Challenge TTL response.
:attr ChallengeTtlRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'ChallengeTtlRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a ChallengeTtlResp object.
:param ChallengeTtlRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'ChallengeTtlResp':
"""Initialize a ChallengeTtlResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = ChallengeTtlRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in ChallengeTtlResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ChallengeTtlResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ChallengeTtlResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ChallengeTtlResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ChallengeTtlResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ChallengeTtlResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ChallengeTtlResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ChallengeTtlResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class CiphersResp():
"""
Ciphers response.
:attr CiphersRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'CiphersRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a CiphersResp object.
:param CiphersRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'CiphersResp':
"""Initialize a CiphersResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = CiphersRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in CiphersResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in CiphersResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in CiphersResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in CiphersResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a CiphersResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this CiphersResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'CiphersResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'CiphersResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class CnameFlatteningResponse():
"""
CNAME Flattening response.
:attr str id: (optional) ID.
:attr str value: (optional) Value.
:attr datetime modified_on: (optional) Date when it is modified.
:attr bool editable: (optional) Editable.
"""
def __init__(self,
*,
id: str = None,
value: str = None,
modified_on: datetime = None,
editable: bool = None) -> None:
"""
Initialize a CnameFlatteningResponse object.
:param str id: (optional) ID.
:param str value: (optional) Value.
:param datetime modified_on: (optional) Date when it is modified.
:param bool editable: (optional) Editable.
"""
self.id = id
self.value = value
self.modified_on = modified_on
self.editable = editable
@classmethod
def from_dict(cls, _dict: Dict) -> 'CnameFlatteningResponse':
"""Initialize a CnameFlatteningResponse object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
if 'value' in _dict:
args['value'] = _dict.get('value')
if 'modified_on' in _dict:
args['modified_on'] = string_to_datetime(_dict.get('modified_on'))
if 'editable' in _dict:
args['editable'] = _dict.get('editable')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a CnameFlatteningResponse object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = datetime_to_string(self.modified_on)
if hasattr(self, 'editable') and self.editable is not None:
_dict['editable'] = self.editable
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this CnameFlatteningResponse object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'CnameFlatteningResponse') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'CnameFlatteningResponse') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ValueEnum(str, Enum):
"""
Value.
"""
FLATTEN_ALL = 'flatten_all'
FLATTEN_AT_ROOT = 'flatten_at_root'
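# Every field on CnameFlatteningResponse is optional, so from_dict accepts a
# partial document; ValueEnum gives the two legal `value` strings. Sketch
# (hypothetical fragment, referencing the enum as defined above):
#
#     cname = CnameFlatteningResponse.from_dict({'value': 'flatten_at_root'})
#     assert cname.value == ValueEnum.FLATTEN_AT_ROOT
#     assert cname.id is None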
class HotlinkProtectionResp():
"""
Hotlink Protection response.
:attr HotlinkProtectionRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'HotlinkProtectionRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a HotlinkProtectionResp object.
:param HotlinkProtectionRespResult result: Container for response
information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'HotlinkProtectionResp':
"""Initialize a HotlinkProtectionResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = HotlinkProtectionRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in HotlinkProtectionResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in HotlinkProtectionResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in HotlinkProtectionResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in HotlinkProtectionResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a HotlinkProtectionResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this HotlinkProtectionResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'HotlinkProtectionResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'HotlinkProtectionResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class Http2Resp():
"""
HTTP2 Response.
:attr Http2RespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'Http2RespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize an Http2Resp object.
:param Http2RespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'Http2Resp':
"""Initialize a Http2Resp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = Http2RespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in Http2Resp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in Http2Resp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in Http2Resp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in Http2Resp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Http2Resp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Http2Resp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Http2Resp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Http2Resp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class Http3Resp():
"""
HTTP3 Response.
:attr Http3RespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'Http3RespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize an Http3Resp object.
:param Http3RespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'Http3Resp':
"""Initialize a Http3Resp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = Http3RespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in Http3Resp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in Http3Resp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in Http3Resp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in Http3Resp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Http3Resp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Http3Resp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Http3Resp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Http3Resp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ImageLoadOptimizationResp():
"""
Image Load Optimization response.
:attr ImageLoadOptimizationRespResult result: Container for response
information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'ImageLoadOptimizationRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize an ImageLoadOptimizationResp object.
:param ImageLoadOptimizationRespResult result: Container for response
information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'ImageLoadOptimizationResp':
"""Initialize a ImageLoadOptimizationResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = ImageLoadOptimizationRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in ImageLoadOptimizationResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ImageLoadOptimizationResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ImageLoadOptimizationResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ImageLoadOptimizationResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ImageLoadOptimizationResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ImageLoadOptimizationResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ImageLoadOptimizationResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ImageLoadOptimizationResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ImageSizeOptimizationResp():
"""
Image size optimization response.
:attr ImageSizeOptimizationRespResult result: Container for response
information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'ImageSizeOptimizationRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize an ImageSizeOptimizationResp object.
:param ImageSizeOptimizationRespResult result: Container for response
information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'ImageSizeOptimizationResp':
"""Initialize a ImageSizeOptimizationResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = ImageSizeOptimizationRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in ImageSizeOptimizationResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ImageSizeOptimizationResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ImageSizeOptimizationResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ImageSizeOptimizationResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ImageSizeOptimizationResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ImageSizeOptimizationResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ImageSizeOptimizationResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ImageSizeOptimizationResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class IpGeolocationResp():
"""
IP Geolocation response.
:attr IpGeolocationRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'IpGeolocationRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize an IpGeolocationResp object.
:param IpGeolocationRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'IpGeolocationResp':
"""Initialize a IpGeolocationResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = IpGeolocationRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in IpGeolocationResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in IpGeolocationResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in IpGeolocationResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in IpGeolocationResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a IpGeolocationResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this IpGeolocationResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'IpGeolocationResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'IpGeolocationResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class Ipv6Resp():
"""
IPv6 Response.
:attr Ipv6RespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'Ipv6RespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize an Ipv6Resp object.
:param Ipv6RespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'Ipv6Resp':
"""Initialize a Ipv6Resp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = Ipv6RespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in Ipv6Resp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in Ipv6Resp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in Ipv6Resp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in Ipv6Resp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Ipv6Resp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Ipv6Resp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Ipv6Resp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Ipv6Resp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
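# --- Sketch: printing a model ---
# __str__ delegates to json.dumps(self.to_dict(), indent=2), so str(resp) or
# print(resp) yields pretty-printed JSON in declaration order. The values
# shown below are illustrative:
#
# print(ipv6_resp)
# # {
# #   "result": {...},
# #   "success": true,
# #   "errors": [],
# #   "messages": []
# # }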
class MaxUploadResp():
"""
Maximum upload response.
:attr MaxUploadRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'MaxUploadRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a MaxUploadResp object.
:param MaxUploadRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'MaxUploadResp':
"""Initialize a MaxUploadResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = MaxUploadRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in MaxUploadResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in MaxUploadResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in MaxUploadResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in MaxUploadResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MaxUploadResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MaxUploadResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MaxUploadResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MaxUploadResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class MinTlsVersionResp():
"""
Minimum TLS Version response.
:attr MinTlsVersionRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'MinTlsVersionRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a MinTlsVersionResp object.
:param MinTlsVersionRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'MinTlsVersionResp':
"""Initialize a MinTlsVersionResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = MinTlsVersionRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in MinTlsVersionResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in MinTlsVersionResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in MinTlsVersionResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in MinTlsVersionResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MinTlsVersionResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MinTlsVersionResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MinTlsVersionResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MinTlsVersionResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
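# --- Sketch: validation in from_dict ---
# Every from_dict in this file checks all four required properties and raises
# ValueError for the first one missing (in declaration order), so a partial
# payload fails fast instead of yielding a half-initialized object:
#
# try:
#     MinTlsVersionResp.from_dict({'success': True})
# except ValueError as err:
#     print(err)  # Required property 'result' not present in MinTlsVersionResp JSON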
class MinifyResp():
"""
Minify response.
:attr MinifyRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'MinifyRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a MinifyResp object.
:param MinifyRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'MinifyResp':
"""Initialize a MinifyResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = MinifyRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in MinifyResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in MinifyResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in MinifyResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in MinifyResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MinifyResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MinifyResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MinifyResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MinifyResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class MobileRedirectResp():
"""
Mobile Redirect Response.
:attr MobileRedirectRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'MobileRedirectRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a MobileRedirectResp object.
:param MobileRedirectRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'MobileRedirectResp':
"""Initialize a MobileRedirectResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = MobileRedirectRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in MobileRedirectResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in MobileRedirectResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in MobileRedirectResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in MobileRedirectResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MobileRedirectResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MobileRedirectResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MobileRedirectResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MobileRedirectResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class OpportunisticEncryptionResp():
"""
Opportunistic encryption response.
:attr OpportunisticEncryptionRespResult result: Container for response
information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'OpportunisticEncryptionRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize an OpportunisticEncryptionResp object.
:param OpportunisticEncryptionRespResult result: Container for response
information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'OpportunisticEncryptionResp':
"""Initialize a OpportunisticEncryptionResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = OpportunisticEncryptionRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in OpportunisticEncryptionResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in OpportunisticEncryptionResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in OpportunisticEncryptionResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in OpportunisticEncryptionResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a OpportunisticEncryptionResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this OpportunisticEncryptionResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'OpportunisticEncryptionResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'OpportunisticEncryptionResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class OriginErrorPagePassThruResp():
"""
Origin error page pass through response.
:attr OriginErrorPagePassThruRespResult result: Container for response
information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'OriginErrorPagePassThruRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize an OriginErrorPagePassThruResp object.
:param OriginErrorPagePassThruRespResult result: Container for response
information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'OriginErrorPagePassThruResp':
"""Initialize a OriginErrorPagePassThruResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = OriginErrorPagePassThruRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in OriginErrorPagePassThruResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in OriginErrorPagePassThruResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in OriginErrorPagePassThruResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in OriginErrorPagePassThruResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a OriginErrorPagePassThruResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this OriginErrorPagePassThruResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'OriginErrorPagePassThruResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'OriginErrorPagePassThruResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class PrefetchPreloadResp():
"""
Prefetch & Preload Response.
:attr PrefetchPreloadRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'PrefetchPreloadRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a PrefetchPreloadResp object.
:param PrefetchPreloadRespResult result: Container for response
information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'PrefetchPreloadResp':
"""Initialize a PrefetchPreloadResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = PrefetchPreloadRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in PrefetchPreloadResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in PrefetchPreloadResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in PrefetchPreloadResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in PrefetchPreloadResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a PrefetchPreloadResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this PrefetchPreloadResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'PrefetchPreloadResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'PrefetchPreloadResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class PseudoIpv4Resp():
"""
Pseudo IPv4 response.
:attr PseudoIpv4RespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'PseudoIpv4RespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a PseudoIpv4Resp object.
:param PseudoIpv4RespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'PseudoIpv4Resp':
"""Initialize a PseudoIpv4Resp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = PseudoIpv4RespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in PseudoIpv4Resp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in PseudoIpv4Resp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in PseudoIpv4Resp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in PseudoIpv4Resp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a PseudoIpv4Resp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this PseudoIpv4Resp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'PseudoIpv4Resp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'PseudoIpv4Resp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ResponseBufferingResp():
"""
Response buffering response.
:attr ResponseBufferingRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'ResponseBufferingRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a ResponseBufferingResp object.
:param ResponseBufferingRespResult result: Container for response
information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'ResponseBufferingResp':
"""Initialize a ResponseBufferingResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = ResponseBufferingRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in ResponseBufferingResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ResponseBufferingResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ResponseBufferingResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ResponseBufferingResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ResponseBufferingResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ResponseBufferingResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ResponseBufferingResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ResponseBufferingResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ScriptLoadOptimizationResp():
"""
Script load optimization response.
:attr ScriptLoadOptimizationRespResult result: Container for response
information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'ScriptLoadOptimizationRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a ScriptLoadOptimizationResp object.
:param ScriptLoadOptimizationRespResult result: Container for response
information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'ScriptLoadOptimizationResp':
"""Initialize a ScriptLoadOptimizationResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = ScriptLoadOptimizationRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in ScriptLoadOptimizationResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ScriptLoadOptimizationResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ScriptLoadOptimizationResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ScriptLoadOptimizationResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ScriptLoadOptimizationResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ScriptLoadOptimizationResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ScriptLoadOptimizationResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ScriptLoadOptimizationResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
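# --- Sketch: equality semantics ---
# __eq__ compares __dict__ after an isinstance check, so two instances parsed
# from the same payload are equal (the check recurses through the nested
# result model, which defines __eq__ the same way), while instances of
# different model classes are never equal even if their attributes match.
# `payload` below is a hypothetical complete dict:
#
# a = ScriptLoadOptimizationResp.from_dict(payload)
# b = ScriptLoadOptimizationResp.from_dict(payload)
# assert a == b and not (a != b)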
class SecurityHeaderResp():
"""
Security header response.
:attr SecurityHeaderRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'SecurityHeaderRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a SecurityHeaderResp object.
:param SecurityHeaderRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'SecurityHeaderResp':
"""Initialize a SecurityHeaderResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = SecurityHeaderRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in SecurityHeaderResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in SecurityHeaderResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in SecurityHeaderResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in SecurityHeaderResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a SecurityHeaderResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this SecurityHeaderResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'SecurityHeaderResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'SecurityHeaderResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ServerSideExcludeResp():
"""
Server side exclude response.
:attr ServerSideExcludeRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'ServerSideExcludeRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a ServerSideExcludeResp object.
:param ServerSideExcludeRespResult result: Container for response
information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'ServerSideExcludeResp':
"""Initialize a ServerSideExcludeResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = ServerSideExcludeRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in ServerSideExcludeResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ServerSideExcludeResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ServerSideExcludeResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ServerSideExcludeResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ServerSideExcludeResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ServerSideExcludeResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ServerSideExcludeResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ServerSideExcludeResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class TlsClientAuthResp():
"""
TLS Client authentication response.
:attr TlsClientAuthRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'TlsClientAuthRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a TlsClientAuthResp object.
:param TlsClientAuthRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'TlsClientAuthResp':
"""Initialize a TlsClientAuthResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = TlsClientAuthRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in TlsClientAuthResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in TlsClientAuthResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in TlsClientAuthResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in TlsClientAuthResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a TlsClientAuthResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this TlsClientAuthResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'TlsClientAuthResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'TlsClientAuthResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class TrueClientIpResp():
"""
True client IP response.
:attr TrueClientIpRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'TrueClientIpRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a TrueClientIpResp object.
:param TrueClientIpRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'TrueClientIpResp':
"""Initialize a TrueClientIpResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = TrueClientIpRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in TrueClientIpResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in TrueClientIpResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in TrueClientIpResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in TrueClientIpResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a TrueClientIpResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this TrueClientIpResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'TrueClientIpResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'TrueClientIpResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class WafResp():
"""
WAF Response.
:attr WafRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'WafRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a WafResp object.
:param WafRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'WafResp':
"""Initialize a WafResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = WafRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in WafResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in WafResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in WafResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in WafResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a WafResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this WafResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'WafResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'WafResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class WebsocketsResp():
"""
Websocket Response.
:attr WebsocketsRespResult result: Container for response information.
:attr bool success: Was the get successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
"""
def __init__(self,
result: 'WebsocketsRespResult',
success: bool,
errors: List[List[str]],
messages: List[List[str]]) -> None:
"""
Initialize a WebsocketsResp object.
:param WebsocketsRespResult result: Container for response information.
:param bool success: Was the get successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
"""
self.result = result
self.success = success
self.errors = errors
self.messages = messages
@classmethod
def from_dict(cls, _dict: Dict) -> 'WebsocketsResp':
"""Initialize a WebsocketsResp object from a json dictionary."""
args = {}
if 'result' in _dict:
args['result'] = WebsocketsRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in WebsocketsResp JSON')
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in WebsocketsResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in WebsocketsResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in WebsocketsResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a WebsocketsResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this WebsocketsResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'WebsocketsResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'WebsocketsResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ZonesCnameFlatteningResp():
"""
Zones CNAME flattening response.
:attr bool success: Was operation successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
:attr CnameFlatteningResponse result: CNAME Flattening response.
"""
def __init__(self,
success: bool,
errors: List[List[str]],
messages: List[List[str]],
result: 'CnameFlatteningResponse') -> None:
"""
Initialize a ZonesCnameFlatteningResp object.
:param bool success: Was operation successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
:param CnameFlatteningResponse result: CNAME Flattening response.
"""
self.success = success
self.errors = errors
self.messages = messages
self.result = result
@classmethod
def from_dict(cls, _dict: Dict) -> 'ZonesCnameFlatteningResp':
"""Initialize a ZonesCnameFlatteningResp object from a json dictionary."""
args = {}
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ZonesCnameFlatteningResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ZonesCnameFlatteningResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ZonesCnameFlatteningResp JSON')
if 'result' in _dict:
args['result'] = CnameFlatteningResponse.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in ZonesCnameFlatteningResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ZonesCnameFlatteningResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ZonesCnameFlatteningResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ZonesCnameFlatteningResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ZonesCnameFlatteningResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
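# --- Sketch: key order in the Zones* responses ---
# Unlike the settings responses earlier in this file, the Zones* models
# declare `result` last, and to_dict inserts keys in declaration order
# (Python dicts preserve insertion order), so the serialized layout is
# success/errors/messages/result. `payload` is a hypothetical complete dict:
#
# z = ZonesCnameFlatteningResp.from_dict(payload)
# list(z.to_dict().keys())  # ['success', 'errors', 'messages', 'result']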
class ZonesDnssecResp():
"""
Zones DNSSEC response.
:attr bool success: Was operation successful.
:attr List[List[str]] errors: Array of errors encountered.
:attr List[List[str]] messages: Array of messages returned.
:attr ZonesDnssecRespResult result: Container for response information.
"""
def __init__(self,
success: bool,
errors: List[List[str]],
messages: List[List[str]],
result: 'ZonesDnssecRespResult') -> None:
"""
Initialize a ZonesDnssecResp object.
:param bool success: Was operation successful.
:param List[List[str]] errors: Array of errors encountered.
:param List[List[str]] messages: Array of messages returned.
:param ZonesDnssecRespResult result: Container for response information.
"""
self.success = success
self.errors = errors
self.messages = messages
self.result = result
@classmethod
def from_dict(cls, _dict: Dict) -> 'ZonesDnssecResp':
"""Initialize a ZonesDnssecResp object from a json dictionary."""
args = {}
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ZonesDnssecResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ZonesDnssecResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ZonesDnssecResp JSON')
if 'result' in _dict:
args['result'] = ZonesDnssecRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in ZonesDnssecResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ZonesDnssecResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ZonesDnssecResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ZonesDnssecResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ZonesDnssecResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
| 38.884999 | 150 | 0.604569 | 35,593 | 327,645 | 5.383278 | 0.012727 | 0.019624 | 0.014138 | 0.018183 | 0.920457 | 0.905113 | 0.869138 | 0.859207 | 0.834829 | 0.829156 | 0 | 0.002047 | 0.293223 | 327,645 | 8,426 | 151 | 38.884999 | 0.825372 | 0.249389 | 0 | 0.835112 | 0 | 0 | 0.170423 | 0.051377 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117556 | false | 0.00388 | 0.001746 | 0 | 0.2516 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
43056b0d566f588438b9393018b911461d84819a | 56 | py | Python | neuron_ml/core/__init__.py | fossabot/Neuron | ee8b328411bddb9c86675914b0e0b50250fb7ff9 | ["MIT"] | 9 | 2018-12-18T06:19:09.000Z | 2021-11-22T19:46:13.000Z | neuron_ml/core/__init__.py | fossabot/Neuron | ee8b328411bddb9c86675914b0e0b50250fb7ff9 | ["MIT"] | 20 | 2018-11-23T16:09:04.000Z | 2022-02-10T00:06:17.000Z | neuron_ml/core/__init__.py | fossabot/Neuron | ee8b328411bddb9c86675914b0e0b50250fb7ff9 | ["MIT"] | 1 | 2019-02-25T11:58:20.000Z | 2019-02-25T11:58:20.000Z |
import neuron_ml.core.data
import neuron_ml.core.public
| 18.666667 | 28 | 0.857143 | 10 | 56 | 4.6 | 0.6 | 0.521739 | 0.608696 | 0.782609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.071429 | 56 | 2 | 29 | 28 | 0.884615 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
4ac2ac18bf7147ae9819601842a86c49c095127b | 9,773 | py | Python | Admin-Vulnerability-Finder/adminscan_dec.py | shyamjangid07/Reverse-Engineering | 469efabcd6057f7895d8d891f1fabdf2ffe730b0 | ["Apache-2.0"] | 337 | 2020-08-15T12:22:14.000Z | 2022-03-29T06:05:15.000Z | Admin-Vulnerability-Finder/adminscan_dec.py | shyamjangid07/Reverse-Engineering | 469efabcd6057f7895d8d891f1fabdf2ffe730b0 | ["Apache-2.0"] | 3 | 2020-11-12T14:30:48.000Z | 2021-05-18T16:56:22.000Z | Admin-Vulnerability-Finder/adminscan_dec.py | shyamjangid07/Reverse-Engineering | 469efabcd6057f7895d8d891f1fabdf2ffe730b0 | ["Apache-2.0"] | 83 | 2020-08-15T00:22:58.000Z | 2022-03-31T08:40:23.000Z |
# Decompiled by HTR-TECH | TAHMID RAYAT
#---------------------------------------
# Source File : 1
# Time : Wed May 12 05:43:31 2021
#---------------------------------------
# uncompyle6 version 3.7.4
# Python bytecode 2.7
# Decompiled from: Python 2.7.16 (default, Oct 10 2019, 22:02:15)
# [GCC 8.3.0]
# Embedded file name: sumarr
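# NOTE: decompiled Python 2 source -- httplib, raw_input, and print statements mean this only runs under Python 2.x.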
import sys, httplib, socket, os, time
os.system('clear')
def logop(z):
for word in z + '\n':
sys.stdout.write(word)
sys.stdout.flush()
time.sleep(0.001)
def logop2(z):
for word in z + '\n':
sys.stdout.write(word)
sys.stdout.flush()
time.sleep(0.01)
banner1 = '\x1b[91m\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \n\x1b[91m\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \n\x1b[91m\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81 \n\x1b[91m\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \n\x1b[91m\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81 \n \xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \n \n \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \n \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\n \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\n \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 
\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\n \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\xe2\x96\x88\xe2\x96\x88\xe2\x80\x81 \xe2\x96\x88\xe2\x96\x88\xe2\x80\x81\n \xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\xe2\x80\x81 \xe2\x80\x81\xe2\x80\x81\xe2\x80\x81\n '
banner2 = '\x1b[93m\n Author : Dark Hunter 141\n Tool : Web Hunter\n Version : 1.0\n Github : https://github.com/darkhunter141\n Facebook : https://www.facebook.com/darkhunter141\n Devolopers: Ashrafi Abir (DarkXploit)\n Tanvir Mahamud Shariful (DarkWolf)'
line = '\x1b[94m===================================================================================================================================='
class darkxploit:
HEADER = '\x1b[95m'
BLUE = '\x1b[94m'
GREEN = '\x1b[92m'
YELLOW = '\x1b[93m'
RED = '\x1b[91m'
ENDC = '\x1b[0m'
cyan = '\x1b[96m'
class adminfinder:
logop(banner1)
logop2(banner2)
print ''
print ''
logop(line)
print ''
print ''
logop('\x1b[93m \xe2\x9c\xae Admin Scanner \xe2\x9c\xae')
def __init__(self):
self.admin_locate()
def admin_locate(self):
try:
try:
print ''
print ''
site = raw_input(darkxploit.cyan + '[\xe2\x82\xac]Enter the Web Site URL (www.site.com): ' + darkxploit.GREEN)
dork = raw_input(darkxploit.cyan + 'Enter The Admin Directory [admin,administrator etc]: ' + darkxploit.GREEN)
site = site.replace('http://', '')
print darkxploit.YELLOW + '\n\t[*] Checking the website ' + site + darkxploit.ENDC
conn = httplib.HTTPConnection(site)
conn.connect()
print darkxploit.GREEN + "\t[+] Connection Established, It's Online.\n" + darkxploit.ENDC
            except (httplib.HTTPException, socket.error):
print darkxploit.RED + '\t[!] Cannot Connect the Website, It might be offline or invalid URL.\n' + darkxploit.ENDC
sys.exit()
print darkxploit.YELLOW + '\t[*] Scanning: ' + site + darkxploit.ENDC + '\n'
wordfile = open('scan.txt', 'r')
wordlist = wordfile.readlines()
wordfile.close()
for word in wordlist:
admin = word.strip('\n')
admin = '/' + dork + '/' + admin
target = site + admin
print darkxploit.YELLOW + '[\xe2\x9c\x93] Checking: ' + target + darkxploit.ENDC
connection = httplib.HTTPConnection(site)
connection.request('GET', admin)
response = connection.getresponse()
if response.status == 200:
print darkxploit.GREEN + '' + darkxploit.ENDC
print '%s %s' % (darkxploit.GREEN + '\t[\xe2\x9c\x93] Admin Page Found >> ' + darkxploit.ENDC, darkxploit.GREEN + target + darkxploit.ENDC)
print darkxploit.GREEN + '' + darkxploit.ENDC
raw_input(darkxploit.YELLOW + '[$] Press enter to continue.\n' + darkxploit.ENDC)
elif response.status == 302:
print darkxploit.RED + '[!] 302 Object moved temporarily.\n' + darkxploit.ENDC
elif response.status == 404:
print darkxploit.RED + '[\xc3\x97] 404 Web Page Not Found.\n' + darkxploit.ENDC
elif response.status == 410:
print darkxploit.RED + '[!] 410 Object removed permanently.\n' + darkxploit.ENDC
else:
                    print '%s %s %s %s' % (darkxploit.RED + 'Unknown Response: ' + darkxploit.ENDC, darkxploit.RED + str(response.status) + darkxploit.ENDC, '\n', darkxploit.RED + target + darkxploit.ENDC)
connection.close()
        except (httplib.HTTPException, socket.error):
            print darkxploit.RED + '\n\t[!] Session Cancelled, An Error Occurred.' + darkxploit.ENDC
print darkxploit.RED + '\t[!] Check Your Internet Connection' + darkxploit.ENDC
except (KeyboardInterrupt, SystemExit):
print darkxploit.RED + '\t[!] Session Interrupted and Cancelled.' + darkxploit.ENDC
if __name__ == '__main__':
adminfinder()
| 91.336449 | 5,108 | 0.618234 | 1,683 | 9,773 | 3.579917 | 0.120024 | 0.21112 | 0.316681 | 0.422241 | 0.641328 | 0.618091 | 0.60166 | 0.60166 | 0.60166 | 0.60166 | 0 | 0.259375 | 0.181418 | 9,773 | 107 | 5,109 | 91.336449 | 0.49375 | 0.031925 | 0 | 0.195122 | 0 | 0.02439 | 0.669418 | 0.511111 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.012195 | null | null | 0.256098 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
4ac5bd0a7fed59f1c7822d9f9e1e3cfcff488a2c | 3,763 | py | Python | nb2plots/tests/test_codelinks.py | codejamninja/nb2plots | 0d1f7f1de08ea1bf52398773cf71456f5a0d7550 | ["BSD-2-Clause"] | null | null | null | nb2plots/tests/test_codelinks.py | codejamninja/nb2plots | 0d1f7f1de08ea1bf52398773cf71456f5a0d7550 | ["BSD-2-Clause"] | null | null | null | nb2plots/tests/test_codelinks.py | codejamninja/nb2plots | 0d1f7f1de08ea1bf52398773cf71456f5a0d7550 | ["BSD-2-Clause"] | null | null | null |
""" Test code-links directive """
from os.path import isfile, join as pjoin
import re
from nb2plots.converters import to_pxml
from nb2plots.testing import PlotsBuilder
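# The tests below render RST to pseudo-XML with to_pxml and regex-match the
# expected doctree, so the backslash escapes in the expected strings are regex syntax.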
def test_codelinks():
def as_pxml(rst_text):
return to_pxml.from_rst(rst_text, resolve=False)
page = """\
Text here
.. code-links::
More text here."""
both_re = re.compile("""\
<document source=".*?">
<paragraph>
Text here
<code_links>
<bullet_list bullet="\*">
<list_item>
<paragraph>
<runrole_reference refdoc="contents" reftarget="/contents.py" reftype="pyfile">
Download this page as a Python code file
;
<list_item>
<paragraph>
<runrole_reference refdoc="contents" reftarget="/contents.ipynb" reftype="clearnotebook">
Download this page as a Jupyter notebook \(no outputs\)
;
<list_item>
<paragraph>
<runrole_reference refdoc="contents" reftarget="/contents_full.ipynb" reftype="fullnotebook">
Download this page as a Jupyter notebook \(with outputs\)
.
<paragraph>
More text here.""")
pxml = as_pxml(page)
assert both_re.match(pxml)
# Default is 'both'
page = """\
Text here
.. code-links:: python clear full
More text here."""
pxml = as_pxml(page)
assert both_re.match(pxml)
page = """\
Text here
.. code-links:: clear
More text here."""
pxml = as_pxml(page)
assert re.match("""\
<document source=".*?">
<paragraph>
Text here
<code_links>
<bullet_list bullet="\*">
<list_item>
<paragraph>
<runrole_reference refdoc="contents" reftarget="/contents.ipynb" reftype="clearnotebook">
Download this page as a Jupyter notebook \(no outputs\)
.
<paragraph>
More text here.""", pxml)
page = """\
Text here
.. code-links:: full
More text here."""
pxml = as_pxml(page)
assert re.match("""\
<document source=".*?">
<paragraph>
Text here
<code_links>
<bullet_list bullet="\*">
<list_item>
<paragraph>
<runrole_reference refdoc="contents" reftarget="/contents_full.ipynb" reftype="fullnotebook">
Download this page as a Jupyter notebook \(with outputs\)
.
<paragraph>
More text here.""", pxml)
page = """\
Text here
.. code-links:: full python
More text here."""
pxml = as_pxml(page)
assert re.match("""\
<document source=".*?">
<paragraph>
Text here
<code_links>
<bullet_list bullet="\*">
<list_item>
<paragraph>
<runrole_reference refdoc="contents" reftarget="/contents_full.ipynb" reftype="fullnotebook">
Download this page as a Jupyter notebook \(with outputs\)
;
<list_item>
<paragraph>
<runrole_reference refdoc="contents" reftarget="/contents.py" reftype="pyfile">
Download this page as a Python code file
.
<paragraph>
More text here.""", pxml)
class TestSubdirCodeLinks(PlotsBuilder):
""" Test output file locations for code-links directive.
"""
rst_sources = {'foo/a_page': """\
A section
#########
.. code-links::
More text.
"""}
def test_output(self):
for suffix in ('.py', '.ipynb', '_full.ipynb'):
assert isfile(pjoin(self.out_dir, 'foo', 'a_page' + suffix))
| 26.878571 | 113 | 0.546107 | 385 | 3,763 | 5.218182 | 0.2 | 0.071677 | 0.053758 | 0.076157 | 0.764062 | 0.730712 | 0.720259 | 0.720259 | 0.720259 | 0.718268 | 0 | 0.000794 | 0.330853 | 3,763 | 139 | 114 | 27.071942 | 0.797061 | 0.027372 | 0 | 0.738739 | 0 | 0 | 0.760077 | 0.086372 | 0 | 0 | 0 | 0 | 0.054054 | 1 | 0.027027 | false | 0 | 0.036036 | 0.009009 | 0.09009 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
43624f4dce10178af59e0b9906808df196178902 | 4,662 | py | Python | tests/test_mapbox.py | sackh/maps-cli | 64cc1877518c88bc9b885ebc22580b595bee6fcc | ["MIT"] | 5 | 2021-01-21T08:19:43.000Z | 2021-12-12T06:20:53.000Z | tests/test_mapbox.py | sackh/maps-cli | 64cc1877518c88bc9b885ebc22580b595bee6fcc | ["MIT"] | null | null | null | tests/test_mapbox.py | sackh/maps-cli | 64cc1877518c88bc9b885ebc22580b595bee6fcc | ["MIT"] | null | null | null |
"""Module to test MapBox services."""
import os
from click.testing import CliRunner
from maps.commands import maps
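# These tests drive the CLI through Click's CliRunner and assume a valid
# MAPBOX_APIKEY in the environment; the *_exception tests unset it and expect exit code 2.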
def test_show():
"""Test mapbox show command."""
runner = CliRunner()
result = runner.invoke(maps, ["mapbox", "show"], catch_exceptions=False)
assert result.output == "geocoding\nisochrone\nmatrix\n"
def test_geocoding_fwd():
runner = CliRunner()
result = runner.invoke(
maps,
["mapbox", "geocoding", "--forward", "springfield"],
catch_exceptions=False,
)
assert result.exit_code == 0
assert result.output == '{\n "lat": 37.2153,\n "lon": -93.2983\n}\n'
raw_result = runner.invoke(
maps,
["mapbox", "geocoding", "--forward", "springfield", "--raw"],
catch_exceptions=False,
)
assert raw_result.exit_code == 0
def test_geocoding_reverse():
runner = CliRunner()
result = runner.invoke(
maps,
["mapbox", "geocoding", "--reverse", "19.16153,72.85618"],
catch_exceptions=False,
)
assert result.exit_code == 0
assert "Haptik, 8th Floor" in result.output
raw_result = runner.invoke(
maps,
["mapbox", "geocoding", "--reverse", "19.16153,72.85618", "--raw"],
catch_exceptions=False,
)
assert raw_result.exit_code == 0
def test_geocoding_exception():
api_key = os.environ["MAPBOX_APIKEY"]
try:
del os.environ["MAPBOX_APIKEY"]
runner = CliRunner()
result = runner.invoke(
maps,
["mapbox", "geocoding", "--forward", "springfield"],
catch_exceptions=False,
)
finally:
os.environ["MAPBOX_APIKEY"] = api_key
assert result.exit_code == 2
def test_isochrone():
runner = CliRunner()
result = runner.invoke(
maps,
[
"mapbox",
"isochrone",
"--profile=driving",
"--coordinates=-118.22258,33.99038",
"--contours_minutes=5",
"--contours_colors=6706ce",
"--polygons",
],
catch_exceptions=False,
)
assert result.exit_code == 0
assert "FeatureCollection" in result.output
def test_isochrone_exception():
api_key = os.environ["MAPBOX_APIKEY"]
try:
del os.environ["MAPBOX_APIKEY"]
runner = CliRunner()
result = runner.invoke(
maps,
[
"mapbox",
"isochrone",
"--profile=driving",
"--coordinates=-118.22258,33.99038",
"--contours_minutes=5",
"--contours_colors=6706ce",
"--polygons",
],
catch_exceptions=False,
)
finally:
os.environ["MAPBOX_APIKEY"] = api_key
assert result.exit_code == 2
def test_matrix():
runner = CliRunner()
result = runner.invoke(
maps,
[
"mapbox",
"matrix",
"--profile=driving",
"--coordinates=-122.42,37.78;-122.45,37.91;-122.48,37.73",
"--annotations=distance,duration",
"--approaches=curb;curb;curb",
"--destinations=all",
],
catch_exceptions=False,
)
assert result.exit_code == 0
assert '"code": "Ok"' in result.output
def test_matrix_exception():
api_key = os.environ["MAPBOX_APIKEY"]
try:
del os.environ["MAPBOX_APIKEY"]
runner = CliRunner()
result = runner.invoke(
maps,
[
"mapbox",
"matrix",
"--profile=driving",
"--coordinates=-122.42,37.78;-122.45,37.91;-122.48,37.73",
"--annotations=distance,duration",
"--approaches=curb;curb;curb",
"--destinations=all",
],
catch_exceptions=False,
)
finally:
os.environ["MAPBOX_APIKEY"] = api_key
assert result.exit_code == 2
def test_mock_display(mocker):
mocker.patch("maps.mapbox.geo_display", return_value=True)
runner = CliRunner()
result = runner.invoke(
maps,
[
"mapbox",
"isochrone",
"--profile=driving",
"--coordinates=-118.22258,33.99038",
"--contours_minutes=5",
"--contours_colors=6706ce",
"--polygons",
"--display",
],
catch_exceptions=False,
)
assert result.exit_code == 0
runner = CliRunner()
result = runner.invoke(
maps,
["mapbox", "geocoding", "--forward", "springfield", "--display"],
catch_exceptions=False,
)
| 26.947977 | 76 | 0.538181 | 453 | 4,662 | 5.397351 | 0.216336 | 0.05317 | 0.088344 | 0.107975 | 0.837219 | 0.806953 | 0.806953 | 0.786912 | 0.741513 | 0.689162 | 0 | 0.052001 | 0.319391 | 4,662 | 172 | 77 | 27.104651 | 0.718563 | 0.012227 | 0 | 0.733333 | 0 | 0.02 | 0.260557 | 0.097954 | 0 | 0 | 0 | 0 | 0.1 | 1 | 0.06 | false | 0 | 0.02 | 0 | 0.08 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
43dc538f79a12a8ccc73ae70728440d20d168038 | 11,066 | py | Python | sdk/python/pulumi_aws/athena/workgroup.py | johnktims/pulumi-aws | c838bc79043f5376c66fc66275a1e012edd3ab7d | ["ECL-2.0", "Apache-2.0"] | null | null | null | sdk/python/pulumi_aws/athena/workgroup.py | johnktims/pulumi-aws | c838bc79043f5376c66fc66275a1e012edd3ab7d | ["ECL-2.0", "Apache-2.0"] | null | null | null | sdk/python/pulumi_aws/athena/workgroup.py | johnktims/pulumi-aws | c838bc79043f5376c66fc66275a1e012edd3ab7d | ["ECL-2.0", "Apache-2.0"] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class Workgroup(pulumi.CustomResource):
arn: pulumi.Output[str]
"""
Amazon Resource Name (ARN) of the workgroup
"""
configuration: pulumi.Output[dict]
"""
Configuration block with various settings for the workgroup. Documented below.
* `bytesScannedCutoffPerQuery` (`float`) - Integer for the upper data usage limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan. Must be at least `10485760`.
* `enforceWorkgroupConfiguration` (`bool`) - Boolean whether the settings for the workgroup override client-side settings. For more information, see [Workgroup Settings Override Client-Side Settings](https://docs.aws.amazon.com/athena/latest/ug/workgroups-settings-override.html). Defaults to `true`.
* `publishCloudwatchMetricsEnabled` (`bool`) - Boolean whether Amazon CloudWatch metrics are enabled for the workgroup. Defaults to `true`.
* `resultConfiguration` (`dict`) - Configuration block with result settings. Documented below.
* `encryption_configuration` (`dict`) - Configuration block with encryption settings. Documented below.
* `encryptionOption` (`str`) - Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (`SSE_S3`), server-side encryption with KMS-managed keys (`SSE_KMS`), or client-side encryption with KMS-managed keys (`CSE_KMS`) is used. If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
* `kms_key_arn` (`str`) - For `SSE_KMS` and `CSE_KMS`, this is the KMS key Amazon Resource Name (ARN).
* `output_location` (`str`) - The location in Amazon S3 where your query results are stored, such as `s3://path/to/query/bucket/`. For more information, see [Queries and Query Result Files](https://docs.aws.amazon.com/athena/latest/ug/querying.html).
"""
description: pulumi.Output[str]
"""
Description of the workgroup.
"""
force_destroy: pulumi.Output[bool]
"""
The option to delete the workgroup and its contents even if the workgroup contains any named queries.
"""
name: pulumi.Output[str]
"""
Name of the workgroup.
"""
state: pulumi.Output[str]
"""
State of the workgroup. Valid values are `DISABLED` or `ENABLED`. Defaults to `ENABLED`.
"""
tags: pulumi.Output[dict]
"""
Key-value mapping of resource tags for the workgroup.
"""
def __init__(__self__, resource_name, opts=None, configuration=None, description=None, force_destroy=None, name=None, state=None, tags=None, __props__=None, __name__=None, __opts__=None):
"""
Provides an Athena Workgroup.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[dict] configuration: Configuration block with various settings for the workgroup. Documented below.
:param pulumi.Input[str] description: Description of the workgroup.
:param pulumi.Input[bool] force_destroy: The option to delete the workgroup and its contents even if the workgroup contains any named queries.
:param pulumi.Input[str] name: Name of the workgroup.
:param pulumi.Input[str] state: State of the workgroup. Valid values are `DISABLED` or `ENABLED`. Defaults to `ENABLED`.
:param pulumi.Input[dict] tags: Key-value mapping of resource tags for the workgroup.
The **configuration** object supports the following:
* `bytesScannedCutoffPerQuery` (`pulumi.Input[float]`) - Integer for the upper data usage limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan. Must be at least `10485760`.
* `enforceWorkgroupConfiguration` (`pulumi.Input[bool]`) - Boolean whether the settings for the workgroup override client-side settings. For more information, see [Workgroup Settings Override Client-Side Settings](https://docs.aws.amazon.com/athena/latest/ug/workgroups-settings-override.html). Defaults to `true`.
* `publishCloudwatchMetricsEnabled` (`pulumi.Input[bool]`) - Boolean whether Amazon CloudWatch metrics are enabled for the workgroup. Defaults to `true`.
* `resultConfiguration` (`pulumi.Input[dict]`) - Configuration block with result settings. Documented below.
* `encryption_configuration` (`pulumi.Input[dict]`) - Configuration block with encryption settings. Documented below.
* `encryptionOption` (`pulumi.Input[str]`) - Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (`SSE_S3`), server-side encryption with KMS-managed keys (`SSE_KMS`), or client-side encryption with KMS-managed keys (`CSE_KMS`) is used. If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
* `kms_key_arn` (`pulumi.Input[str]`) - For `SSE_KMS` and `CSE_KMS`, this is the KMS key Amazon Resource Name (ARN).
* `output_location` (`pulumi.Input[str]`) - The location in Amazon S3 where your query results are stored, such as `s3://path/to/query/bucket/`. For more information, see [Queries and Query Result Files](https://docs.aws.amazon.com/athena/latest/ug/querying.html).
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['configuration'] = configuration
__props__['description'] = description
__props__['force_destroy'] = force_destroy
__props__['name'] = name
__props__['state'] = state
__props__['tags'] = tags
__props__['arn'] = None
super(Workgroup, __self__).__init__(
'aws:athena/workgroup:Workgroup',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name, id, opts=None, arn=None, configuration=None, description=None, force_destroy=None, name=None, state=None, tags=None):
"""
Get an existing Workgroup resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param str id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] arn: Amazon Resource Name (ARN) of the workgroup
:param pulumi.Input[dict] configuration: Configuration block with various settings for the workgroup. Documented below.
:param pulumi.Input[str] description: Description of the workgroup.
:param pulumi.Input[bool] force_destroy: The option to delete the workgroup and its contents even if the workgroup contains any named queries.
:param pulumi.Input[str] name: Name of the workgroup.
:param pulumi.Input[str] state: State of the workgroup. Valid values are `DISABLED` or `ENABLED`. Defaults to `ENABLED`.
:param pulumi.Input[dict] tags: Key-value mapping of resource tags for the workgroup.
The **configuration** object supports the following:
* `bytesScannedCutoffPerQuery` (`pulumi.Input[float]`) - Integer for the upper data usage limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan. Must be at least `10485760`.
* `enforceWorkgroupConfiguration` (`pulumi.Input[bool]`) - Boolean whether the settings for the workgroup override client-side settings. For more information, see [Workgroup Settings Override Client-Side Settings](https://docs.aws.amazon.com/athena/latest/ug/workgroups-settings-override.html). Defaults to `true`.
* `publishCloudwatchMetricsEnabled` (`pulumi.Input[bool]`) - Boolean whether Amazon CloudWatch metrics are enabled for the workgroup. Defaults to `true`.
* `resultConfiguration` (`pulumi.Input[dict]`) - Configuration block with result settings. Documented below.
* `encryption_configuration` (`pulumi.Input[dict]`) - Configuration block with encryption settings. Documented below.
* `encryptionOption` (`pulumi.Input[str]`) - Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (`SSE_S3`), server-side encryption with KMS-managed keys (`SSE_KMS`), or client-side encryption with KMS-managed keys (`CSE_KMS`) is used. If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
* `kms_key_arn` (`pulumi.Input[str]`) - For `SSE_KMS` and `CSE_KMS`, this is the KMS key Amazon Resource Name (ARN).
* `output_location` (`pulumi.Input[str]`) - The location in Amazon S3 where your query results are stored, such as `s3://path/to/query/bucket/`. For more information, see [Queries and Query Result Files](https://docs.aws.amazon.com/athena/latest/ug/querying.html).
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["arn"] = arn
__props__["configuration"] = configuration
__props__["description"] = description
__props__["force_destroy"] = force_destroy
__props__["name"] = name
__props__["state"] = state
__props__["tags"] = tags
return Workgroup(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 71.393548 | 508 | 0.707121 | 1,421 | 11,066 | 5.370162 | 0.151302 | 0.055039 | 0.027257 | 0.02385 | 0.800419 | 0.787053 | 0.781287 | 0.772114 | 0.763203 | 0.747608 | 0 | 0.004521 | 0.200434 | 11,066 | 154 | 509 | 71.857143 | 0.857934 | 0.509308 | 0 | 0.033898 | 1 | 0 | 0.137602 | 0.010612 | 0 | 0 | 0 | 0 | 0 | 1 | 0.067797 | false | 0.016949 | 0.101695 | 0.033898 | 0.355932 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
78e74265a7ae4d16777fed6dfb1ab57ec045052f | 3,876 | py | Python | marketing/search_indexes.py | sauravpanda/Django-CRM | c6b8cde02c9cf3d3f30f4e05b825f77d00734e87 | ["MIT"] | 1 | 2021-08-23T05:25:30.000Z | 2021-08-23T05:25:30.000Z | marketing/search_indexes.py | MrNevil/Django-CRM | 8cb9803748bb3e03f843c47413232185f78261f2 | ["MIT"] | null | null | null | marketing/search_indexes.py | MrNevil/Django-CRM | 8cb9803748bb3e03f843c47413232185f78261f2 | ["MIT"] | 1 | 2021-03-25T04:01:27.000Z | 2021-03-25T04:01:27.000Z |
from haystack import indexes
from marketing.models import Contact, FailedContact
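# Haystack search indexes for marketing contacts; the prepare_* methods below
# populate the fields declared without a model_attr.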
class MarketingContactIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(
document=True, use_template=True, template_name="search/contact_emails.txt"
)
id = indexes.CharField(model_attr="id")
email = indexes.EdgeNgramField(model_attr="email")
email_domain = indexes.EdgeNgramField()
name = indexes.CharField(model_attr="name")
company_name = indexes.CharField()
created_on = indexes.CharField(model_attr="created_on")
created_on_arrow = indexes.CharField(model_attr="created_on_arrow")
created_by = indexes.CharField()
created_by_id = indexes.CharField()
contact_lists = indexes.MultiValueField()
contact_lists_id = indexes.MultiValueField()
contact_lists_name = indexes.MultiValueField()
is_bounced = indexes.BooleanField()
def get_model(self):
return Contact
def prepare_email_domain(self, obj):
return obj.email.split("@")[-1]
def prepare_contact_lists(self, obj):
return [
[contact_list.id, contact_list.name if contact_list.name else ""]
for contact_list in obj.contact_list.all()
]
def prepare_contact_lists_id(self, obj):
return [
contact_list.id for contact_list in obj.contact_list.all().order_by("id")
]
def prepare_contact_lists_name(self, obj):
return [
contact_list.name for contact_list in obj.contact_list.all().order_by("id")
]
def prepare_company_name(self, obj):
return obj.company_name if obj.company_name else ""
def prepare_created_by(self, obj):
return obj.created_by.email if obj.created_by else ""
def prepare_created_by_id(self, obj):
return obj.created_by.id if obj.created_by else ""
def prepare_is_bounced(self, obj):
return obj.is_bounced
def index_queryset(self, using=None):
return self.get_model().objects.all()
class MarketingFailedContactIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(
document=True,
use_template=True,
template_name="search/failed_contact_emails.txt",
)
id = indexes.CharField(model_attr="id")
email = indexes.EdgeNgramField(model_attr="email")
    email_domain = indexes.EdgeNgramField()
name = indexes.CharField(model_attr="name")
company_name = indexes.CharField()
created_on = indexes.CharField(model_attr="created_on")
created_on_arrow = indexes.CharField(model_attr="created_on_arrow")
created_by = indexes.CharField()
created_by_id = indexes.CharField()
contact_lists = indexes.MultiValueField()
contact_lists_id = indexes.MultiValueField()
contact_lists_name = indexes.MultiValueField()
def get_model(self):
return FailedContact
def prepare_email_domain(self, obj):
return obj.email.split("@")[-1]
def prepare_contact_lists(self, obj):
return [
[contact_list.id, contact_list.name if contact_list.name else ""]
for contact_list in obj.contact_list.all()
]
def prepare_contact_lists_id(self, obj):
return [
contact_list.id for contact_list in obj.contact_list.all().order_by("id")
]
def prepare_contact_lists_name(self, obj):
return [
contact_list.name for contact_list in obj.contact_list.all().order_by("id")
]
def prepare_company_name(self, obj):
return obj.company_name if obj.company_name else ""
def prepare_created_by(self, obj):
return obj.created_by.email if obj.created_by else ""
def prepare_created_by_id(self, obj):
return obj.created_by.id if obj.created_by else ""
def index_queryset(self, using=None):
return self.get_model().objects.all()
| 33.704348 | 87 | 0.690918 | 492 | 3,876 | 5.178862 | 0.121951 | 0.094976 | 0.076531 | 0.056515 | 0.912873 | 0.896389 | 0.896389 | 0.893642 | 0.893642 | 0.893642 | 0 | 0.000653 | 0.209494 | 3,876 | 114 | 88 | 34 | 0.83094 | 0 | 0 | 0.741573 | 0 | 0 | 0.036378 | 0.014706 | 0 | 0 | 0 | 0 | 0 | 1 | 0.213483 | false | 0 | 0.022472 | 0.213483 | 0.775281 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 10 |
601774c2064f5f5932bc5283f104fda0577e31f0 | 3,016 | py | Python | unittests/test_hydrationparameter.py | ongjj323/DNPLab | 09fd9f21c3c48a3f122d0b0295cc982f689a9842 | ["MIT"] | null | null | null | unittests/test_hydrationparameter.py | ongjj323/DNPLab | 09fd9f21c3c48a3f122d0b0295cc982f689a9842 | ["MIT"] | null | null | null | unittests/test_hydrationparameter.py | ongjj323/DNPLab | 09fd9f21c3c48a3f122d0b0295cc982f689a9842 | ["MIT"] | null | null | null |
import unittest
from dnplab.dnpHydration import HydrationParameter
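# HydrationParameter allows both attribute and dict-style access; the tests
# cover both interfaces plus the value validation on smax_model and t1_interp_method.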
class TestHydrationParameter(unittest.TestCase):
def test_default_values(self):
hp = HydrationParameter()
self.assertEqual(hp.field, None)
self.assertEqual(hp.spin_C, None)
self.assertEqual(hp.T10, None)
self.assertEqual(hp.T100, None)
self.assertEqual(hp.smax_model, "tethered")
self.assertEqual(hp.ksigma_bulk, 95.4)
self.assertEqual(hp.tcorr_bulk, 54)
self.assertEqual(hp.D_H2O, 2.3e-9)
self.assertEqual(hp.D_SL, 4.1e-10)
self.assertEqual(hp.klow_bulk, 366)
self.assertEqual(hp.t1_interp_method, "second_order")
def test_struct_like(self):
hp = HydrationParameter()
hp.field = 300
hp.spin_C = 200
hp.ksigma_bulk = 95
hp.T10 = 1
hp.T100 = 2
hp.tcorr_bulk = 50
hp.D_H2O = 3e-10
hp.D_SL = 5e-10
hp.klow_bulk = 350
hp.smax_model = "free"
hp.t1_interp_method = "second_order"
self.assertEqual(hp["field"], hp.field)
self.assertEqual(hp["spin_C"], hp.spin_C)
self.assertEqual(hp["ksigma_bulk"], hp.ksigma_bulk)
self.assertEqual(hp["T10"], hp.T10)
self.assertEqual(hp["T100"], hp.T100)
self.assertEqual(hp["tcorr_bulk"], hp.tcorr_bulk)
self.assertEqual(hp["D_H2O"], hp.D_H2O)
self.assertEqual(hp["D_SL"], hp.D_SL)
self.assertEqual(hp["klow_bulk"], hp.klow_bulk)
self.assertEqual(hp["smax_model"], hp.smax_model)
self.assertEqual(hp["t1_interp_method"], hp.t1_interp_method)
with self.assertRaises(ValueError):
hp.smax_model = "notinlist"
with self.assertRaises(ValueError):
hp.t1_interp_method = "notinlist"
def test_dict_like(self):
hp = HydrationParameter()
hp["field"] = 300
hp["spin_C"] = 200
hp["ksigma_bulk"] = 95
hp["T10"] = 1
hp["T100"] = 2
hp["tcorr_bulk"] = 50
hp["D_H2O"] = 3e-10
hp["D_SL"] = 5e-10
hp["klow_bulk"] = 350
hp["smax_model"] = "free"
hp["t1_interp_method"] = "second_order"
self.assertEqual(hp["field"], hp.field)
self.assertEqual(hp["spin_C"], hp.spin_C)
self.assertEqual(hp["ksigma_bulk"], hp.ksigma_bulk)
self.assertEqual(hp["T10"], hp.T10)
self.assertEqual(hp["T100"], hp.T100)
self.assertEqual(hp["tcorr_bulk"], hp.tcorr_bulk)
self.assertEqual(hp["D_H2O"], hp.D_H2O)
self.assertEqual(hp["D_SL"], hp.D_SL)
self.assertEqual(hp["klow_bulk"], hp.klow_bulk)
self.assertEqual(hp["smax_model"], hp.smax_model)
self.assertEqual(hp["t1_interp_method"], hp.t1_interp_method)
with self.assertRaises(ValueError):
hp["smax_model"] = "notinlist"
with self.assertRaises(ValueError):
hp["t1_interp_method"] = "notinlist"
if __name__ == "__main__":
unittest.main()
| 33.142857 | 69 | 0.61008 | 399 | 3,016 | 4.39599 | 0.157895 | 0.282212 | 0.31984 | 0.082098 | 0.830673 | 0.735462 | 0.711517 | 0.711517 | 0.711517 | 0.711517 | 0 | 0.047788 | 0.250663 | 3,016 | 90 | 70 | 33.511111 | 0.728319 | 0 | 0 | 0.391892 | 0 | 0 | 0.123011 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.040541 | false | 0 | 0.027027 | 0 | 0.081081 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
60567d43a7141d5693ec782567d397be1891e0f6 | 987 | py | Python | app/Main/SYS/Comparators.py | fineans/Vython | 6243043d4d5993fa03a91a254e96c6d5746848d4 | ["MIT"] | null | null | null | app/Main/SYS/Comparators.py | fineans/Vython | 6243043d4d5993fa03a91a254e96c6d5746848d4 | ["MIT"] | null | null | null | app/Main/SYS/Comparators.py | fineans/Vython | 6243043d4d5993fa03a91a254e96c6d5746848d4 | ["MIT"] | null | null | null |
from rply.token import BaseBox
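# Comparison nodes for an rply-based AST: each eval() compares its evaluated
# left and right operands ("Egal" is French for "equal").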
class Comparators(BaseBox):
def __init__(self, left, right):
self.left = left
self.right = right
self.value = False
class Egal(Comparators):
    def eval(self):
        return self.left.eval() == self.right.eval()
class Less(Comparators):
    def eval(self):
        return self.left.eval() < self.right.eval()
class More(Comparators):
    def eval(self):
        return self.left.eval() > self.right.eval()
class LessOrEgal(Comparators):
    def eval(self):
        return self.left.eval() <= self.right.eval()
class MoreOrEgal(Comparators):
    def eval(self):
        return self.left.eval() >= self.right.eval()
| 20.142857 | 49 | 0.547112 | 112 | 987 | 4.785714 | 0.214286 | 0.149254 | 0.16791 | 0.205224 | 0.727612 | 0.727612 | 0.727612 | 0.727612 | 0.727612 | 0.727612 | 0 | 0 | 0.348531 | 987 | 48 | 50 | 20.5625 | 0.833593 | 0 | 0 | 0.555556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.027778 | 0 | 0.638889 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
60ed1977e5ae759dcb6f472270cc2f8d6c8152f0 | 2,820 | py | Python | accounts/migrations/0003_auto_20200128_0949.py | deepak-shrivastava-au1/job_portal | a573a44a7c4d382faaf415533c3e4599fd5d6620 | ["MIT"] | null | null | null | accounts/migrations/0003_auto_20200128_0949.py | deepak-shrivastava-au1/job_portal | a573a44a7c4d382faaf415533c3e4599fd5d6620 | ["MIT"] | null | null | null | accounts/migrations/0003_auto_20200128_0949.py | deepak-shrivastava-au1/job_portal | a573a44a7c4d382faaf415533c3e4599fd5d6620 | ["MIT"] | 1 | 2020-01-11T10:59:51.000Z | 2020-01-11T10:59:51.000Z |
# Generated by Django 2.1.15 on 2020-01-28 09:49
from django.db import migrations, models
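# Auto-generated migration: adds optional text profile fields
# (CharField, blank/null, max_length=78) to the custom user model.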
class Migration(migrations.Migration):
dependencies = [
('accounts', '0002_auto_20190326_1754'),
]
operations = [
migrations.AddField(
model_name='user',
name='address',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='city',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='country',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='dob',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='dob_city',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='dob_state',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='job_title',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='middle_name',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='mob',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='pin',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='state',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='tel',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='tot_exp_mon',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
migrations.AddField(
model_name='user',
name='tot_exp_yr',
field=models.CharField(blank=True, default='', max_length=78, null=True),
),
]
| 33.571429 | 85 | 0.544681 | 289 | 2,820 | 5.179931 | 0.186851 | 0.168337 | 0.215097 | 0.252505 | 0.867735 | 0.867735 | 0.844355 | 0.844355 | 0.844355 | 0.844355 | 0 | 0.031153 | 0.317021 | 2,820 | 83 | 86 | 33.975904 | 0.746106 | 0.016312 | 0 | 0.727273 | 1 | 0 | 0.064935 | 0.008297 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.012987 | 0 | 0.051948 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
880db45415f92e403992de3c6632a07685bb6198 | 35,407 | py | Python | startup/users/30-user-Richter.py | NSLS-II-SMI/profile_collection | c1e2236a7520f605ac85e7591f05682add06357c | ["BSD-3-Clause"] | null | null | null | startup/users/30-user-Richter.py | NSLS-II-SMI/profile_collection | c1e2236a7520f605ac85e7591f05682add06357c | ["BSD-3-Clause"] | 13 | 2018-09-25T19:35:08.000Z | 2021-01-15T20:42:26.000Z | startup/users/30-user-Richter.py | NSLS-II-SMI/profile_collection | c1e2236a7520f605ac85e7591f05682add06357c | ["BSD-3-Clause"] | 3 | 2019-09-06T01:40:59.000Z | 2020-07-01T20:27:39.000Z |
def P_edge_measurments(t=1):
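    # P K-edge resonant scan: for each sample, align, then sweep the listed
    # energies at two incident angles and two WAXS arcs, stepping xbpm3_pos.y
    # together with the energy to follow the beam.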
dets = [pil1M, pil300KW]
det_exposure_time(t,t)
names = ['s05_P3MEEMT_115C_KPF6', 's34_MM460_170_KPF6', 's30_MMM389_170_KPF6', 's38_MM461_170_KPF6', 's8_P3HT_ac_KPF6',
's42_MM389_170_KPF6', 's46_MM460_170_KPF6', 's50_MM461_170_KPF6']
x_piezo = [42000, 31000, 19000, 6000, -6000, -16000, -33000, -44000]
energies = [2140.0, 2145.0, 2150.0, 2155.0, 2157.0, 2157.5, 2158.0, 2158.5, 2159.0, 2159.5, 2160.0, 2160.5, 2161.0, 2161.5, 2162.0, 2162.5,
2163.0, 2163.5, 2164.0, 2164.5, 2165.0, 2165.5, 2166.0, 2170.0, 2175.0, 2180.0, 2185.0, 2190.0, 2195.0, 2200.0]
xbpm3_y = [1.416, 1.414, 1.412, 1.41, 1.4092, 1.409, 1.4088, 1.4086, 1.4084, 1.4082, 1.408, 1.4078, 1.4076, 1.4074, 1.4072, 1.407,
1.4068, 1.4066, 1.4064, 1.4062, 1.406, 1.4058, 1.4056, 1.404, 1.402, 1.4, 1.398, 1.396, 1.394, 1.392]
waxs_arc = [0, 17]
ai0 = 0
ai_list = [0.52, 0.80]
    offset = 0  # offset so we do not measure the same position as in the sulfur scan again
for name, xs in zip(names, x_piezo):
yield from bps.mv(piezo.x, xs)
yield from alignement_special(angle = 0.75)
ai0 = piezo.th.position
for i, wa in enumerate(waxs_arc):
yield from bps.mv(waxs, wa)
for k, ais in enumerate(ai_list):
yield from bps.mv(piezo.th, ai0 + ais)
yield from bps.mv(piezo.x, xs + offset + k*400)
name_fmt = '{sample}_{energy}eV_ai{ai}_pos1_wa{wax}_bpm{xbpm}'
for e, xbpm3_ys in zip(energies, xbpm3_y):
yield from bps.mv(energy, e)
yield from bps.mv(xbpm3_pos.y, xbpm3_ys)
yield from bps.sleep(1)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mvr(piezo.x, 200)
name_fmt = '{sample}_{energy}eV_ai{ai}_pos2_wa{wax}_bpm{xbpm}'
for e, xbpm3_ys in zip(energies[::-1], xbpm3_y[::-1]):
yield from bps.mv(energy, e)
yield from bps.mv(xbpm3_pos.y, xbpm3_ys)
yield from bps.sleep(1)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
def transition_Cl_S_edges():
    # Walk the energy down from the Cl K-edge region to the S K-edge region,
    # sleeping 5 s after each move.
    for e in [2800, 2780, 2760, 2740, 2720, 2700, 2680, 2660, 2640,
              2610, 2580, 2550, 2525, 2500, 2475, 2450]:
        yield from bps.mv(energy, e)
        yield from bps.sleep(5)
def transition_S_Cl_edges():
    # Walk the energy back up from the S K-edge region to the Cl K-edge region,
    # sleeping 5 s after each move.
    for e in [2450, 2475, 2500, 2525, 2550, 2580, 2610, 2640, 2660,
              2680, 2700, 2720, 2740, 2760, 2780, 2800]:
        yield from bps.mv(energy, e)
        yield from bps.sleep(5)
def Cl_edge_vertical(t=1):
dets = [pil300KW]
det_exposure_time(t,t)
#name = 's01_P3HT015_un', 's04_P3MEEMT_115_un', 's33_MM460_170_ClO4'
name = 's33_MM460_170_ClO4'
energies = [2820.0, 2830.0, 2832.0, 2834.0, 2834.5, 2835.0, 2835.5, 2836.0, 2836.5, 2837.0, 2837.5, 2838.0, 2838.5, 2839.0, 2839.5,
2840.0, 2840.5, 2841.0, 2841.5, 2845.0, 2850.0, 2855.0, 2860.0, 2865.0, 2870.0]
waxs_arc = [4, 10.5, 17, 45]
ai0 = piezo.th.position
for i, wa in enumerate(waxs_arc):
if i==0:
print('wa=4deg')
else:
yield from bps.mv(waxs, wa)
name_fmt = '{sample}_vertical_{energy}eV_ai0.8deg_pos1_wa{wax}_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(1)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 2850)
yield from bps.sleep(2)
yield from bps.mv(energy, 2830)
yield from bps.sleep(2)
yield from bps.mv(energy, 2810)
yield from bps.sleep(2)
def NEXAFS_P_edge(t=0.5):
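    # NEXAFS across the P K-edge: 51 energy points from 2130 to 2180 eV,
    # stepping xbpm3_pos.y together with the energy.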
yield from bps.mv(waxs, 45)
dets = [pil300KW]
name = 'NEXAFS_s3_test_Pedge_nspot1'
energies = np.linspace(2130, 2180, 51)
xbpm3_y = np.linspace(1.42, 1.40, 51)
det_exposure_time(t,t)
name_fmt = '{sample}_{energy}eV_xbpm{xbpm}'
for e, xbpm3_ys in zip(energies, xbpm3_y):
yield from bps.mv(energy, e)
yield from bps.mv(xbpm3_pos.y, xbpm3_ys)
yield from bps.sleep(1)
sample_name = name_fmt.format(sample=name, energy=e, xbpm = '%3.1f'%xbpm3.sumX.value)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
def S_edge_vertical(t=1):
dets = [ pil300KW]
det_exposure_time(t,t)
#name = 's01_P3HT015_un', 's04_P3MEEMT_115_un', 's33_MM460_170_ClO4'
name = 'MM460_170'
energies = [2450.0, 2455.0, 2460.0, 2465.0, 2470.0, 2473.0, 2475.0, 2475.5, 2476.0, 2476.5, 2477.0, 2477.5, 2478.0, 2478.5, 2479.0, 2479.5,
2480.0, 2480.5, 2483.0, 2485.0, 2487.5, 2490.0, 2492.5, 2495.0, 2500.0, 2510.0, 2515.0]
# waxs_arc = [4, 10.5, 17]
waxs_arc = [10.5, 17]
ai0 = piezo.th.position
for i, wa in enumerate(waxs_arc):
if wa == 4:
print('wa=4deg')
else:
yield from bps.mv(waxs, wa)
name_fmt = '{sample}_vertical_{energy}eV_ai7.7deg_pos1_wa{wax}_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(1)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 2490)
yield from bps.sleep(1)
yield from bps.mv(energy, 2470)
yield from bps.sleep(1)
yield from bps.mv(energy, 2450)
yield from bps.sleep(1)
def giwaxs_Cl_edge_Lee_aois_2121_1(t=1):
dets = [pil1M, pil300KW]
# names = ['P3HT_600_KCl04_par', 'P3HT_500_KCl04', 'P3HT_neat', 'P3HT_600_KCl']
# x_piezo = [ -31000, -41000, -53000, -56000]
# x_hexa = [ 0, 0, 0, -8]
# z_piezo = [ 0, 0, 0, 0]
names = ['P3HT_KCl04_bilayer']
x_piezo = [ 50000]
x_hexa = [ 0]
z_piezo = [ 0]
dets = [pil1M, pil300KW]
waxs_arc = [0, 15]
for numero, (name, xs_piezo, xs_hexa, zs_piezo) in enumerate(zip(names, x_piezo, x_hexa, z_piezo)):
yield from bps.mv(stage.x, xs_hexa)
yield from bps.mv(piezo.x, xs_piezo)
yield from bps.mv(piezo.z, zs_piezo)
ai0 = 0
yield from bps.mv(piezo.th, ai0)
yield from alignement_gisaxs(angle = 0.4)
ai0 = piezo.th.position
yield from bps.mv(att2_9.open_cmd, 1)
yield from bps.sleep(1)
yield from bps.mv(att2_9.open_cmd, 1)
ai_list = np.arange(0.3, 0.8, 0.01).tolist()
ai_list = [round(1000*x, 4) for x in ai_list]
ai_list = np.asarray(ai_list)/1000
energies = [2820.0, 2838.5, 2870.0]
for i, wa in enumerate(waxs_arc):
yield from bps.mv(waxs, wa)
for k, e in enumerate(energies):
yield from bps.mv(energy, e)
yield from bps.sleep(2)
yield from bps.mv(piezo.x, xs_piezo + k*600 + i*200)
for l, ais in enumerate(ai_list):
yield from bps.mv(piezo.th, ai0 + ais)
det_exposure_time(t,t)
name_fmt = '{sample}_pos1_aiscan_{energy}eV_ai{ai}_wa{wax}_bpm{xbpm}'
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%1.4f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
for k, e in enumerate(energies[::-1]):
yield from bps.mv(energy, e)
yield from bps.sleep(2)
yield from bps.mv(piezo.x, xs_piezo + 1000 + k*600 + i*200)
for l, ais in enumerate(ai_list):
yield from bps.mv(piezo.th, ai0 + ais)
det_exposure_time(t,t)
name_fmt = '{sample}_pos2_aiscan_{energy}eV_ai{ai}_wa{wax}_bpm{xbpm}'
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
def SVA_night_12_02(t=1):
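    # Overnight sequence (SVA presumably stands for solvent vapor annealing):
    # measure under dry flow, switch to wet flow, equilibrate for 40 min and
    # re-measure, then return to dry flow for a final set.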
global names, x_hexa, y_hexa, incident_angles, y_hexa_aligned
names = ['MM460_170C_ClO4', 'MM389_as_un', 'MM389_as_ClO4', 'MM389_170C_un', 'MM389_170C_ClO4']
x_hexa = [-17, 18, 9, -6, -15]
# y_hexa = [-3.2, -3.2, -3.2, -3.2, 3, 3, 3, 3]
# incident_angl = [ 2.8, 2.5, 2.2, 2.2, 2.2, 2.2, 2.2, 2.2]
# assert len(x_hexa) == len(names), f'Number of X coordinates ({len(x_hexa)}) is different from number of samples ({len(names)})'
# assert len(x_hexa) == len(y_hexa), f'Number of X coordinates ({len(x_hexa)}) is different from number of samples ({len(y_hexa)})'
# assert len(x_hexa) == len(incident_angl), f'Number of X coordinates ({len(x_hexa)}) is different from number of samples ({len(incident_angles)})'
setDryFlow(5)
setWetFlow(0)
y_hexa_aligned = [-3.013, 3.311, 3.32, 3.356, 3.322]
incident_angles = [1.581, 1.199, 1.849, 1.367, 1.825]
# for name, xs_hexa, ys_hexa, ais in zip(names[4:], x_hexa[4:], y_hexa[4:], incident_angl[4:]):
# yield from bps.mv(stage.x, xs_hexa)
# yield from bps.mv(stage.y, ys_hexa)
# yield from bps.mv(stage.th, ais)
# yield from alignement_gisaxs_hex(angle = 0.45)
# incident_angles = incident_angles + [stage.th.position]
# y_hexa_aligned = y_hexa_aligned + [stage.y.position]
print(incident_angles)
print(y_hexa_aligned)
assert len(x_hexa) == len(names), f'Number of X coordinates ({len(x_hexa)}) is different from number of samples ({len(names)})'
assert len(x_hexa) == len(y_hexa_aligned), f'Number of X coordinates ({len(x_hexa)}) is different from number of Y coordinates ({len(y_hexa_aligned)})'
assert len(x_hexa) == len(incident_angles), f'Number of X coordinates ({len(x_hexa)}) is different from number of incident angles ({len(incident_angles)})'
humidity = '%3.2f'%readHumidity(verbosity=0)
# Measure the samples with N2 flow
offset = 0
yield from Cl_edge_SVA_measurments_2021_2(t=t, offset = offset, humidity = humidity)
# # Measure at flow 80 percent
# setDryFlow(2.)
# setWetFlow(4.35)
# yield from bps.sleep(40 * 60)
# humidity = '%3.2f'%readHumidity(verbosity=0)
# offset = 0.9
# yield from Cl_edge_SVA_measurments(t=t, offset = offset, humidity = humidity)
# Measure at flow 100 percent
names = ['MM460_as_un', 'MM460_as_ClO4', 'MM460_170C_un', 'MM460_170C_ClO4', 'MM389_as_un', 'MM389_as_ClO4', 'MM389_170C_un', 'MM389_170C_ClO4']
x_hexa = [ 17, 6, -8.0, -17, 18, 9, -6, -15]
y_hexa_aligned = [-3.052, -3.06, -2.998, -3.013, 3.311, 3.32, 3.356, 3.322]
incident_angles = [1.94502, 1.77, 1.747, 1.581, 1.199, 1.849, 1.367, 1.825]
setDryFlow(0)
setWetFlow(5)
yield from bps.sleep(40 * 60)
humidity = '%3.2f'%readHumidity(verbosity=0)
offset = 1.5
yield from Cl_edge_SVA_measurments_2021_2(t=t, offset = offset, humidity = humidity)
# Back at flow 0 percent
setDryFlow(5)
setWetFlow(0)
yield from bps.sleep(40 * 60)
humidity = '%3.2f_post'%readHumidity(verbosity=0)
offset = 3.0
yield from Cl_edge_SVA_measurments_2021_2(t=t, offset = offset, humidity = humidity)
def S_edge_measurments_transmission(t=1):
dets = [pil1M, pil900KW, pil300KW]
# names = ['P3MEEMT_13k_115C', 'P3MEEMT_23k_115C', 'MM460_170C', 'PB2T_TEG_undoped', 'PB2T_TEG_partialCV', 'PB2T_TEG_partial_dedope',
# 'PB2T_TEG_doped400mV', 'KClO4_neat']
# x_piezo = [28100, 20500, 12500, 4700, -800, -6800, -12000, -19000]
# y_piezo = [ 400, 400, 400, 500, 400, 200, 300, 300]
names = ['P3MEEMT_13k_115C', 'P3MEEMT_23k_115C', 'MM460_170C']
x_piezo = [ 27400, 19700, 11800]
y_piezo = [ 0, -100, -100]
assert len(x_piezo) == len(names), f'Number of X coordinates ({len(x_piezo)}) is different from number of samples ({len(names)})'
assert len(x_piezo) == len(y_piezo), f'Number of X coordinates ({len(x_piezo)}) is different from number of Y coordinates ({len(y_piezo)})'
energies = [2450.0, 2455.0, 2460.0, 2465.0, 2470.0, 2473.0, 2475.0, 2475.5, 2476.0, 2476.5, 2477.0, 2477.5, 2478.0, 2478.5, 2479.0, 2479.5,
2480.0, 2480.5, 2483.0, 2485.0, 2487.5, 2490.0, 2492.5, 2495.0, 2500.0, 2510.0, 2515.0]
waxs_arc = [23]
det_exposure_time(t,t)
for numb, (name, xs, ys) in enumerate(zip(names, x_piezo, y_piezo)):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(piezo.y, ys)
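# One fresh y position per energy point, spread over 1000 piezo units across the sample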
yss = np.linspace(ys, ys + 1000, len(energies))
for i, wa in enumerate(waxs_arc):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(waxs, wa)
name_fmt = '{sample}_saxsredo_{energy}eV_pos1_wa{wax}_bpm{xbpm}'
for e, ysss in zip(energies, yss):
yield from bps.mv(energy, e)
yield from bps.sleep(1)
yield from bps.mv(piezo.y, ysss)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
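# Step sideways to a second spot before repeating the scan with the energy list reversed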
yield from bps.mvr(piezo.x, 400)
name_fmt = '{sample}_{energy}eV_pos2_wa{wax}_bpm{xbpm}'
for e, ysss in zip(energies[::-1], yss):
yield from bps.mv(energy, e)
yield from bps.sleep(1)
yield from bps.mv(piezo.y, ysss)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
def Cl_edge_measurments_transmission(t=1):
dets = [pil1M, pil900KW, pil300KW]
# names = ['PB2T_TEG_undoped', 'PB2T_TEG_partialCV', 'PB2T_TEG_partial_dedope', 'PB2T_TEG_doped400mV', 'KClO4_neat']
# x_piezo = [3800, -1800, -7800, -13300, -20000]
# y_piezo = [ 400, 200, 0, 300, 300]
names = [ 'PB2T_TEG_doped400mV', 'KClO4_neat']
x_piezo = [-12500, -20000]
y_piezo = [ 300, 300]
assert len(x_piezo) == len(names), f'Number of X coordinates ({len(x_piezo)}) is different from number of samples ({len(names)})'
assert len(x_piezo) == len(y_piezo), f'Number of X coordinates ({len(x_piezo)}) is different from number of Y coordinates ({len(y_piezo)})'
energies = [2810.0, 2820.0, 2830.0, 2832.0, 2834.0, 2834.5, 2835.0, 2835.5, 2836.0, 2836.5, 2837.0, 2837.5, 2838.0, 2838.5, 2839.0, 2839.5,
2840.0, 2840.5, 2841.0, 2841.5, 2845.0, 2850.0, 2855.0, 2860.0, 2865.0, 2870.0, 2875.0, 2880.0, 2890.0]
waxs_arc = [2, 23]
det_exposure_time(t,t)
for numb, (name, xs, ys) in enumerate(zip(names, x_piezo, y_piezo)):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(piezo.y, ys)
yss = np.linspace(ys, ys + 1000, len(energies))
for i, wa in enumerate(waxs_arc):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(waxs, wa)
name_fmt = '{sample}_saxs_{energy}eV_pos1_wa{wax}_bpm{xbpm}'
for e, ysss in zip(energies, yss):
yield from bps.mv(energy, e)
yield from bps.sleep(1)
yield from bps.mv(piezo.y, ysss)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mvr(piezo.x, 300)
name_fmt = '{sample}_{energy}eV_pos2_wa{wax}_bpm{xbpm}'
for e, ysss in zip(energies[::-1], yss):
yield from bps.mv(energy, e)
yield from bps.sleep(1)
yield from bps.mv(piezo.y, ysss)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
def S_edge_SVA_measurments_2021_3(t=1, offset = 1, humidity = 'test'):
names = [ 'PB2T_TEG_doped400mV']
x_hexa = [-12500]
y_hexa = [ 300]
dets = [pil1M, pil300KW, pil900KW]
det_exposure_time(t,t)
energies = [2450.0, 2455.0, 2460.0, 2465.0, 2470.0, 2473.0, 2475.0, 2475.5, 2476.0, 2476.5, 2477.0, 2477.5, 2478.0, 2478.5, 2479.0, 2479.5,
2480.0, 2480.5, 2483.0, 2485.0, 2487.5, 2490.0, 2492.5, 2495.0, 2500.0, 2510.0, 2515.0]
waxs_arc = [2, 23]
ai_list = [0.80]
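# Note: incident_angles is taken from module scope; it is set by a prior alignment run such as SVA_night_12_02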
for name, xs_hexa, incident_ang, ys_hexap in zip(names, x_hexa, incident_angles, y_hexa):
yield from bps.mv(stage.x, xs_hexa + offset)
xs = xs_hexa + offset
yield from alignement_gisaxs_hex(angle = 0.45)
yield from bps.mv(stage.y, ys_hexap)
yield from bps.mv(stage.th, incident_ang)
ai0 = incident_ang
for i, wa in enumerate(waxs_arc):
yield from bps.mv(waxs, wa)
counter = 0
for k, ais in enumerate(ai_list):
yield from bps.mv(stage.th, ai0 + ais)
name_fmt = '{sample}_hum{hum}_{energy}eV_ai{ai}_pos1_wa{wax}_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(1)
yield from bps.mv(stage.x, xs + counter * 0.025)
counter += 1
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, hum = humidity, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
name_fmt = '{sample}_hum{hum}_{energy}eV_ai{ai}_pos2_wa{wax}_bpm{xbpm}'
for e in energies[::-1]:
yield from bps.mv(energy, e)
yield from bps.sleep(1)
yield from bps.mv(stage.x, xs + counter * 0.025)
counter += 1
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, hum = humidity, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
def Cl_edge_measurments_2021_3(t=1):
dets = [pil1M, pil900KW, pil300KW]
det_exposure_time(t,t)
names = ['P3HHT_600mV', 'P3HT_600mV', 'P3PAAT_600mV', 'P3MEEMT_23KDa_115C_600mV', 'P3MEEMT_13KDa_115C_600mV', 'P3MEEMT_13KDa_115C_450mV', 'P3MEEMT_13KDa_115C_400mV', 'P3MEEMT_13KDa_115C_350mV',
'P3MEEMT_13KDa_115C_325mV', 'P3MEEMT_13KDa_115C_300mV', 'P3MEEMT_13KDa_115C_275mV', 'P3MEEMT_13KDa_115C_0mV']
x_piezo = [ 58000, 58000, 51000, 37000, 21000, 8000, -6000, -19000, -32000, -43000, -50000, -52000]
x_hexap = [ 15, 4, 0, 0, 0, 0, 0, 0, 0, 0, -4, -15]
y_piezo = [ 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000]
assert len(x_piezo) == len(names), f'Number of X coordinates ({len(x_piezo)}) is different from number of samples ({len(names)})'
assert len(x_piezo) == len(y_piezo), f'Number of X coordinates ({len(x_piezo)}) is different from number of Y coordinates ({len(y_piezo)})'
assert len(x_piezo) == len(x_hexap), f'Number of X coordinates ({len(x_piezo)}) is different from number of hexapod X coordinates ({len(x_hexap)})'
energies = [2810.0, 2820.0, 2830.0, 2832.0, 2834.0, 2834.5, 2835.0, 2835.5, 2836.0, 2836.5, 2837.0, 2837.5, 2838.0, 2838.5, 2839.0, 2839.5,
2840.0, 2840.5, 2841.0, 2841.5, 2845.0, 2850.0, 2855.0, 2860.0, 2865.0, 2870.0, 2875.0, 2880.0, 2890.0]
waxs_arc = [2, 23]
ai0 = 0
ai_list = [0.80]
for name, xs, ys, xs_hexap in zip(names, x_piezo, y_piezo, x_hexap):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(piezo.y, ys)
yield from bps.mv(stage.x, xs_hexap)
yield from bps.mv(piezo.th, ai0)
yield from alignement_gisaxs(0.40)
yield from bps.mv(att2_9.open_cmd, 1)
yield from bps.sleep(1)
yield from bps.mv(att2_9.open_cmd, 1)
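# Use the th position found by the alignment as the new incident-angle reference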
ai0 = piezo.th.position
det_exposure_time(t,t)
for i, wa in enumerate(waxs_arc):
yield from bps.mv(waxs, wa)
yield from bps.mv(piezo.x, xs)
counter = 0
for k, ais in enumerate(ai_list):
yield from bps.mv(piezo.th, ai0 + ais)
name_fmt = '{sample}_{energy}eV_ai{ai}_pos1_wa{wax}_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
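# If the XBPM sum reads low the beam has likely not settled; wait and re-command the energy once before measuring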
if xbpm2.sumX.get() < 120:
yield from bps.sleep(5)
yield from bps.mv(energy, e)
yield from bps.sleep(2)
yield from bps.mv(piezo.x, xs + counter * 30)
counter += 1
bpm = xbpm2.sumX.get()
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
name_fmt = '{sample}_{energy}eV_ai{ai}_pos2_wa{wax}_bpm{xbpm}'
for e in energies[::-1]:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
if xbpm2.sumX.get() < 120:
yield from bps.sleep(5)
yield from bps.mv(energy, e)
yield from bps.sleep(2)
yield from bps.mv(piezo.x, xs + counter * 30)
counter += 1
bpm = xbpm2.sumX.get()
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
# name_fmt = '{sample}_{energy}eV_ai{ai}_pos3_wa{wax}_bpm{xbpm}'
# for e in energies:
# yield from bps.mv(energy, e)
# yield from bps.sleep(1)
# yield from bps.mv(piezo.x, xs + counter * 30)
# counter += 1
# bpm = xbpm2.sumX.value
# sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
# sample_id(user_name='LR', sample_name=sample_name)
# print(f'\n\t=== Sample: {sample_name} ===\n')
# yield from bp.count(dets, num=1)
# name_fmt = '{sample}_{energy}eV_ai{ai}_pos4_wa{wax}_bpm{xbpm}'
# for e in energies[::-1]:
# yield from bps.mv(energy, e)
# yield from bps.sleep(1)
# yield from bps.mv(piezo.x, xs + counter * 30)
# counter += 1
# bpm = xbpm2.sumX.value
# sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
# sample_id(user_name='LR', sample_name=sample_name)
# print(f'\n\t=== Sample: {sample_name} ===\n')
# yield from bp.count(dets, num=1)
def S_edge_measurments_2021_3(t=1):
dets = [pil1M, pil900KW, pil300KW]
det_exposure_time(t,t)
# names = ['P3HHT_600mV', 'P3HT_600mV', 'P3PAAT_600mV', 'P3MEEMT_23KDa_115C_600mV', 'P3MEEMT_13KDa_115C_600mV', 'P3MEEMT_13KDa_115C_450mV', 'P3MEEMT_13KDa_115C_400mV', 'P3MEEMT_13KDa_115C_350mV',
# 'P3MEEMT_13KDa_115C_325mV', 'P3MEEMT_13KDa_115C_300mV', 'P3MEEMT_13KDa_115C_275mV', 'P3MEEMT_13KDa_115C_0mV']
# x_piezo = [ 58000, 58000, 51000, 37000, 21000, 8000, -6000, -19000, -32000, -43000, -50000, -52000]
# x_hexap = [ 15, 4, 0, 0, 0, 0, 0, 0, 0, 0, -4, -15]
# y_piezo = [ 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000]
names = ['P3MEEMT_23KDa_115C_600mV', 'P3MEEMT_13KDa_115C_600mV', 'P3MEEMT_13KDa_115C_450mV', 'P3MEEMT_13KDa_115C_400mV', 'P3MEEMT_13KDa_115C_350mV',
'P3MEEMT_13KDa_115C_325mV', 'P3MEEMT_13KDa_115C_300mV', 'P3MEEMT_13KDa_115C_275mV', 'P3MEEMT_13KDa_115C_0mV']
x_piezo = -5000 + np.asarray([ 37000, 21000, 8000, -6000, -19000, -32000, -43000, -50000, -50000])
x_hexap = [ 0, 0, 0, 0, 0, 0, 0, -4, -16]
y_piezo = [ 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000, 6000]
assert len(x_piezo) == len(names), f'Number of X coordinates ({len(x_piezo)}) is different from number of samples ({len(names)})'
assert len(x_piezo) == len(y_piezo), f'Number of X coordinates ({len(x_piezo)}) is different from number of Y coordinates ({len(y_piezo)})'
assert len(x_piezo) == len(x_hexap), f'Number of X coordinates ({len(x_piezo)}) is different from number of hexapod X coordinates ({len(x_hexap)})'
energies = [2450.0, 2455.0, 2460.0, 2465.0, 2470.0, 2473.0, 2475.0, 2475.5, 2476.0, 2476.5, 2477.0, 2477.5, 2478.0, 2478.5, 2479.0, 2479.5,
2480.0, 2480.5, 2483.0, 2485.0, 2487.5, 2490.0, 2492.5, 2495.0, 2500.0, 2510.0, 2515.0]
waxs_arc = [2, 23]
ai0 = 0
ai_list = [0.80]
for name, xs, ys, xs_hexap in zip(names, x_piezo, y_piezo, x_hexap):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(piezo.y, ys)
yield from bps.mv(stage.x, xs_hexap)
yield from bps.mv(piezo.th, ai0)
yield from alignement_gisaxs(0.40)
yield from bps.mv(att2_9.open_cmd, 1)
yield from bps.sleep(1)
yield from bps.mv(att2_9.open_cmd, 1)
ai0 = piezo.th.position
det_exposure_time(t,t)
for i, wa in enumerate(waxs_arc):
yield from bps.mv(waxs, wa)
yield from bps.mv(piezo.x, xs)
counter = 0
for k, ais in enumerate(ai_list):
yield from bps.mv(piezo.th, ai0 + ais)
name_fmt = '{sample}_{energy}eV_ai{ai}_pos1_wa{wax}_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
if xbpm2.sumX.get() < 120:
yield from bps.sleep(5)
yield from bps.mv(energy, e)
yield from bps.sleep(2)
yield from bps.mv(piezo.x, xs + counter * 30)
counter += 1
bpm = xbpm2.sumX.get()
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
name_fmt = '{sample}_{energy}eV_ai{ai}_pos2_wa{wax}_bpm{xbpm}'
for e in energies[::-1]:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
if xbpm2.sumX.get() < 120:
yield from bps.sleep(5)
yield from bps.mv(energy, e)
yield from bps.sleep(2)
yield from bps.mv(piezo.x, xs + counter * 30)
counter += 1
bpm = xbpm2.sumX.get()
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
def night_2021_12_15(t=1):
proposal_id('2021_3', '307296_Richter4')
yield from S_edge_measurments_2021_3(t=t)
proposal_id('2021_3', '308274_Ferron5')
yield from xrr_spol_waxs()
def Cl_edge_measurments_2021_3_hex(t=1):
dets = [pil1M, pil900KW, pil300KW]
det_exposure_time(t,t)
names = ['20um_blank']
x_hexap = [ 18]
assert len(x_hexap) == len(names), f'Number of X coordinates ({len(x_hexap)}) is different from number of samples ({len(names)})'
energies = [2810.0]#, 2820.0, 2830.0, 2832.0, 2834.0, 2834.5, 2835.0, 2835.5, 2836.0, 2836.5, 2837.0, 2837.5, 2838.0, 2838.5, 2839.0, 2839.5,
# 2840.0, 2840.5, 2841.0, 2841.5, 2845.0, 2850.0, 2855.0, 2860.0, 2865.0, 2870.0, 2875.0, 2880.0, 2890.0]
waxs_arc = [2, 23]
ai0 = 0
ai_list = [0.80]
for name, xs_hexap in zip(names, x_hexap):
yield from bps.mv(stage.x, xs_hexap)
yield from alignement_gisaxs_hex(angle = 0.45)
ai0 = stage.th.position
det_exposure_time(t,t)
for i, wa in enumerate(waxs_arc):
yield from bps.mv(waxs, wa)
counter = 0
for k, ais in enumerate(ai_list):
yield from bps.mv(stage.th, ai0 + ais)
name_fmt = '{sample}_{energy}eV_ai{ai}_pos1_wa{wax}_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
if xbpm2.sumX.get() < 120:
yield from bps.sleep(5)
yield from bps.mv(energy, e)
yield from bps.sleep(2)
yield from bps.mv(stage.x, xs_hexap + counter * 0.02)
counter += 1
bpm = xbpm2.sumX.get()
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
name_fmt = '{sample}_{energy}eV_ai{ai}_pos2_wa{wax}_bpm{xbpm}'
for e in energies[::-1]:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
if xbpm2.sumX.get() < 120:
yield from bps.sleep(5)
yield from bps.mv(energy, e)
yield from bps.sleep(2)
yield from bps.mv(stage.x, xs_hexap + counter * 0.02)
counter += 1
bpm = xbpm2.sumX.get()
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%3.2f'%ais, wax = wa, xbpm = '%4.3f'%bpm)
sample_id(user_name='LR', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(stage.th, ai0)
| 41.508792
| 199
| 0.553196
| 5,336
| 35,407
| 3.516679
| 0.078711
| 0.120863
| 0.139408
| 0.102958
| 0.878337
| 0.867892
| 0.851266
| 0.840927
| 0.823501
| 0.812204
| 0
| 0.144673
| 0.308922
| 35,407
| 852
| 200
| 41.557512
| 0.622216
| 0.115712
| 0
| 0.768014
| 0
| 0.003515
| 0.135408
| 0.047217
| 0
| 0
| 0
| 0
| 0.024605
| 1
| 0.026362
| false
| 0
| 0
| 0
| 0.026362
| 0.040422
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
716c5ff57369e961defa7ad4eba0f2da002592ac
| 10,645
|
py
|
Python
|
Deprecated/PythonClient/test/unit_tests/test_recording.py
|
AbdulHoffmann/carla_carissma
|
8d382769ffa02a6c61a22c57160285505f5ff0a4
|
[
"MIT"
] | 63
|
2019-01-15T12:06:53.000Z
|
2021-09-24T03:22:38.000Z
|
Deprecated/PythonClient/test/unit_tests/test_recording.py
|
AbdulHoffmann/carla_carissma
|
8d382769ffa02a6c61a22c57160285505f5ff0a4
|
[
"MIT"
] | 5
|
2018-05-14T20:31:57.000Z
|
2018-09-01T15:40:37.000Z
|
Deprecated/PythonClient/test/unit_tests/test_recording.py
|
AbdulHoffmann/carla_carissma
|
8d382769ffa02a6c61a22c57160285505f5ff0a4
|
[
"MIT"
] | 40
|
2019-01-08T14:24:36.000Z
|
2022-01-04T23:46:30.000Z
|
import os
import unittest
from carla.driving_benchmark.recording import Recording
class testRecording(unittest.TestCase):
def test_init(self):
"""
The recording should have a reasonable full name
"""
recording = Recording(name_to_save='Test1',
continue_experiment=False, save_images=True)
_ = open(os.path.join(recording._path, 'summary.csv'), 'r')
_ = open(os.path.join(recording._path, 'measurements.csv'), 'r')
# There should be three files in any newly created case
self.assertEqual(len(os.listdir(recording._path)), 3)
def test_write_summary_results(self):
"""
Test writing summary results.
"""
from carla.driving_benchmark.experiment import Experiment
recording = Recording(name_to_save='Test1',
continue_experiment=False, save_images=True)
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
with open(os.path.join(recording._path, 'summary.csv'), 'r') as f:
header = f.readline().split(',')
# Check that the header contains the expected columns
self.assertIn('exp_id', header)
self.assertEqual(len(header), len(recording._dict_summary))
# Check that a data row was written after the header
written_row = f.readline().split(',')
# Check that the row has the correct number of columns
self.assertEqual(len(written_row), len(recording._dict_summary))
def test_write_summary_results_good_order(self):
"""
Test if the summary results are written in the same order on a new process
"""
from carla.driving_benchmark.experiment import Experiment
recording = Recording(name_to_save='Test_good_order',
continue_experiment=False, save_images=True)
for _ in range(0, 10):
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
recording = Recording(name_to_save='Test_good_order',
continue_experiment=True, save_images=True)
for _ in range(0, 10):
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
recording = Recording(name_to_save='Test_good_order',
continue_experiment=True, save_images=True)
for _ in range(0, 10):
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
recording = Recording(name_to_save='Test_good_order',
continue_experiment=True, save_images=True)
for _ in range(0, 10):
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
# Check that the test_good_order summaries all have the same files.
def test_write_measurements_results(self):
"""
Test writing a few measurements into the log
"""
import os
from carla.driving_benchmark.experiment import Experiment
from carla.carla_server_pb2 import Measurements
from carla.carla_server_pb2 import Control
recording = Recording(name_to_save='Test1',
continue_experiment=False, save_images=True)
reward_vec = [Measurements().player_measurements for _ in range(20)]
control_vec = [Control() for _ in range(25)]
recording.write_measurements_results(experiment=Experiment(),
rep=1, pose=[24, 32], reward_vec=reward_vec,
control_vec=control_vec)
with open(os.path.join(recording._path, 'measurements.csv'), 'r') as f:
header = f.readline().split(',')
# Check that the header contains the expected columns
self.assertIn('exp_id', header)
self.assertEqual(len(header), len(recording._dict_measurements))
# Check that a data row was written after the header
written_row = f.readline().split(',')
# Check that the row has the correct number of columns
self.assertEqual(len(written_row), len(recording._dict_measurements))
def test_continue_experiment(self):
"""
Test if you are able to continue an experiment after restarting the process
"""
recording = Recording(name_to_save='Test1',
continue_experiment=False, save_images=True)
# A just-started case should return 1 (start from the beginning)
self.assertEqual(recording._continue_experiment(True)[1], 1)
# If you don't want to continue, it should also return 1
self.assertEqual(recording._continue_experiment(False)[1], 1)
from carla.driving_benchmark.experiment import Experiment
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
# After two results are written, continuing should resume at pose 3
self.assertEqual(recording._continue_experiment(True)[1], 3)
# If you don't want to continue, it should also return 1
self.assertEqual(recording._continue_experiment(False)[1], 1)
def test_get_pose_and_experiment(self):
"""
Test getting the pose and the experiment from a previously executed benchmark
"""
recording = Recording(name_to_save='Test1',
continue_experiment=False, save_images=True)
from carla.driving_benchmark.experiment import Experiment
pose, experiment = recording.get_pose_and_experiment(25)
# A freshly started experiment should return zero for both pose and experiment
self.assertEqual(pose, 0)
self.assertEqual(experiment, 0)
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
pose, experiment = recording.get_pose_and_experiment(25)
self.assertEqual(pose, 2)
self.assertEqual(experiment, 0)
for _ in range(23):
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
pose, experiment = recording.get_pose_and_experiment(25)
self.assertEqual(pose, 0)
self.assertEqual(experiment, 1)
for _ in range(23):
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
pose, experiment = recording.get_pose_and_experiment(25)
self.assertEqual(pose, 23)
self.assertEqual(experiment, 1)
def test_get_pose_and_experiment_corner(self):
"""
Test getting the pose from multiple cases.
"""
from carla.driving_benchmark.experiment import Experiment
recording = Recording(name_to_save='Test1',
continue_experiment=False, save_images=True)
pose, experiment = recording.get_pose_and_experiment(1)
# A freshly started benchmark should return pose 0 and experiment 0
self.assertEqual(pose, 0)
self.assertEqual(experiment, 0)
pose, experiment = recording.get_pose_and_experiment(2)
self.assertEqual(pose, 0)
self.assertEqual(experiment, 0)
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
pose, experiment = recording.get_pose_and_experiment(1)
print(pose, experiment)
self.assertEqual(pose, 0)
self.assertEqual(experiment, 1)
pose, experiment = recording.get_pose_and_experiment(2)
print(pose, experiment)
# With one result written, it should return pose 1, experiment 0
self.assertEqual(pose, 1)
self.assertEqual(experiment, 0)
pose, experiment = recording.get_pose_and_experiment(3)
print(pose, experiment)
# With one result written, it should return pose 1, experiment 0
self.assertEqual(pose, 1)
self.assertEqual(experiment, 0)
recording.write_summary_results(experiment=Experiment(), pose=[24, 32], rep=1,
path_distance=200, remaining_distance=0,
final_time=0.2, time_out=49, result=1)
pose, experiment = recording.get_pose_and_experiment(2)
self.assertEqual(pose, 0)
self.assertEqual(experiment, 1)
pose, experiment = recording.get_pose_and_experiment(3)
self.assertEqual(pose, 2)
self.assertEqual(experiment, 0)
if __name__ == '__main__':
unittest.main()
| 41.745098
| 95
| 0.591639
| 1,186
| 10,645
| 5.100337
| 0.124789
| 0.076872
| 0.047115
| 0.060175
| 0.817986
| 0.796826
| 0.786411
| 0.743594
| 0.703257
| 0.668706
| 0
| 0.036545
| 0.321372
| 10,645
| 254
| 96
| 41.909449
| 0.800803
| 0.110474
| 0
| 0.789116
| 0
| 0
| 0.018566
| 0
| 0
| 0
| 0
| 0
| 0.22449
| 1
| 0.047619
| false
| 0
| 0.081633
| 0
| 0.136054
| 0.020408
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
716dcb594218098c2a38d1874789f79caeb0f0bd
| 5,368
|
py
|
Python
|
test/core/finally_test.py
|
kbelova/catcher
|
90c8ae02fbd8841b2786bb757e56749e26bb78c3
|
[
"Apache-2.0"
] | 84
|
2018-03-03T21:11:06.000Z
|
2022-02-19T14:50:20.000Z
|
test/core/finally_test.py
|
kbelova/catcher
|
90c8ae02fbd8841b2786bb757e56749e26bb78c3
|
[
"Apache-2.0"
] | 131
|
2019-01-08T18:49:23.000Z
|
2022-03-29T04:00:30.000Z
|
test/core/finally_test.py
|
kbelova/catcher
|
90c8ae02fbd8841b2786bb757e56749e26bb78c3
|
[
"Apache-2.0"
] | 11
|
2019-01-10T10:47:12.000Z
|
2021-09-24T05:13:40.000Z
|
import os
from os.path import join
from catcher.core.runner import Runner
from test.abs_test_class import TestClass
from test.test_utils import check_file
class FinallyTest(TestClass):
def __init__(self, method_name):
super().__init__('finally_test', method_name)
def test_run_finally(self):
self.populate_file('main.yaml', '''---
steps:
- echo: {from: '123', to: sys_env.output}
finally:
- sh:
command: 'rm sys_env.output'
path: '{{ CURRENT_DIR }}'
''')
runner = Runner(self.test_dir,
join(self.test_dir, 'main.yaml'),
None,
system_environment=dict(os.environ))
runner.run_tests()
self.assertFalse(check_file(join(self.test_dir, 'sys_env.output'), '123'))
def test_run_finally_fail_fail(self):
self.populate_file('main.yaml', '''---
steps:
- check: {equals: {the: '1', is: '2'}}
finally:
- sh:
command: 'mkdir test'
path: '{{ CURRENT_DIR }}'
run_if: 'fail'
''')
runner = Runner(self.test_dir,
join(self.test_dir, 'main.yaml'),
None,
system_environment=dict(os.environ))
runner.run_tests()
self.assertTrue(os.path.exists(join(self.test_dir, 'test')))
def test_run_finally_fail_pass(self):
self.populate_file('main.yaml', '''---
steps:
- check: {equals: {the: '2', is: '2'}}
finally:
- sh:
command: 'mkdir test'
path: '{{ CURRENT_DIR }}'
run_if: 'fail'
''')
runner = Runner(self.test_dir,
join(self.test_dir, 'main.yaml'),
None,
system_environment=dict(os.environ))
runner.run_tests()
self.assertFalse(os.path.exists(join(self.test_dir, 'test')))
def test_run_finally_pass_fail(self):
self.populate_file('main.yaml', '''---
steps:
- check: {equals: {the: '1', is: '2'}}
finally:
- sh:
command: 'mkdir test'
path: '{{ CURRENT_DIR }}'
run_if: 'pass'
''')
runner = Runner(self.test_dir,
join(self.test_dir, 'main.yaml'),
None,
system_environment=dict(os.environ))
runner.run_tests()
self.assertFalse(os.path.exists(join(self.test_dir, 'test')))
def test_run_finally_pass_pass(self):
self.populate_file('main.yaml', '''---
steps:
- check: {equals: {the: '2', is: '2'}}
finally:
- sh:
command: 'mkdir test'
path: '{{ CURRENT_DIR }}'
run_if: 'pass'
''')
runner = Runner(self.test_dir,
join(self.test_dir, 'main.yaml'),
None,
system_environment=dict(os.environ))
runner.run_tests()
self.assertTrue(os.path.exists(join(self.test_dir, 'test')))
def test_run_finally_always_pass(self):
self.populate_file('main.yaml', '''---
steps:
- check: {equals: {the: '2', is: '2'}}
finally:
- sh:
command: 'mkdir test'
path: '{{ CURRENT_DIR }}'
run_if: 'always'
''')
runner = Runner(self.test_dir,
join(self.test_dir, 'main.yaml'),
None,
system_environment=dict(os.environ))
runner.run_tests()
self.assertTrue(os.path.exists(join(self.test_dir, 'test')))
def test_run_finally_always_fail(self):
self.populate_file('main.yaml', '''---
steps:
- check: {equals: {the: '1', is: '2'}}
finally:
- sh:
command: 'mkdir test'
path: '{{ CURRENT_DIR }}'
run_if: 'always'
''')
runner = Runner(self.test_dir,
join(self.test_dir, 'main.yaml'),
None,
system_environment=dict(os.environ))
runner.run_tests()
self.assertTrue(os.path.exists(join(self.test_dir, 'test')))
| 41.292308
| 82
| 0.402198
| 461
| 5,368
| 4.466377
| 0.125813
| 0.081593
| 0.11219
| 0.101991
| 0.840214
| 0.828072
| 0.828072
| 0.812045
| 0.812045
| 0.812045
| 0
| 0.006515
| 0.485283
| 5,368
| 129
| 83
| 41.612403
| 0.73869
| 0
| 0
| 0.84874
| 0
| 0
| 0.490499
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 1
| 0.067227
| false
| 0.05042
| 0.042017
| 0
| 0.117647
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
e0be65fde7d0a88ea66f664d11b5063ad3133f74
| 21,014
|
py
|
Python
|
tests/test_core/test_text/test_text_effects.py
|
lionel42/pygame_gui
|
27b51f5b811b4569bc463566bc9f2d82ada119f6
|
[
"MIT"
] | null | null | null |
tests/test_core/test_text/test_text_effects.py
|
lionel42/pygame_gui
|
27b51f5b811b4569bc463566bc9f2d82ada119f6
|
[
"MIT"
] | null | null | null |
tests/test_core/test_text/test_text_effects.py
|
lionel42/pygame_gui
|
27b51f5b811b4569bc463566bc9f2d82ada119f6
|
[
"MIT"
] | null | null | null |
import pygame
import pygame.freetype
import pytest
from pygame_gui.core.text import TypingAppearEffect, FadeInEffect, FadeOutEffect
from pygame_gui.core.text import TextLineChunkFTFont
from pygame_gui.core.text.text_effects import BounceEffect, TiltEffect, ExpandContractEffect
from pygame_gui.ui_manager import UIManager
from pygame_gui.elements.ui_text_box import UITextBox
from pygame_gui import TEXT_EFFECT_FADE_OUT, TEXT_EFFECT_FADE_IN, TEXT_EFFECT_TYPING_APPEAR
from pygame_gui import TEXT_EFFECT_BOUNCE, TEXT_EFFECT_TILT, TEXT_EFFECT_EXPAND_CONTRACT
from pygame_gui import UITextEffectType, UI_TEXT_EFFECT_FINISHED
class TestTypingAppearEffect:
def test_creation(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('Hello world', pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
typing_effect = TypingAppearEffect(text_owner=text_box)
assert typing_effect.text_owner == text_box
def test_update(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('hello <font color=#FF0000>this is a</font> test',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
typing_effect = TypingAppearEffect(text_owner=text_box)
assert typing_effect.text_progress == 0
typing_effect.update(time_delta=0.06)
typing_effect.update(time_delta=0.06)
typing_effect.update(time_delta=0.06)
typing_effect.update(time_delta=0.06)
assert typing_effect.text_progress == 2
def test_has_text_block_changed(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('hello <font color=#FF0000>this is a</font> test',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
typing_effect = TypingAppearEffect(text_owner=text_box)
assert not typing_effect.has_text_changed()
typing_effect.update(time_delta=0.06)
typing_effect.update(time_delta=0.06)
assert typing_effect.has_text_changed()
def test_params(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_TYPING_APPEAR,
params={'time_per_letter': 3.0,
'time_per_letter_deviation': 1.0},
effect_tag='test')
assert isinstance(text_box.active_text_chunk_effects[0]['effect'],
TypingAppearEffect)
def test_finish_effect(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_TYPING_APPEAR, effect_tag='test')
effect: TypingAppearEffect = text_box.active_text_chunk_effects[0]['effect']
assert not effect.has_text_changed()
effect.update(time_delta=0.06)
effect.update(time_delta=20.0)
effect.update(time_delta=0.06)
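# Running well past the effect duration should have posted a UI_TEXT_EFFECT_FINISHED event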
for event in pygame.event.get():
if event.type == UI_TEXT_EFFECT_FINISHED:
assert event.effect == TEXT_EFFECT_TYPING_APPEAR
class TestFadeInEffect:
def test_creation(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('Hello world', pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
fade_in_effect = FadeInEffect(text_box=text_box)
assert fade_in_effect.text_owner == text_box
def test_update(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('hello <font color=#FF0000>this is a</font> test',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
fade_in_effect = FadeInEffect(text_box=text_box)
assert fade_in_effect.alpha_value == 0
fade_in_effect.update(time_delta=0.06)
fade_in_effect.update(time_delta=0.06)
assert fade_in_effect.alpha_value == (0.12 / fade_in_effect.time_per_alpha_change)
def test_has_text_block_changed(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('hello <font color=#FF0000>this is a</font> test',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
fade_in_effect = FadeInEffect(text_box=text_box)
assert not fade_in_effect.has_text_changed()
fade_in_effect.update(time_delta=0.06)
fade_in_effect.update(time_delta=0.06)
assert fade_in_effect.has_text_changed()
def test_params(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_FADE_IN,
params={'time_per_alpha_change': 19.0},
effect_tag='test')
assert isinstance(text_box.active_text_chunk_effects[0]['effect'], FadeInEffect)
def test_finish_effect(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_FADE_IN, effect_tag='test')
effect: FadeInEffect = text_box.active_text_chunk_effects[0]['effect']
assert not effect.has_text_changed()
effect.update(time_delta=0.06)
effect.update(time_delta=20.0)
effect.update(time_delta=0.06)
for event in pygame.event.get():
if event.type == UI_TEXT_EFFECT_FINISHED:
assert event.effect == TEXT_EFFECT_FADE_IN
class TestFadeOutEffect:
def test_creation(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('Hello world', pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
fade_out_effect = FadeOutEffect(text_owner=text_box)
assert fade_out_effect.text_owner == text_box
def test_update(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('hello <font color=#FF0000>this is a</font> test',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
fade_out_effect = FadeOutEffect(text_owner=text_box)
assert fade_out_effect.alpha_value == 255
fade_out_effect.update(time_delta=0.06)
fade_out_effect.update(time_delta=0.06)
assert fade_out_effect.alpha_value == 255 - (0.12 / fade_out_effect.time_per_alpha_change)
def test_has_text_block_changed(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('hello <font color=#FF0000>this is a</font> test',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
fade_out_effect = FadeOutEffect(text_owner=text_box)
assert not fade_out_effect.has_text_changed()
fade_out_effect.update(time_delta=0.06)
fade_out_effect.update(time_delta=0.06)
assert fade_out_effect.has_text_changed()
def test_params(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_FADE_OUT,
params={'time_per_alpha_change': 19.0},
effect_tag='test')
assert isinstance(text_box.active_text_chunk_effects[0]['effect'], FadeOutEffect)
def test_finish_effect(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_FADE_OUT, effect_tag='test')
effect: FadeOutEffect = text_box.active_text_chunk_effects[0]['effect']
assert not effect.has_text_changed()
effect.update(time_delta=0.06)
effect.update(time_delta=20.0)
effect.update(time_delta=0.06)
for event in pygame.event.get():
if event.type == UI_TEXT_EFFECT_FINISHED:
assert event.effect == TEXT_EFFECT_FADE_OUT
class TestBounceEffect:
def test_creation(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_BOUNCE, effect_tag='test')
assert isinstance(text_box.active_text_chunk_effects[0]['effect'], BounceEffect)
def test_update(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_BOUNCE, effect_tag='test')
effect = text_box.active_text_chunk_effects[0]['effect']
assert effect.bounce_height == 0
effect.update(time_delta=0.06)
effect.update(time_delta=0.06)
assert effect.bounce_height != 0
def test_has_text_changed(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_BOUNCE, effect_tag='test')
effect: BounceEffect = text_box.active_text_chunk_effects[0]['effect']
assert not effect.has_text_changed()
effect.update(time_delta=0.06)
effect.update(time_delta=0.06)
assert effect.has_text_changed()
def test_apply_effect(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_BOUNCE, effect_tag='test')
effect = text_box.active_text_chunk_effects[0]['effect']
chunk = text_box.active_text_chunk_effects[0]['chunk']
assert chunk.effects_offset_pos == (0, 0)
effect.update(time_delta=0.06)
effect.update(time_delta=0.06)
effect.apply_effect()
assert chunk.effects_offset_pos != (0, 0)
def test_params(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_BOUNCE,
params={'loop': False,
'bounce_max_height': 10,
'time_to_complete_bounce': 19.0},
effect_tag='test')
assert isinstance(text_box.active_text_chunk_effects[0]['effect'], BounceEffect)
def test_finish_effect(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_BOUNCE, effect_tag='test',
params={'loop': False})
effect: BounceEffect = text_box.active_text_chunk_effects[0]['effect']
assert not effect.has_text_changed()
effect.update(time_delta=0.06)
effect.update(time_delta=20.0)
effect.update(time_delta=0.06)
for event in pygame.event.get():
if event.type == UI_TEXT_EFFECT_FINISHED:
assert event.effect == TEXT_EFFECT_BOUNCE
class TestTiltEffect:
def test_creation(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_TILT, effect_tag='test')
assert isinstance(text_box.active_text_chunk_effects[0]['effect'], TiltEffect)
def test_update(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_TILT, effect_tag='test')
effect: TiltEffect = text_box.active_text_chunk_effects[0]['effect']
assert effect.current_rotation == 0
effect.update(time_delta=0.06)
effect.update(time_delta=0.06)
assert effect.current_rotation != 0
def test_has_text_changed(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_TILT, effect_tag='test')
effect: TiltEffect = text_box.active_text_chunk_effects[0]['effect']
assert not effect.has_text_changed()
effect.update(time_delta=0.06)
effect.update(time_delta=0.06)
assert effect.has_text_changed()
def test_apply_effect(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_TILT, effect_tag='test')
effect: TiltEffect = text_box.active_text_chunk_effects[0]['effect']
chunk: TextLineChunkFTFont = text_box.active_text_chunk_effects[0]['chunk']
assert chunk.effects_rotation == 0
effect.update(time_delta=0.06)
effect.update(time_delta=0.06)
effect.apply_effect()
assert chunk.effects_rotation != 0
def test_params(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_TILT,
params={'loop': False,
'max_rotation': 360,
'time_to_complete_rotation': 9.0},
effect_tag='test')
assert isinstance(text_box.active_text_chunk_effects[0]['effect'], TiltEffect)
def test_finish_effect(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_TILT, effect_tag='test',
params={'loop': False})
effect: TiltEffect = text_box.active_text_chunk_effects[0]['effect']
assert not effect.has_text_changed()
effect.update(time_delta=0.06)
effect.update(time_delta=20.0)
effect.update(time_delta=0.06)
for event in pygame.event.get():
if event.type == UI_TEXT_EFFECT_FINISHED:
assert event.effect == TEXT_EFFECT_TILT
class TestExpandContractEffect:
def test_creation(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_EXPAND_CONTRACT, effect_tag='test')
assert isinstance(text_box.active_text_chunk_effects[0]['effect'], ExpandContractEffect)
def test_update(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_EXPAND_CONTRACT, effect_tag='test')
effect: ExpandContractEffect = text_box.active_text_chunk_effects[0]['effect']
assert effect.current_scale == 1.0
effect.update(time_delta=0.06)
effect.update(time_delta=0.06)
assert effect.current_scale != 1.0
def test_has_text_changed(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_EXPAND_CONTRACT, effect_tag='test')
effect: ExpandContractEffect = text_box.active_text_chunk_effects[0]['effect']
assert not effect.has_text_changed()
effect.update(time_delta=0.06)
effect.update(time_delta=0.06)
assert effect.has_text_changed()
def test_apply_effect(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_EXPAND_CONTRACT, effect_tag='test')
effect: ExpandContractEffect = text_box.active_text_chunk_effects[0]['effect']
chunk: TextLineChunkFTFont = text_box.active_text_chunk_effects[0]['chunk']
assert chunk.effects_scale == 1.0
effect.update(time_delta=0.06)
effect.update(time_delta=0.06)
effect.apply_effect()
assert chunk.effects_scale != 1.0
def test_params(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_EXPAND_CONTRACT,
params={'loop': False,
'max_scale': 2.0,
'time_to_complete_expand_contract': 10.0},
effect_tag='test')
assert isinstance(text_box.active_text_chunk_effects[0]['effect'], ExpandContractEffect)
def test_finish_effect(self, _init_pygame, default_ui_manager: UIManager):
text_box = UITextBox('<effect id=test>Hello world</effect>',
pygame.Rect((10, 10), (200, 100)),
default_ui_manager)
text_box.set_active_effect(TEXT_EFFECT_EXPAND_CONTRACT, effect_tag='test',
params={'loop': False})
effect: ExpandContractEffect = text_box.active_text_chunk_effects[0]['effect']
assert not effect.has_text_changed()
effect.update(time_delta=0.06)
effect.update(time_delta=20.0)
effect.update(time_delta=0.06)
for event in pygame.event.get():
if event.type == UI_TEXT_EFFECT_FINISHED:
assert event.effect == TEXT_EFFECT_EXPAND_CONTRACT
class TestTextEffectType:
def basic_tests(self, _init_pygame):
test_effect_type = UITextEffectType('test_effect')
assert 'this is test_effect' == 'this is ' + test_effect_type
assert 'test_effect this is' == test_effect_type + ' this is'
assert 'test_effect' == test_effect_type
assert 'test_effect' == str(test_effect_type)
with pytest.raises(AttributeError, match="Can't append to anything other than a string"):
test_effect_type + 5
with pytest.raises(AttributeError,
match="Can't append to anything other than a string"):
val = 5 + test_effect_type
if __name__ == '__main__':
pytest.console_main()
| 41.203922
| 98
| 0.622204
| 2,549
| 21,014
| 4.785014
| 0.051393
| 0.057391
| 0.086579
| 0.086087
| 0.906288
| 0.894728
| 0.879725
| 0.865541
| 0.865541
| 0.865131
| 0
| 0.039061
| 0.280004
| 21,014
| 509
| 99
| 41.284872
| 0.767085
| 0
| 0
| 0.739377
| 0
| 0
| 0.087608
| 0.006995
| 0
| 0
| 0
| 0
| 0.164306
| 1
| 0.096317
| false
| 0
| 0.031161
| 0
| 0.147309
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4613aa9425c580ddf824e57bc2178ceefd2b3091
| 720
|
py
|
Python
|
tests/test_time_drift.py
|
getslash/flux
|
4bd031e585e0df42e18bd87797df6ac5ab2ece52
|
[
"BSD-3-Clause"
] | 6
|
2016-11-29T11:01:20.000Z
|
2022-03-04T20:00:05.000Z
|
tests/test_time_drift.py
|
getslash/flux
|
4bd031e585e0df42e18bd87797df6ac5ab2ece52
|
[
"BSD-3-Clause"
] | 3
|
2018-12-12T08:59:28.000Z
|
2020-10-06T05:51:18.000Z
|
tests/test_time_drift.py
|
getslash/flux
|
4bd031e585e0df42e18bd87797df6ac5ab2ece52
|
[
"BSD-3-Clause"
] | 2
|
2016-05-22T15:27:56.000Z
|
2019-01-28T12:33:42.000Z
|
from flux import timeline
import pytest
def test_time_drift_default_factor(forge, mocked_time_module):
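# Patch the timeline module's time source with the mocked clock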
forge.replace_with(timeline, "time", mocked_time_module)
t = timeline.Timeline()
assert t.time() == mocked_time_module.time()
mocked_time_module.__advance__()
assert t.time() == mocked_time_module.time()
def test_time_drift_change_restore_default_factor(forge, mocked_time_module):
forge.replace_with(timeline, "time", mocked_time_module)
t = timeline.Timeline()
assert t.time() == mocked_time_module.time()
mocked_time_module.__advance__()
t.set_time_factor(1.5)
t.set_time_factor(1)
mocked_time_module.__advance__()
assert t.time() == mocked_time_module.time()
| 36
| 77
| 0.754167
| 101
| 720
| 4.881188
| 0.237624
| 0.223124
| 0.356998
| 0.324544
| 0.837728
| 0.776876
| 0.776876
| 0.776876
| 0.776876
| 0.776876
| 0
| 0.004831
| 0.1375
| 720
| 19
| 78
| 37.894737
| 0.78905
| 0
| 0
| 0.647059
| 0
| 0
| 0.011111
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 1
| 0.117647
| false
| 0
| 0.117647
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1cc772287aef5d37b56e1ae3cf3e48bc1262a604
| 5,497
|
py
|
Python
|
convert/rtkgps2pose/TF.py
|
jiexuan/evaluation_tools
|
d8cab5cea2c859ef6067aaedc8cf11be102ad7f8
|
[
"MIT"
] | 12
|
2019-05-13T10:20:47.000Z
|
2022-02-16T03:40:47.000Z
|
convert/rtkgps2pose/TF.py
|
michaelczhou/evaluation_tools
|
1ef3f6d65869990eb35b6e69106a77e0baf2c0b4
|
[
"MIT"
] | null | null | null |
convert/rtkgps2pose/TF.py
|
michaelczhou/evaluation_tools
|
1ef3f6d65869990eb35b6e69106a77e0baf2c0b4
|
[
"MIT"
] | 7
|
2019-04-24T02:33:09.000Z
|
2021-01-13T08:33:38.000Z
|
#!/usr/bin/env python
import math
gps_length_x = 0  # .50 -- vertical distance between the car's center point and the GPS module
gps_length_y = 0  # .29 -- horizontal distance between the car's center point and the GPS module
class TF:
def __init__(self):
self.a=0
# TargetPoint[0] == North (x, heading), TargetPoint[1] == East (y, right), TargetPoint[2] == yaw (clockwise; the x direction is 0 and 2*pi)
# Get the target point in the base_link coordinate system.
# TargetPoint and BaseLinkPoint are in the rtkGPS coordinate system.
def rtkGPStoBaseLink(self,TargetPoint, BaseLinkPoint):
X_rtkGPS_Target = TargetPoint[0]
Y_rtkGPS_Target = TargetPoint[1]
YAW_rtkGPS_Target = TargetPoint[2]
X_rtkGPS_BaseLink = BaseLinkPoint[0]
Y_rtkGPS_BaseLink = BaseLinkPoint[1]
YAW_rtkGPS_BaseLink = BaseLinkPoint[2]
# Compute the target point in the base_link coordinate system
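# The world-frame displacement is rotated by the base_link yaw to express it in the body frame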
X_BaseLink_Target=(X_rtkGPS_Target - X_rtkGPS_BaseLink)*math.cos(YAW_rtkGPS_BaseLink)\
+ (Y_rtkGPS_Target - Y_rtkGPS_BaseLink)*math.sin(YAW_rtkGPS_BaseLink)
Y_BaseLink_Target=-(X_rtkGPS_Target - X_rtkGPS_BaseLink)*math.sin(YAW_rtkGPS_BaseLink)\
+ (Y_rtkGPS_Target - Y_rtkGPS_BaseLink)*math.cos(YAW_rtkGPS_BaseLink)
YAW_BaseLink_Target = YAW_rtkGPS_Target - YAW_rtkGPS_BaseLink
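# Flip the Y and yaw signs, apparently to convert between the two frames' handedness conventions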
Y_BaseLink_Target = -Y_BaseLink_Target
YAW_BaseLink_Target = -YAW_BaseLink_Target
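# Normalize the yaw into the [-pi, pi] range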
if YAW_BaseLink_Target > 2*math.pi:
YAW_BaseLink_Target = YAW_BaseLink_Target - 2*math.pi
if YAW_BaseLink_Target < -2*math.pi:
YAW_BaseLink_Target = YAW_BaseLink_Target + 2*math.pi
if YAW_BaseLink_Target > math.pi:
YAW_BaseLink_Target = YAW_BaseLink_Target - 2*math.pi
if YAW_BaseLink_Target < -math.pi:
YAW_BaseLink_Target = YAW_BaseLink_Target + 2*math.pi
return [X_BaseLink_Target, Y_BaseLink_Target, YAW_BaseLink_Target]
def BaseLinktortkGPS(self, TargetPoint_BaseLink, BaseLinkPoint): #-- transform the target from base_link coordinates to rtkGPS coordinates
X_BaseLink_Target_2 = TargetPoint_BaseLink[1]
Y_BaseLink_Target_2 = TargetPoint_BaseLink[0]
YAW_BaseLink_Target_2 = TargetPoint_BaseLink[2]
X_rtkGPS_BaseLink_2 = BaseLinkPoint[0]
Y_rtkGPS_BaseLink_2 = BaseLinkPoint[1]
YAW_rtkGPS_BaseLink_2 = BaseLinkPoint[2]
# X_rtkGPS_Target = (X_BaseLink_Target/math.sin(YAW_rtkGPS_BaseLink)*math.cos(YAW_rtkGPS_BaseLink) - Y_BaseLink_Target)/((math.cos(YAW_rtkGPS_BaseLink))/(math.sin(YAW_rtkGPS_BaseLink))*(math.cos(YAW_rtkGPS_BaseLink)) + math.sin(YAW_rtkGPS_BaseLink)) + X_rtkGPS_BaseLink
# Y_rtkGPS_Target = (X_BaseLink_Target/(math.cos(YAW_rtkGPS_BaseLink))*(math.sin(YAW_rtkGPS_BaseLink)) + Y_BaseLink_Target)/((math.sin(YAW_rtkGPS_BaseLink))/math.cos(YAW_rtkGPS_BaseLink)*math.sin(YAW_rtkGPS_BaseLink) + math.cos(YAW_rtkGPS_BaseLink)) + Y_rtkGPS_BaseLink
# YAW_rtkGPS_Target = YAW_BaseLink_Target + YAW_rtkGPS_BaseLink
# -- this transform may be incorrect
X_rtkGPS_Target_2 = X_rtkGPS_BaseLink_2+gps_length_x*math.cos(YAW_rtkGPS_BaseLink_2)+X_BaseLink_Target_2*math.cos(YAW_rtkGPS_BaseLink_2)+Y_BaseLink_Target_2*math.sin(YAW_rtkGPS_BaseLink_2)
Y_rtkGPS_Target_2 = Y_rtkGPS_BaseLink_2+gps_length_x*math.sin(YAW_rtkGPS_BaseLink_2)+X_BaseLink_Target_2*math.sin(YAW_rtkGPS_BaseLink_2)-Y_BaseLink_Target_2*math.cos(YAW_rtkGPS_BaseLink_2)
YAW_rtkGPS_Target_2 = YAW_rtkGPS_BaseLink_2 - YAW_BaseLink_Target_2
return [X_rtkGPS_Target_2,Y_rtkGPS_Target_2,YAW_rtkGPS_Target_2]
def BaseToGPS(self, TargetPoint_BaseLink, BaseLinkPoint): #-- transform the target from base_link coordinates to GPS coordinates
X_BaseLink_Target_2 = TargetPoint_BaseLink[0]
Y_BaseLink_Target_2 = TargetPoint_BaseLink[1]
YAW_BaseLink_Target_2 = TargetPoint_BaseLink[2]
X_rtkGPS_BaseLink_2 = BaseLinkPoint[0]
Y_rtkGPS_BaseLink_2 = BaseLinkPoint[1]
YAW_rtkGPS_BaseLink_2 = BaseLinkPoint[2]
# X_rtkGPS_Target = (X_BaseLink_Target/math.sin(YAW_rtkGPS_BaseLink)*math.cos(YAW_rtkGPS_BaseLink) - Y_BaseLink_Target)/((math.cos(YAW_rtkGPS_BaseLink))/(math.sin(YAW_rtkGPS_BaseLink))*(math.cos(YAW_rtkGPS_BaseLink)) + math.sin(YAW_rtkGPS_BaseLink)) + X_rtkGPS_BaseLink
# Y_rtkGPS_Target = (X_BaseLink_Target/(math.cos(YAW_rtkGPS_BaseLink))*(math.sin(YAW_rtkGPS_BaseLink)) + Y_BaseLink_Target)/((math.sin(YAW_rtkGPS_BaseLink))/math.cos(YAW_rtkGPS_BaseLink)*math.sin(YAW_rtkGPS_BaseLink) + math.cos(YAW_rtkGPS_BaseLink)) + Y_rtkGPS_BaseLink
# YAW_rtkGPS_Target = YAW_BaseLink_Target + YAW_rtkGPS_BaseLink
X_rtkGPS_Target_2 = X_rtkGPS_BaseLink_2+X_BaseLink_Target_2*math.cos(YAW_rtkGPS_BaseLink_2)+Y_BaseLink_Target_2*math.sin(YAW_rtkGPS_BaseLink_2)
Y_rtkGPS_Target_2 = Y_rtkGPS_BaseLink_2+X_BaseLink_Target_2*math.sin(YAW_rtkGPS_BaseLink_2)-Y_BaseLink_Target_2*math.cos(YAW_rtkGPS_BaseLink_2)
YAW_rtkGPS_Target_2 = YAW_rtkGPS_BaseLink_2 - YAW_BaseLink_Target_2
return [X_rtkGPS_Target_2,Y_rtkGPS_Target_2,YAW_rtkGPS_Target_2]
if __name__ == "__main__":
body = TF()
TargetPoint = [10, 10, math.pi]
BaseLinkPoint = [5, 5, 0]
tmp1 = body.rtkGPStoBaseLink(TargetPoint, BaseLinkPoint)
tmp2 = body.BaseToGPS(tmp1, BaseLinkPoint)
print(TargetPoint)
print(tmp1)
print(tmp2)
| 58.478723
| 277
| 0.745679
| 784
| 5,497
| 4.776786
| 0.096939
| 0.239252
| 0.208812
| 0.081175
| 0.830975
| 0.808011
| 0.777303
| 0.743124
| 0.705741
| 0.68251
| 0
| 0.02152
| 0.171548
| 5,497
| 93
| 278
| 59.107527
| 0.800834
| 0.338184
| 0
| 0.266667
| 0
| 0
| 0.002268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.016667
| null | null | 0.05
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1cc7af49827a795fe5a89da7a8407a4326948d9d
| 46,046
|
py
|
Python
|
adaptnlp/transformers/question_answering.py
|
emycooper/adaptnlp
|
2e39f81a7faa4c7cd1d2a3764790cf7bb7ad7469
|
[
"Apache-2.0"
] | 5
|
2020-03-30T12:50:56.000Z
|
2022-01-20T22:45:29.000Z
|
adaptnlp/transformers/question_answering.py
|
emycooper/adaptnlp
|
2e39f81a7faa4c7cd1d2a3764790cf7bb7ad7469
|
[
"Apache-2.0"
] | 9
|
2020-11-13T18:41:44.000Z
|
2022-02-10T01:58:28.000Z
|
adaptnlp/transformers/question_answering.py
|
emycooper/adaptnlp
|
2e39f81a7faa4c7cd1d2a3764790cf7bb7ad7469
|
[
"Apache-2.0"
] | 1
|
2020-03-30T17:29:05.000Z
|
2020-03-30T17:29:05.000Z
|
# Contains code used/modified by AdaptNLP author from transformers
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC, abstractmethod
from typing import List, Union, Tuple
import collections
from collections import OrderedDict
import torch
from torch.utils.data import DataLoader, SequentialSampler, TensorDataset
from transformers import (
BertConfig,
BertForQuestionAnswering,
BertTokenizer,
XLMConfig,
XLMForQuestionAnswering,
XLMTokenizer,
XLNetConfig,
XLNetForQuestionAnswering,
XLNetTokenizer,
)
from adaptnlp.transformers.utils_squad import (
SquadExample,
InputFeatures,
convert_examples_to_features,
RawResult,
RawResultExtended,
get_final_text,
_get_best_indexes,
_compute_softmax,
)
class QuestionAnsweringModel(ABC):
@abstractmethod
def __init__(self):
super().__init__()
self.config
self.tokenizer
self.model
@abstractmethod
def _load(self):
raise NotImplementedError
@abstractmethod
def predict(self, query, context, top_n, as_dict):
raise NotImplementedError
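# Usage sketch (illustrative; any concrete subclass below is driven the
# same way):
#   qa = BertQuestionAnsweringModel()
#   best_answer, nbest = qa.predict(
#       query="Who wrote the paper?",
#       context="The paper was written by the research team.",
#   )
# predict() lazily calls _load() on first use, so construction is cheap.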
# TODO To be deprecated in the near future for a better module design
class BertQuestionAnsweringModel(QuestionAnsweringModel):
def __init__(self):
self.config = BertConfig
self.tokenizer = BertTokenizer
self.model = BertForQuestionAnswering
self.model_names = list(self.config.pretrained_config_archive_map.keys())
# Post Load
self.pretrained_config = None
self.pretrained_tokenizer = None
self.pretrained_model = None
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def _to_list(self, tensor: torch.Tensor) -> List[float]:
return tensor.detach().cpu().tolist()
def _load(self) -> None:
print("Loading Pretrained Bert Question Answering Model...")
model_name = "bert-large-uncased-whole-word-masking-finetuned-squad"
self.pretrained_config = self.config.from_pretrained(
"bert-large-uncased-whole-word-masking-finetuned-squad"
)
if "uncased" in model_name:
tokenizer = self.tokenizer.from_pretrained(
"bert-large-uncased", do_lower_case=True
)
else:
tokenizer = self.tokenizer.from_pretrained(
"bert-large-cased", do_lower_case=False
)
self.pretrained_tokenizer = tokenizer
model = self.model.from_pretrained(
model_name,
from_tf=bool(".ckpt" in model_name),
config=self.pretrained_config,
)
self.pretrained_model = model
self.pretrained_model.to(self.device)
def _load_one_query(
self, query: str, context: str, output_examples=True
) -> Union[TensorDataset, List[SquadExample], List[InputFeatures]]:
# Create doc_tokens for SquadExample with one query and context
def is_whitespace(c):
if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F:
return True
return False
# Create doc_tokens
doc_tokens = []
prev_is_whitespace = True
for c in context:
if is_whitespace(c):
prev_is_whitespace = True
else:
if prev_is_whitespace:
doc_tokens.append(c)
else:
doc_tokens[-1] += c
prev_is_whitespace = False
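# Illustrative: the loop above is a plain whitespace tokenizer; each run
# of non-whitespace characters becomes one doc token, e.g.
#   "New  York\tCity" -> doc_tokens == ["New", "York", "City"]
# (0x202F, the narrow no-break space, is treated as whitespace too).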
# Create SquadExample
examples = []
example = SquadExample(
qas_id=None,
question_text=query,
doc_tokens=doc_tokens,
orig_answer_text=None,
start_position=None,
end_position=None,
is_impossible=False,
)
examples.append(example)
# Convert to features
features = convert_examples_to_features(
examples=examples,
tokenizer=self.pretrained_tokenizer,
max_seq_length=384,
doc_stride=128,
max_query_length=64,
is_training=False,
)
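# Note on the fixed parameters above: with max_seq_length=384 and
# doc_stride=128, a context longer than one window is split into
# overlapping spans whose starts advance 128 tokens at a time, so every
# context token appears in at least one span (and usually several).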
# Convert to Tensors and build dataset
all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
all_input_mask = torch.tensor(
[f.input_mask for f in features], dtype=torch.long
)
all_segment_ids = torch.tensor(
[f.segment_ids for f in features], dtype=torch.long
)
all_cls_index = torch.tensor([f.cls_index for f in features], dtype=torch.long)
all_p_mask = torch.tensor([f.p_mask for f in features], dtype=torch.float)
all_example_index = torch.arange(all_input_ids.size(0), dtype=torch.long)
dataset = TensorDataset(
all_input_ids,
all_input_mask,
all_segment_ids,
all_example_index,
all_cls_index,
all_p_mask,
)
if output_examples:
return dataset, examples, features
return dataset
def _produce_concrete_predictions(
self,
all_examples,
all_features,
all_results,
n_best_size=10,
max_answer_length=30,
do_lower_case=True,
verbose_logging=False,
version_2_with_negative=True,
null_score_diff_threshold=0.0,
):
example_index_to_features = collections.defaultdict(list)
for feature in all_features:
example_index_to_features[feature.example_index].append(feature)
unique_id_to_result = {}
for result in all_results:
unique_id_to_result[result.unique_id] = result
_PrelimPrediction = collections.namedtuple( # pylint: disable=invalid-name
"PrelimPrediction",
["feature_index", "start_index", "end_index", "start_logit", "end_logit"],
)
all_predictions = collections.OrderedDict()
all_nbest_json = collections.OrderedDict()
scores_diff_json = collections.OrderedDict()
for (example_index, example) in enumerate(all_examples):
features = example_index_to_features[example_index]
prelim_predictions = []
# keep track of the minimum score of null start+end of position 0
score_null = 1000000 # large and positive
min_null_feature_index = 0 # the paragraph slice with min null score
null_start_logit = 0 # the start logit at the slice with min null score
null_end_logit = 0 # the end logit at the slice with min null score
for (feature_index, feature) in enumerate(features):
result = unique_id_to_result[feature.unique_id]
start_indexes = _get_best_indexes(result.start_logits, n_best_size)
end_indexes = _get_best_indexes(result.end_logits, n_best_size)
# if we could have irrelevant answers, get the min score of irrelevant
if version_2_with_negative:
feature_null_score = result.start_logits[0] + result.end_logits[0]
if feature_null_score < score_null:
score_null = feature_null_score
min_null_feature_index = feature_index
null_start_logit = result.start_logits[0]
null_end_logit = result.end_logits[0]
for start_index in start_indexes:
for end_index in end_indexes:
# We could hypothetically create invalid predictions, e.g., predict
# that the start of the span is in the question. We throw out all
# invalid predictions.
if start_index >= len(feature.tokens):
continue
if end_index >= len(feature.tokens):
continue
if start_index not in feature.token_to_orig_map:
continue
if end_index not in feature.token_to_orig_map:
continue
if not feature.token_is_max_context.get(start_index, False):
continue
if end_index < start_index:
continue
length = end_index - start_index + 1
if length > max_answer_length:
continue
prelim_predictions.append(
_PrelimPrediction(
feature_index=feature_index,
start_index=start_index,
end_index=end_index,
start_logit=result.start_logits[start_index],
end_logit=result.end_logits[end_index],
)
)
if version_2_with_negative:
prelim_predictions.append(
_PrelimPrediction(
feature_index=min_null_feature_index,
start_index=0,
end_index=0,
start_logit=null_start_logit,
end_logit=null_end_logit,
)
)
prelim_predictions = sorted(
prelim_predictions,
key=lambda x: (x.start_logit + x.end_logit),
reverse=True,
)
_NbestPrediction = collections.namedtuple( # pylint: disable=invalid-name
"NbestPrediction",
["text", "start_index", "end_index", "start_logit", "end_logit"],
) # ### start_end_index
seen_predictions = {}
nbest = []
for pred in prelim_predictions:
if len(nbest) >= n_best_size:
break
feature = features[pred.feature_index]
orig_doc_start = 0
orig_doc_end = 0
if pred.start_index > 0: # this is a non-null prediction
tok_tokens = feature.tokens[pred.start_index : (pred.end_index + 1)]
orig_doc_start = feature.token_to_orig_map[pred.start_index]
orig_doc_end = feature.token_to_orig_map[pred.end_index]
orig_tokens = example.doc_tokens[
orig_doc_start : (orig_doc_end + 1)
]
tok_text = " ".join(tok_tokens)
# De-tokenize WordPieces that have been split off.
tok_text = tok_text.replace(" ##", "")
tok_text = tok_text.replace("##", "")
# Clean whitespace
tok_text = tok_text.strip()
tok_text = " ".join(tok_text.split())
orig_text = " ".join(orig_tokens)
final_text = get_final_text(
tok_text, orig_text, do_lower_case, verbose_logging
)
if final_text in seen_predictions:
continue
seen_predictions[final_text] = True
else:
final_text = ""
seen_predictions[final_text] = True
nbest.append(
_NbestPrediction(
text=final_text,
start_logit=pred.start_logit,
end_logit=pred.end_logit,
start_index=orig_doc_start,
end_index=orig_doc_end,
)
) # ### start_end_index...Make span indices inclusive
# if we didn't include the empty option in the n-best, include it
if version_2_with_negative:
if "" not in seen_predictions:
nbest.append(
_NbestPrediction(
text="",
start_logit=null_start_logit,
end_logit=null_end_logit,
start_index=0,
end_index=0,
)
) # ### start_end_index should this be pred.<index>
# In very rare edge cases we could have only a single null prediction.
# So we just create a nonce prediction in this case to avoid failure.
if len(nbest) == 1:
nbest.insert(
0,
_NbestPrediction(
text="empty",
start_logit=0.0,
end_logit=0.0,
start_index=0.0,
end_index=0.0,
),
) # ### start_end_index
# In very rare edge cases we could have no valid predictions. So we
# just create a nonce prediction in this case to avoid failure.
if not nbest:
nbest.append(
_NbestPrediction(
text="empty",
start_logit=0.0,
end_logit=0.0,
start_index=0.0,
end_index=0.0,
)
) # ### start_end_index
assert len(nbest) >= 1
total_scores = []
best_non_null_entry = None
for entry in nbest:
total_scores.append(entry.start_logit + entry.end_logit)
if not best_non_null_entry:
if entry.text:
best_non_null_entry = entry
probs = _compute_softmax(total_scores)
nbest_json = []
for (i, entry) in enumerate(nbest):
output = collections.OrderedDict()
output["text"] = entry.text
output["probability"] = probs[i]
output["start_logit"] = entry.start_logit
output["end_logit"] = entry.end_logit
output[
"start_index"
] = (
entry.start_index
) # ### start_end_index MAGIC NUMBERS for adjustment :/
output["end_index"] = entry.end_index
nbest_json.append(output)
assert len(nbest_json) >= 1
if not version_2_with_negative:
all_predictions[example.qas_id] = nbest_json[0]["text"]
else:
# predict "" iff the null score - the score of best non-null > threshold
score_diff = (
score_null
- best_non_null_entry.start_logit
- (best_non_null_entry.end_logit)
)
scores_diff_json[example.qas_id] = score_diff
if score_diff > null_score_diff_threshold:
all_predictions[example.qas_id] = ""
else:
all_predictions[example.qas_id] = best_non_null_entry.text
all_nbest_json[example.qas_id] = nbest_json
# All ids set as None so get rid of None Key
all_predictions = all_predictions[None]
all_nbest_json = all_nbest_json[None]
return all_predictions, all_nbest_json
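# Worked example of the null-answer rule above (SQuAD 2.0 style): the
# model answers "" iff score_null minus the best non-null span score
# exceeds null_score_diff_threshold. E.g. score_null = 1.2 and a best
# span with start_logit + end_logit = 0.9 gives score_diff = 0.3 > 0.0,
# so the empty answer is returned under the default threshold.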
def predict(
self, query: str, context: str, n_best_size: int = 20
) -> Tuple[str, List[OrderedDict]]:
""" Predicts top_n answer spans of query in regards to context
Args:
query: The question
context: The context of which the question is asking
top_n: The top n answers returned
Returns:
Either a list of string answers or a dict of the results
"""
if not self.pretrained_model or not self.pretrained_tokenizer: self._load()
# Load and Evaluate Context Queries
dataset, examples, features = self._load_one_query(query, context)
eval_sampler = SequentialSampler(dataset)
eval_dataloader = DataLoader(dataset, sampler=eval_sampler, batch_size=5)
all_results = []
for batch in eval_dataloader:
self.pretrained_model.eval()
batch = tuple(t.to(self.device) for t in batch)
with torch.no_grad():
# BERT XLM XLNET DIFFERENCE
inputs = {
"input_ids": batch[0],
"attention_mask": batch[1],
"token_type_ids": batch[2],
}
example_indices = batch[3]
outputs = self.pretrained_model(**inputs)
for i, example_index in enumerate(example_indices):
eval_feature = features[example_index.item()]
unique_id = int(eval_feature.unique_id)
# BERT XLM XLNET DIFFERENCE
result = RawResult(
unique_id=unique_id,
start_logits=self._to_list(outputs[0][i]),
end_logits=self._to_list(outputs[1][i]),
)
all_results.append(result)
# Obtain Concrete Predictions
all_predictions, all_nbest_json = self._produce_concrete_predictions(
examples, features, all_results, n_best_size=n_best_size
)
return all_predictions, all_nbest_json
class XLNetQuestionAnsweringModel(QuestionAnsweringModel):
def __init__(self):
self.config = XLNetConfig
self.tokenizer = XLNetTokenizer
self.model = XLNetForQuestionAnswering
self.model_names = list(self.config.pretrained_config_archive_map.keys())
# Post Load
self.pretrained_config = None
self.pretrained_tokenizer = None
self.pretrained_model = None
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def _to_list(self, tensor: torch.Tensor) -> List[float]:
return tensor.detach().cpu().tolist()
def _load(self) -> None:
print("Loading Pretrained XLNet Question Answering Model...")
model_name = "xlnet-large-cased"
self.pretrained_config = self.config.from_pretrained("xlnet-large-cased")
tokenizer = self.tokenizer.from_pretrained(
"xlnet-large-cased", do_lower_case=False
)
self.pretrained_tokenizer = tokenizer
model = self.model.from_pretrained(
model_name,
from_tf=bool(".ckpt" in model_name),
config=self.pretrained_config,
)
self.pretrained_model = model
self.pretrained_model.to(self.device)
def _load_one_query(
self, query: str, context: str, output_examples=True
) -> Union[TensorDataset, List[SquadExample], List[InputFeatures]]:
# Create doc_tokens for SquadExample with one query and context
def is_whitespace(c):
if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F:
return True
return False
# Create doc_tokens
doc_tokens = []
prev_is_whitespace = True
for c in context:
if is_whitespace(c):
prev_is_whitespace = True
else:
if prev_is_whitespace:
doc_tokens.append(c)
else:
doc_tokens[-1] += c
prev_is_whitespace = False
# Create SquadExample
examples = []
example = SquadExample(
qas_id=None,
question_text=query,
doc_tokens=doc_tokens,
orig_answer_text=None,
start_position=None,
end_position=None,
is_impossible=False,
)
examples.append(example)
# Convert to features
features = convert_examples_to_features(
examples=examples,
tokenizer=self.pretrained_tokenizer,
max_seq_length=384,
doc_stride=128,
max_query_length=64,
is_training=False,
)
# Convert to Tensors and build dataset
all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
all_input_mask = torch.tensor(
[f.input_mask for f in features], dtype=torch.long
)
all_segment_ids = torch.tensor(
[f.segment_ids for f in features], dtype=torch.long
)
all_cls_index = torch.tensor([f.cls_index for f in features], dtype=torch.long)
all_p_mask = torch.tensor([f.p_mask for f in features], dtype=torch.float)
all_example_index = torch.arange(all_input_ids.size(0), dtype=torch.long)
dataset = TensorDataset(
all_input_ids,
all_input_mask,
all_segment_ids,
all_example_index,
all_cls_index,
all_p_mask,
)
if output_examples:
return dataset, examples, features
return dataset
def _produce_concrete_predictions(
self,
all_examples,
all_features,
all_results,
n_best_size=10,
max_answer_length=30,
verbose_logging=False,
):
start_n_top = self.pretrained_model.config.start_n_top
end_n_top = self.pretrained_model.config.end_n_top
tokenizer = self.pretrained_tokenizer
_PrelimPrediction = collections.namedtuple( # pylint: disable=invalid-name
"PrelimPrediction",
[
"feature_index",
"start_index",
"end_index",
"start_log_prob",
"end_log_prob",
],
)
_NbestPrediction = collections.namedtuple( # pylint: disable=invalid-name
"NbestPrediction", ["text", "start_log_prob", "end_log_prob"]
)
example_index_to_features = collections.defaultdict(list)
for feature in all_features:
example_index_to_features[feature.example_index].append(feature)
unique_id_to_result = {}
for result in all_results:
unique_id_to_result[result.unique_id] = result
all_predictions = collections.OrderedDict()
all_nbest_json = collections.OrderedDict()
scores_diff_json = collections.OrderedDict()
for (example_index, example) in enumerate(all_examples):
features = example_index_to_features[example_index]
prelim_predictions = []
# keep track of the minimum score of null start+end of position 0
score_null = 1000000 # large and positive
for (feature_index, feature) in enumerate(features):
result = unique_id_to_result[feature.unique_id]
cur_null_score = result.cls_logits
# if we could have irrelevant answers, get the min score of irrelevant
score_null = min(score_null, cur_null_score)
for i in range(start_n_top):
for j in range(end_n_top):
start_log_prob = result.start_top_log_probs[i]
start_index = result.start_top_index[i]
j_index = i * end_n_top + j
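# Layout note: end_top_log_probs is flattened per start candidate into
# start_n_top blocks of end_n_top entries, so the (i, j) pair lives at
# flat index i * end_n_top + j; e.g. with end_n_top = 5, (i=2, j=3) -> 13.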
end_log_prob = result.end_top_log_probs[j_index]
end_index = result.end_top_index[j_index]
# We could hypothetically create invalid predictions, e.g., predict
# that the start of the span is in the question. We throw out all
# invalid predictions.
if start_index >= feature.paragraph_len - 1:
continue
if end_index >= feature.paragraph_len - 1:
continue
if not feature.token_is_max_context.get(start_index, False):
continue
if end_index < start_index:
continue
length = end_index - start_index + 1
if length > max_answer_length:
continue
prelim_predictions.append(
_PrelimPrediction(
feature_index=feature_index,
start_index=start_index,
end_index=end_index,
start_log_prob=start_log_prob,
end_log_prob=end_log_prob,
)
)
prelim_predictions = sorted(
prelim_predictions,
key=lambda x: (x.start_log_prob + x.end_log_prob),
reverse=True,
)
seen_predictions = {}
nbest = []
for pred in prelim_predictions:
if len(nbest) >= n_best_size:
break
feature = features[pred.feature_index]
# XLNet un-tokenizer
# Let's keep it simple for now and see if we need all this later.
#
# tok_start_to_orig_index = feature.tok_start_to_orig_index
# tok_end_to_orig_index = feature.tok_end_to_orig_index
# start_orig_pos = tok_start_to_orig_index[pred.start_index]
# end_orig_pos = tok_end_to_orig_index[pred.end_index]
# paragraph_text = example.paragraph_text
# final_text = paragraph_text[start_orig_pos: end_orig_pos + 1].strip()
# Previously used Bert untokenizer
tok_tokens = feature.tokens[pred.start_index : (pred.end_index + 1)]
orig_doc_start = feature.token_to_orig_map[pred.start_index]
orig_doc_end = feature.token_to_orig_map[pred.end_index]
orig_tokens = example.doc_tokens[orig_doc_start : (orig_doc_end + 1)]
tok_text = tokenizer.convert_tokens_to_string(tok_tokens)
# Clean whitespace
tok_text = tok_text.strip()
tok_text = " ".join(tok_text.split())
orig_text = " ".join(orig_tokens)
final_text = get_final_text(
tok_text, orig_text, tokenizer.do_lower_case, verbose_logging
)
if final_text in seen_predictions:
continue
seen_predictions[final_text] = True
nbest.append(
_NbestPrediction(
text=final_text,
start_log_prob=pred.start_log_prob,
end_log_prob=pred.end_log_prob,
)
)
# In very rare edge cases we could have no valid predictions. So we
# just create a nonce prediction in this case to avoid failure.
if not nbest:
nbest.append(
_NbestPrediction(text="", start_log_prob=-1e6, end_log_prob=-1e6)
)
total_scores = []
best_non_null_entry = None
for entry in nbest:
total_scores.append(entry.start_log_prob + entry.end_log_prob)
if not best_non_null_entry:
best_non_null_entry = entry
probs = _compute_softmax(total_scores)
nbest_json = []
for (i, entry) in enumerate(nbest):
output = collections.OrderedDict()
output["text"] = entry.text
output["probability"] = probs[i]
output["start_log_prob"] = entry.start_log_prob
output["end_log_prob"] = entry.end_log_prob
nbest_json.append(output)
assert len(nbest_json) >= 1
assert best_non_null_entry is not None
score_diff = score_null
scores_diff_json[example.qas_id] = score_diff
# note(zhiliny): always predict best_non_null_entry
# and the evaluation script will search for the best threshold
all_predictions[example.qas_id] = best_non_null_entry.text
all_nbest_json[example.qas_id] = nbest_json
"""
if version_2_with_negative:
with open(output_null_log_odds_file, "w") as writer:
writer.write(json.dumps(scores_diff_json, indent=4) + "\n")
with open(orig_data_file, "r", encoding='utf-8') as reader:
orig_data = json.load(reader)["data"]
"""
# All ids set as None so get rid of None Key
all_predictions = all_predictions[None]
all_nbest_json = all_nbest_json[None]
return all_predictions, all_nbest_json
def predict(
self, query: str, context: str, n_best_size: int = 20, as_dict: bool = False
) -> Tuple[str, List[OrderedDict]]:
""" Predicts top_n answer spans of query in regards to context
Args:
query: The question
context: The context of which the question is asking
top_n: The top n answers returned
as_dict: Returns answer in dict format if True
Returns:
Either a list of string answers or a dict of the results
"""
if not self.pretrained_model or not self.pretrained_tokenizer: self._load()
# Load and Evaluate Context Queries
dataset, examples, features = self._load_one_query(query, context)
eval_sampler = SequentialSampler(dataset)
eval_dataloader = DataLoader(dataset, sampler=eval_sampler, batch_size=5)
all_results = []
for batch in eval_dataloader:
self.pretrained_model.eval()
batch = tuple(t.to(self.device) for t in batch)
with torch.no_grad():
# BERT XLM XLNET DIFFERENCE
inputs = {
"input_ids": batch[0],
"attention_mask": batch[1],
"token_type_ids": batch[2],
"cls_index": batch[4],
"p_mask": batch[5],
}
example_indices = batch[3]
outputs = self.pretrained_model(**inputs)
for i, example_index in enumerate(example_indices):
eval_feature = features[example_index.item()]
unique_id = int(eval_feature.unique_id)
# BERT XLM XLNET DIFFERENCE
result = RawResultExtended(
unique_id=unique_id,
start_top_log_probs=self._to_list(outputs[0][i]),
start_top_index=self._to_list(outputs[1][i]),
end_top_log_probs=self._to_list(outputs[2][i]),
end_top_index=self._to_list(outputs[3][i]),
cls_logits=self._to_list(outputs[4][i]),
)
all_results.append(result)
# Obtain Concrete Predictions
all_predictions, all_nbest_json = self._produce_concrete_predictions(
examples, features, all_results, n_best_size=n_best_size
)
return all_predictions, all_nbest_json
class XLMQuestionAnsweringModel(QuestionAnsweringModel):
def __init__(self):
self.config = XLMConfig
self.tokenizer = XLMTokenizer
self.model = XLMForQuestionAnswering
self.model_names = list(self.config.pretrained_config_archive_map.keys())
# Post Load
self.pretrained_config = None
self.pretrained_tokenizer = None
self.pretrained_model = None
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def _to_list(self, tensor: torch.Tensor) -> List[float]:
return tensor.detach().cpu().tolist()
def _load(self) -> None:
print("Loading Pretrained XLNet Question Answering Model...")
model_name = "xlm-mlm-en-2048"
self.pretrained_config = self.config.from_pretrained("xlm-mlm-en-2048")
tokenizer = self.tokenizer.from_pretrained(
"xlm-mlm-en-2048", do_lower_case=False
)
self.pretrained_tokenizer = tokenizer
model = self.model.from_pretrained(
model_name,
from_tf=bool(".ckpt" in model_name),
config=self.pretrained_config,
)
self.pretrained_model = model
self.pretrained_model.to(self.device)
def _load_one_query(
self, query: str, context: str, output_examples=True
) -> Union[TensorDataset, List[SquadExample], List[InputFeatures]]:
# Create doc_tokens for SquadExample with one query and context
def is_whitespace(c):
if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F:
return True
return False
# Create doc_tokens
doc_tokens = []
prev_is_whitespace = True
for c in context:
if is_whitespace(c):
prev_is_whitespace = True
else:
if prev_is_whitespace:
doc_tokens.append(c)
else:
doc_tokens[-1] += c
prev_is_whitespace = False
# Create SquadExample
examples = []
example = SquadExample(
qas_id=None,
question_text=query,
doc_tokens=doc_tokens,
orig_answer_text=None,
start_position=None,
end_position=None,
is_impossible=False,
)
examples.append(example)
# Convert to features
features = convert_examples_to_features(
examples=examples,
tokenizer=self.pretrained_tokenizer,
max_seq_length=384,
doc_stride=128,
max_query_length=64,
is_training=False,
)
# Convert to Tensors and build dataset
all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
all_input_mask = torch.tensor(
[f.input_mask for f in features], dtype=torch.long
)
all_segment_ids = torch.tensor(
[f.segment_ids for f in features], dtype=torch.long
)
all_cls_index = torch.tensor([f.cls_index for f in features], dtype=torch.long)
all_p_mask = torch.tensor([f.p_mask for f in features], dtype=torch.float)
all_example_index = torch.arange(all_input_ids.size(0), dtype=torch.long)
dataset = TensorDataset(
all_input_ids,
all_input_mask,
all_segment_ids,
all_example_index,
all_cls_index,
all_p_mask,
)
if output_examples:
return dataset, examples, features
return dataset
def _produce_concrete_predictions(
self,
all_examples,
all_features,
all_results,
n_best_size=10,
max_answer_length=30,
verbose_logging=False,
):
start_n_top = self.pretrained_model.config.start_n_top
end_n_top = self.pretrained_model.config.end_n_top
tokenizer = self.pretrained_tokenizer
_PrelimPrediction = collections.namedtuple( # pylint: disable=invalid-name
"PrelimPrediction",
[
"feature_index",
"start_index",
"end_index",
"start_log_prob",
"end_log_prob",
],
)
_NbestPrediction = collections.namedtuple( # pylint: disable=invalid-name
"NbestPrediction", ["text", "start_log_prob", "end_log_prob"]
)
example_index_to_features = collections.defaultdict(list)
for feature in all_features:
example_index_to_features[feature.example_index].append(feature)
unique_id_to_result = {}
for result in all_results:
unique_id_to_result[result.unique_id] = result
all_predictions = collections.OrderedDict()
all_nbest_json = collections.OrderedDict()
scores_diff_json = collections.OrderedDict()
for (example_index, example) in enumerate(all_examples):
features = example_index_to_features[example_index]
prelim_predictions = []
# keep track of the minimum score of null start+end of position 0
score_null = 1000000 # large and positive
for (feature_index, feature) in enumerate(features):
result = unique_id_to_result[feature.unique_id]
cur_null_score = result.cls_logits
# if we could have irrelevant answers, get the min score of irrelevant
score_null = min(score_null, cur_null_score)
for i in range(start_n_top):
for j in range(end_n_top):
start_log_prob = result.start_top_log_probs[i]
start_index = result.start_top_index[i]
j_index = i * end_n_top + j
end_log_prob = result.end_top_log_probs[j_index]
end_index = result.end_top_index[j_index]
# We could hypothetically create invalid predictions, e.g., predict
# that the start of the span is in the question. We throw out all
# invalid predictions.
if start_index >= feature.paragraph_len - 1:
continue
if end_index >= feature.paragraph_len - 1:
continue
if not feature.token_is_max_context.get(start_index, False):
continue
if end_index < start_index:
continue
length = end_index - start_index + 1
if length > max_answer_length:
continue
prelim_predictions.append(
_PrelimPrediction(
feature_index=feature_index,
start_index=start_index,
end_index=end_index,
start_log_prob=start_log_prob,
end_log_prob=end_log_prob,
)
)
prelim_predictions = sorted(
prelim_predictions,
key=lambda x: (x.start_log_prob + x.end_log_prob),
reverse=True,
)
seen_predictions = {}
nbest = []
for pred in prelim_predictions:
if len(nbest) >= n_best_size:
break
feature = features[pred.feature_index]
# XLNet un-tokenizer
# Let's keep it simple for now and see if we need all this later.
#
# tok_start_to_orig_index = feature.tok_start_to_orig_index
# tok_end_to_orig_index = feature.tok_end_to_orig_index
# start_orig_pos = tok_start_to_orig_index[pred.start_index]
# end_orig_pos = tok_end_to_orig_index[pred.end_index]
# paragraph_text = example.paragraph_text
# final_text = paragraph_text[start_orig_pos: end_orig_pos + 1].strip()
# Previously used Bert untokenizer
tok_tokens = feature.tokens[pred.start_index : (pred.end_index + 1)]
orig_doc_start = feature.token_to_orig_map[pred.start_index]
orig_doc_end = feature.token_to_orig_map[pred.end_index]
orig_tokens = example.doc_tokens[orig_doc_start : (orig_doc_end + 1)]
tok_text = tokenizer.convert_tokens_to_string(tok_tokens)
# Clean whitespace
tok_text = tok_text.strip()
tok_text = " ".join(tok_text.split())
orig_text = " ".join(orig_tokens)
final_text = get_final_text(
tok_text,
orig_text,
tokenizer, # note: passes the tokenizer where other call sites pass a do_lower_case bool (an XLM problem?)
verbose_logging,
)
if final_text in seen_predictions:
continue
seen_predictions[final_text] = True
nbest.append(
_NbestPrediction(
text=final_text,
start_log_prob=pred.start_log_prob,
end_log_prob=pred.end_log_prob,
)
)
# In very rare edge cases we could have no valid predictions. So we
# just create a nonce prediction in this case to avoid failure.
if not nbest:
nbest.append(
_NbestPrediction(text="", start_log_prob=-1e6, end_log_prob=-1e6)
)
total_scores = []
best_non_null_entry = None
for entry in nbest:
total_scores.append(entry.start_log_prob + entry.end_log_prob)
if not best_non_null_entry:
best_non_null_entry = entry
probs = _compute_softmax(total_scores)
nbest_json = []
for (i, entry) in enumerate(nbest):
output = collections.OrderedDict()
output["text"] = entry.text
output["probability"] = probs[i]
output["start_log_prob"] = entry.start_log_prob
output["end_log_prob"] = entry.end_log_prob
nbest_json.append(output)
assert len(nbest_json) >= 1
assert best_non_null_entry is not None
score_diff = score_null
scores_diff_json[example.qas_id] = score_diff
# note(zhiliny): always predict best_non_null_entry
# and the evaluation script will search for the best threshold
all_predictions[example.qas_id] = best_non_null_entry.text
all_nbest_json[example.qas_id] = nbest_json
"""
if version_2_with_negative:
with open(output_null_log_odds_file, "w") as writer:
writer.write(json.dumps(scores_diff_json, indent=4) + "\n")
with open(orig_data_file, "r", encoding='utf-8') as reader:
orig_data = json.load(reader)["data"]
"""
# All ids set as None so get rid of None Key
all_predictions = all_predictions[None]
all_nbest_json = all_nbest_json[None]
return all_predictions, all_nbest_json
def predict(
self, query: str, context: str, n_best_size: int = 20, as_dict: bool = False
) -> Tuple[str, List[OrderedDict]]:
""" Predicts top_n answer spans of query in regards to context
Args:
query: The question
context: The context of which the question is asking
top_n: The top n answers returned
as_dict: Returns answer in dict format if True
Returns:
Either a list of string answers or a dict of the results
"""
if not self.pretrained_model or not self.pretrained_tokenizer: self._load()
# Load and Evaluate Context Queries
dataset, examples, features = self._load_one_query(query, context)
eval_sampler = SequentialSampler(dataset)
eval_dataloader = DataLoader(dataset, sampler=eval_sampler, batch_size=5)
all_results = []
for batch in eval_dataloader:
self.pretrained_model.eval()
batch = tuple(t.to(self.device) for t in batch)
with torch.no_grad():
# BERT XLM XLNET DIFFERENCE
inputs = {
"input_ids": batch[0],
"attention_mask": batch[1],
"token_type_ids": batch[2],
"cls_index": batch[4],
"p_mask": batch[5],
}
example_indices = batch[3]
outputs = self.pretrained_model(**inputs)
for i, example_index in enumerate(example_indices):
eval_feature = features[example_index.item()]
unique_id = int(eval_feature.unique_id)
# BERT XLM XLNET DIFFERENCE
result = RawResultExtended(
unique_id=unique_id,
start_top_log_probs=self._to_list(outputs[0][i]),
start_top_index=self._to_list(outputs[1][i]),
end_top_log_probs=self._to_list(outputs[2][i]),
end_top_index=self._to_list(outputs[3][i]),
cls_logits=self._to_list(outputs[4][i]),
)
all_results.append(result)
# Obtain Concrete Predictions
all_predictions, all_nbest_json = self._produce_concrete_predictions(
examples, features, all_results, n_best_size=n_best_size
)
return all_predictions, all_nbest_json
| 38.824621 | 92 | 0.558203 | 5,028 | 46,046 | 4.816428 | 0.081543 | 0.013875 | 0.011892 | 0.011892 | 0.875996 | 0.856547 | 0.840154 | 0.829335 | 0.818433 | 0.816782 | 0 | 0.00732 | 0.371042 | 46,046 | 1,185 | 93 | 38.857384 | 0.828873 | 0.130109 | 0 | 0.767549 | 0 | 0 | 0.028832 | 0.002712 | 0 | 0 | 0.000461 | 0.000844 | 0.006904 | 1 | 0.027618 | false | 0 | 0.009206 | 0.003452 | 0.065593 | 0.003452 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1cd11a30bab385592db8d12a49b97220556cee4d | 45,660 | py | Python | tests/test_bot.py | trussworks/glossary-bot | ad9a0a9171f3057a4d0d7da33790373e6a0de99f | [ "MIT" ] | 2 | 2019-11-02T22:01:46.000Z | 2021-05-11T19:07:14.000Z | tests/test_bot.py | trussworks/glossary-bot | ad9a0a9171f3057a4d0d7da33790373e6a0de99f | [ "MIT" ] | 1 | 2019-10-29T20:15:51.000Z | 2019-10-29T20:15:51.000Z | tests/test_bot.py | trussworks/glossary-bot | ad9a0a9171f3057a4d0d7da33790373e6a0de99f | [ "MIT" ] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf8 -*-
import unittest
import json
import responses
from flask import current_app
from gloss.models import Definition, Interaction
from tests.test_base import TestBase
class TestBot(TestBase):
def setUp(self):
super(TestBot, self).setUp()
self.db.create_all()
def test_app_exists(self):
''' The app exists
'''
self.assertFalse(current_app is None)
def test_unauthorized_access(self):
''' The app rejects unauthorized access
'''
robo_response = self.client.post('/', data={'token': 'woofer_token'})
self.assertEqual(robo_response.status_code, 401)
def test_authorized_access(self):
''' The app accepts authorized access
'''
robo_response = self.post_command(text="")
self.assertEqual(robo_response.status_code, 200)
def test_set_definition(self):
''' A definition set via a POST is recorded in the database
'''
robo_response = self.post_command(text="EW = Eligibility Worker")
self.assertTrue("has set the definition".encode('utf-8') in robo_response.data)
filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "Eligibility Worker")
def test_set_definition_with_lots_of_whitespace(self):
''' Excess whitespace is trimmed when parsing the set command.
'''
robo_response = self.post_command(text=" EW = Eligibility Worker ")
self.assertTrue("has set the definition".encode('utf-8') in robo_response.data)
filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "Eligibility Worker")
def test_set_definition_with_multiple_equals_signs(self):
''' A set with multiple equals signs considers all equals signs after
the first to be part of the definition
'''
robo_response = self.post_command(text="EW = Eligibility Worker = Cool Person=Yeah")
self.assertTrue("has set the definition".encode('utf-8') in robo_response.data)
filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "Eligibility Worker = Cool Person=Yeah")
def test_reset_definition(self):
''' Setting a definition for an existing term overwrites the original
'''
robo_response = self.post_command(text="EW = Eligibility Worker")
self.assertTrue("has set the definition".encode('utf-8') in robo_response.data)
filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "Eligibility Worker")
robo_response = self.post_command(text="EW = Egg Weathervane")
self.assertTrue("overwriting the previous entry".encode('utf-8') in robo_response.data)
filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "Egg Weathervane")
def test_set_same_word_with_different_capitalization(self):
''' We can't set different definitions for the same word by using different cases
'''
robo_response = self.post_command(text="lower case = NOT UPPER CASE")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("has set the definition".encode('utf-8') in robo_response.data)
filter = Definition.term == "lower case"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "lower case")
self.assertEqual(definition_check.definition, "NOT UPPER CASE")
robo_response = self.post_command(text="LOWER CASE = really not upper case")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("overwriting the previous entry".encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="shh lower case")
self.assertTrue("LOWER CASE: really not upper case".encode('utf-8') in robo_response.data)
def test_set_identical_definition(self):
''' Correct response for setting an identical definition for an existing term
'''
robo_response = self.post_command(text="EW = Eligibility Worker")
self.assertTrue("has set the definition".encode('utf-8') in robo_response.data)
filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "Eligibility Worker")
robo_response = self.post_command(text="EW = Eligibility Worker")
self.assertTrue("already knows that the definition for".encode('utf-8') in robo_response.data)
def test_set_command_word_definitions(self):
''' We can successfully set definitions for unreserved command words.
'''
robo_response = self.post_command(text="SHH = Sonic Hedge Hog")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("has set the definition".encode('utf-8') in robo_response.data)
filter = Definition.term == "SHH"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "SHH")
self.assertEqual(definition_check.definition, "Sonic Hedge Hog")
robo_response = self.post_command(text="SSH = Secure SHell")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("has set the definition".encode('utf-8') in robo_response.data)
filter = Definition.term == "SSH"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "SSH")
self.assertEqual(definition_check.definition, "Secure SHell")
robo_response = self.post_command(text="Delete = Remove or Obliterate")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("has set the definition".encode('utf-8') in robo_response.data)
filter = Definition.term == "Delete"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "Delete")
self.assertEqual(definition_check.definition, "Remove or Obliterate")
robo_response = self.post_command(text="help me = I'm in hell")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("has set the definition".encode('utf-8') in robo_response.data)
filter = Definition.term == "help me"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "help me")
self.assertEqual(definition_check.definition, "I'm in hell")
def test_failed_set_command_word_definitions(self):
''' We can't successfully set definitions for reserved command words.
'''
robo_response = self.post_command(text="Stats = Statistics")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("because it's a reserved term".encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="help = aid")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("because it's a reserved term".encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="LeArNiNgS = recently")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("because it's a reserved term".encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="? = riddle me this")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("because it's a reserved term".encode('utf-8') in robo_response.data)
@responses.activate
def test_get_definition(self):
''' We can successfully set and get a definition from the bot
'''
# set & test a definition
self.post_command(text="EW = Eligibility Worker")
filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "Eligibility Worker")
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text="EW")
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue("glossie" in payload['text'])
self.assertTrue("gloss EW" in payload['text'])
self.assertEqual(payload['channel'], "123456")
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertEqual(attachment['title'], "EW")
self.assertEqual(attachment['text'], "Eligibility Worker")
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
# the request was recorded in the interactions table
interaction_check = self.db.session.query(Interaction).first()
self.assertIsNotNone(interaction_check)
self.assertEqual(interaction_check.user_name, "glossie")
self.assertEqual(interaction_check.term, "EW")
self.assertEqual(interaction_check.action, "found")
# delete the fake Slack webhook URL
del(current_app.config['SLACK_WEBHOOK_URL'])
# reset the mock
responses.reset()
@responses.activate
def test_get_definition_with_special_characters(self):
''' We can successfully set and get a definition with special characters from the bot
'''
# set & test a definition
self.post_command(text="EW = ™¥∑ø∂∆∫")
filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "™¥∑ø∂∆∫")
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text="EW")
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue("glossie" in payload['text'])
self.assertTrue("gloss EW" in payload['text'])
self.assertEqual(payload['channel'], "123456")
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertEqual(attachment['title'], "EW")
self.assertEqual(attachment['text'], "™¥∑ø∂∆∫")
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
# the request was recorded in the interactions table
interaction_check = self.db.session.query(Interaction).first()
self.assertIsNotNone(interaction_check)
self.assertEqual(interaction_check.user_name, "glossie")
self.assertEqual(interaction_check.term, "EW")
self.assertEqual(interaction_check.action, "found")
# delete the fake Slack webhook URL
del(current_app.config['SLACK_WEBHOOK_URL'])
# reset the mock
responses.reset()
def test_request_nonexistent_definition(self):
''' Test requesting a non-existent definition
'''
# send a POST to the bot to request the definition
robo_response = self.post_command(text="EW")
self.assertTrue("has no definition for".encode('utf-8') in robo_response.data)
# the request was recorded in the interactions table
interaction_check = self.db.session.query(Interaction).first()
self.assertIsNotNone(interaction_check)
self.assertEqual(interaction_check.user_name, "glossie")
self.assertEqual(interaction_check.term, "EW")
self.assertEqual(interaction_check.action, "not_found")
@responses.activate
def test_get_definition_with_image(self):
''' We can get a properly formatted definition with an image from the bot
'''
# set & test a definition
self.post_command(text="EW = http://example.com/ew.gif")
filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "http://example.com/ew.gif")
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text="EW")
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue("glossie" in payload['text'])
self.assertTrue("gloss EW" in payload['text'])
self.assertEqual(payload['channel'], "123456")
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertEqual(attachment['title'], "EW")
self.assertEqual(attachment['text'], "http://example.com/ew.gif")
self.assertEqual(attachment['image_url'], "http://example.com/ew.gif")
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
# delete the fake Slack webhook URL
del(current_app.config['SLACK_WEBHOOK_URL'])
# reset the mock
responses.reset()
@responses.activate
def test_set_alias(self):
''' An alias can be set for a definition
'''
# set & test a definition and some aliases
original_term = "Glossary Bot"
first_alias = "Gloss Bot"
second_alias = "Glossbot"
definition = "A Slack bot that maintains a glossary of terms created by its users, and responds to requests with definitions."
self.post_command(text="{original_term} = {definition}".format(**locals()))
self.post_command(text="{first_alias} = see {original_term}".format(**locals()))
self.post_command(text="{second_alias} = see also {original_term}".format(**locals()))
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
# ask for the original definition
rsp = self.post_command(text=original_term)
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertEqual(attachment['title'], original_term)
self.assertEqual(attachment['text'], definition)
# ask for the first alias
rsp = self.post_command(text=first_alias)
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[1].request.body)
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertEqual(attachment['title'], original_term)
self.assertEqual(attachment['text'], definition)
# ask for the second alias
rsp = self.post_command(text=second_alias)
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[2].request.body)
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertEqual(attachment['title'], original_term)
self.assertEqual(attachment['text'], definition)
# delete the fake Slack webhook URL
del(current_app.config['SLACK_WEBHOOK_URL'])
# reset the mock
responses.reset()
def test_delete_definition(self):
''' A definition can be deleted from the database
'''
# first set a value in the database and verify that it's there
self.post_command(text="EW = Eligibility Worker")
filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "Eligibility Worker")
# now delete the value and verify that it's gone
robo_response = self.post_command(text="delete EW")
self.assertTrue("has deleted the definition for".encode('utf-8') in robo_response.data)
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNone(definition_check)
@responses.activate
def test_get_stats(self):
''' Stats are properly returned by the bot
'''
# set and get a definition to generate some stats
self.post_command(text="EW = Eligibility Worker")
self.post_command(text="shh EW")
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text="stats")
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue("glossie" in payload['text'])
self.assertTrue("gloss stats" in payload['text'])
self.assertEqual(payload['channel'], "123456")
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNotNone(attachment['title'])
self.assertTrue("I have definitions for 1 term" in attachment['text'])
self.assertTrue("1 person has defined terms" in attachment['text'])
self.assertTrue("I've been asked for definitions 1 time" in attachment['text'])
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
# delete the fake Slack webhook URL
del(current_app.config['SLACK_WEBHOOK_URL'])
# reset the mock
responses.reset()
@responses.activate
def test_get_stats_on_empty_database(self):
''' A coherent message is returned when requesting stats on an empty database
'''
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text="stats")
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue("glossie" in payload['text'])
self.assertTrue("gloss stats" in payload['text'])
self.assertEqual(payload['channel'], "123456")
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNotNone(attachment['title'])
self.assertTrue("I don't have any definitions" in attachment['text'])
self.assertTrue("Nobody has defined terms" in attachment['text'])
self.assertTrue("Nobody has asked me for definitions" in attachment['text'])
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
# delete the fake Slack webhook URL
del(current_app.config['SLACK_WEBHOOK_URL'])
# reset the mock
responses.reset()
@responses.activate
def test_get_learnings(self):
''' Learnings are properly returned by the bot
'''
# set some values in the database
letters = ["K", "L", "M", "N", "Ó", "P", "Q", "R", "S", "T", "U", "V"]
for letter in letters:
self.post_command(text="{letter}W = {letter}ligibility Worker".format(letter=letter))
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text="learnings")
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue("glossie" in payload['text'])
self.assertTrue("gloss learnings" in payload['text'])
self.assertEqual(payload['channel'], "123456")
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNotNone(attachment['title'])
self.assertTrue("I recently learned definitions for" in attachment['text'])
self.assertTrue("KW" in attachment['text'])
self.assertTrue("LW" in attachment['text'])
self.assertTrue("MW" in attachment['text'])
self.assertTrue("NW" in attachment['text'])
self.assertTrue("ÓW" in attachment['text'])
self.assertTrue("PW" in attachment['text'])
self.assertTrue("QW" in attachment['text'])
self.assertTrue("RW" in attachment['text'])
self.assertTrue("SW" in attachment['text'])
self.assertTrue("TW" in attachment['text'])
self.assertTrue("UW" in attachment['text'])
self.assertTrue("VW" in attachment['text'])
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
# delete the fake Slack webhook URL
del(current_app.config['SLACK_WEBHOOK_URL'])
# reset the mock
responses.reset()
def test_random_learnings(self):
''' Learnings are returned in random order when requested
'''
# set some values in the database
letters = ["E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S"]
for letter in letters:
self.post_command(text="{letter}W = {letter}ligibility Worker".format(letter=letter))
# get chronological learnings
robo_response = self.post_command(text="shh learnings")
self.assertEqual(robo_response.status_code, 200)
control = robo_response.data
# get a few random learnings
robo_response = self.post_command(text="shh learnings random")
self.assertEqual(robo_response.status_code, 200)
random1 = robo_response.data
robo_response = self.post_command(text="shh learnings random")
self.assertEqual(robo_response.status_code, 200)
random2 = robo_response.data
robo_response = self.post_command(text="shh learnings random")
self.assertEqual(robo_response.status_code, 200)
random3 = robo_response.data
# if they're all equal, we've failed
self.assertFalse(control == random1 and control == random2 and control == random3)
def test_alphabetical_learnings(self):
''' Learnings are returned in alphabetical order when requested
'''
# set some values in the database
letters = ["E", "G", "I", "K", "M", "O", "Q", "S", "R", "P", "N", "L", "J", "H", "F"]
check = []
for letter in letters:
self.post_command(text="{letter}W = {letter}ligibility Worker".format(letter=letter))
check.insert(0, "{}W".format(letter))
desc_check = check[:12]
alpha_check = list(check)
alpha_check.sort()
alpha_check = alpha_check[:12]
# get chronological learnings
robo_response = self.post_command(text="shh learnings")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(", ".join(desc_check).encode('utf-8') in robo_response.data)
# get alphabetical learnings
robo_response = self.post_command(text="shh learnings alpha")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(", ".join(alpha_check).encode('utf-8') in robo_response.data)
def test_random_offset_learnings(self):
''' An offset group of learnings is returned in random order when requested
'''
# set some values in the database
letters = ["E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S"]
for letter in letters:
self.post_command(text="{letter}W = {letter}ligibility Worker".format(letter=letter))
# get chronological learnings
robo_response = self.post_command(text="shh learnings 7 4")
self.assertEqual(robo_response.status_code, 200)
control = robo_response.data.decode()
# get a list of the terms from the control string
check_terms = control.split(', ')
check_terms[0] = check_terms[0][-2:]
# get a few random learnings
robo_response = self.post_command(text="shh learnings random 7 4")
self.assertEqual(robo_response.status_code, 200)
random1 = robo_response.data
robo_response = self.post_command(text="shh learnings random 7 4")
self.assertEqual(robo_response.status_code, 200)
random2 = robo_response.data
robo_response = self.post_command(text="shh learnings random 7 4")
self.assertEqual(robo_response.status_code, 200)
random3 = robo_response.data
# if they're all equal, we've failed
self.assertFalse(control == random1 and control == random2 and control == random3)
# but they should all have the same elements
for term in check_terms:
self.assertTrue(term.encode('utf-8') in random1)
self.assertTrue(term.encode('utf-8') in random2)
self.assertTrue(term.encode('utf-8') in random3)
def test_all_learnings(self):
''' All learnings are returned when requested
'''
# set some values in the database
letters = ["E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X"]
check = []
for letter in letters:
self.post_command(text="{letter}W = {letter}ligibility Worker".format(letter=letter))
check.insert(0, "{}W".format(letter))
# get all learnings
robo_response = self.post_command(text="shh learnings all")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(", ".join(check).encode('utf-8') in robo_response.data)
# if 'all' is part of the command, other limiting params are ignored
robo_response = self.post_command(text="shh learnings all 5")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(", ".join(check).encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="shh learnings 5 3 all")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(", ".join(check).encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="shh learnings all 3 5")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(", ".join(check).encode('utf-8') in robo_response.data)
def test_some_learnings(self):
''' Only a few learnings are returned when requested
'''
# set some values in the database
letters = ["E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X"]
for letter in letters:
self.post_command(text="{letter}W = {letter}ligibility Worker".format(letter=letter))
limit = 7
check = ["{}W".format(item) for item in list(reversed(letters[-limit:]))]
# get some learnings
robo_response = self.post_command(text="shh learnings {}".format(limit))
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(", ".join(check).encode('utf-8') in robo_response.data)
def test_offset_learnings(self):
''' An offset subset of learnings is returned when requested
'''
# set some values in the database
letters = ["E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X"]
for letter in letters:
self.post_command(text="{letter}W = {letter}ligibility Worker".format(letter=letter))
limit = 7
offset = 11
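# learnings are returned newest-first: skip the `offset` most recently set
# terms, then take the next `limit` terms (hence the reversed slice below)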
check = ["{}W".format(item) for item in list(reversed(letters[-(limit + offset):-offset]))]
# get some learnings
robo_response = self.post_command(text="shh learnings {} {}".format(limit, offset))
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(", ".join(check).encode('utf-8') in robo_response.data)
def test_learnings_language(self):
''' Language describing learnings is numerically accurate
'''
# ask for recent definitions before any have been set
robo_response = self.post_command(text="shh learnings")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("I haven't learned any definitions yet.".encode('utf-8') in robo_response.data)
# when one value has been set
self.post_command(text="EW = Eligibility Worker")
robo_response = self.post_command(text="shh learnings")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("I recently learned the definition for".encode('utf-8') in robo_response.data)
# when more than one value has been set
self.post_command(text="FW = Fligibility Worker")
robo_response = self.post_command(text="shh learnings")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("I recently learned definitions for".encode('utf-8') in robo_response.data)
def test_learnings_alternate_command(self):
''' Learnings are returned when sending the 'recent' command.
'''
# ask for recent definitions before any have been set
robo_response = self.post_command(text="shh recent")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("I haven't learned any definitions yet.".encode('utf-8') in robo_response.data)
# when one value has been set
self.post_command(text="EW = Eligibility Worker")
robo_response = self.post_command(text="shh recent")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("I recently learned the definition for".encode('utf-8') in robo_response.data)
# when more than one value has been set
self.post_command(text="FW = Fligibility Worker")
robo_response = self.post_command(text="shh recent")
self.assertEqual(robo_response.status_code, 200)
self.assertTrue("I recently learned definitions for".encode('utf-8') in robo_response.data)
@responses.activate
def test_learnings_alternate_command_echoed(self):
''' The learnings alternate command is echoed in the bot's response
'''
alternate_action = "recent"
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text=alternate_action)
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
self.assertIsNotNone(payload['text'])
self.assertTrue("gloss {action}".format(action=alternate_action) in payload['text'])
# delete the fake Slack webhook URL
del current_app.config['SLACK_WEBHOOK_URL']
# reset the mock
responses.reset()
def test_get_help(self):
''' Help is properly returned by the bot
'''
# testing different chunks of help text with each response
robo_response = self.post_command(text="help")
self.assertTrue("to show the definition for a term".encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="?")
self.assertTrue("to set the definition for a term".encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="")
self.assertTrue("to delete the definition for a term".encode('utf-8') in robo_response.data)
robo_response = self.post_command(text=" ")
self.assertTrue("to see this message".encode('utf-8') in robo_response.data)
def test_custom_slash_command_for_private_requests(self):
''' A slash command other than /gloss is echoed in the bot's response
'''
test_command = "/gg"
# the help command
robo_response = self.post_command(text="help", slash_command=test_command)
self.assertTrue("*{}".format(test_command).encode('utf-8') in robo_response.data)
self.assertFalse("*/gloss".encode('utf-8') in robo_response.data)
# ask for a definition that doesn't exist
robo_response = self.post_command(text="shh EW", slash_command=test_command)
self.assertTrue("*{}".format(test_command).encode('utf-8') in robo_response.data)
self.assertFalse("*/gloss".encode('utf-8') in robo_response.data)
# get a definition that does exist
self.post_command(text="EW = Eligibility Worker", slash_command=test_command)
robo_response = self.post_command(text="shh EW", slash_command=test_command)
self.assertTrue("{}".format(test_command).encode('utf-8') in robo_response.data)
self.assertFalse("/gloss".encode('utf-8') in robo_response.data)
# get the error message for a bogus set
robo_response = self.post_command(text="AW =", slash_command=test_command)
self.assertTrue("*{}".format(test_command).encode('utf-8') in robo_response.data)
self.assertFalse("*/gloss".encode('utf-8') in robo_response.data)
@responses.activate
def test_custom_slash_command_for_public_stats(self):
''' A slash command other than /gloss is echoed in the bot's response
to a public stats request.
'''
test_command = "/gg"
# set and get a definition to generate some stats
self.post_command(text="EW = Eligibility Worker")
self.post_command(text="shh EW")
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text="stats", slash_command=test_command)
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
self.assertIsNotNone(payload['text'])
self.assertTrue("{command} stats".format(command=test_command) in payload['text'])
# delete the fake Slack webhook URL
del current_app.config['SLACK_WEBHOOK_URL']
# reset the mock
responses.reset()
@responses.activate
def test_custom_slash_command_for_public_definition(self):
''' A slash command other than /gloss is echoed in the bot's response
to a public definition request.
'''
test_command = "/gg"
# set a definition to request publicly
self.post_command(text="EW = Eligibility Worker")
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text="EW", slash_command=test_command)
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
self.assertIsNotNone(payload['text'])
self.assertTrue("{command} EW".format(command=test_command) in payload['text'])
# delete the fake Slack webhook URL
del current_app.config['SLACK_WEBHOOK_URL']
# reset the mock
responses.reset()
@responses.activate
def test_custom_slash_command_for_public_learnings(self):
''' A slash command other than /gloss is echoed in the bot's response
to a public learnings request.
'''
test_command = "/gg"
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text="learnings", slash_command=test_command)
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
self.assertIsNotNone(payload['text'])
self.assertTrue("{command} learnings".format(command=test_command) in payload['text'])
# delete the fake Slack webhook URL
del current_app.config['SLACK_WEBHOOK_URL']
# reset the mock
responses.reset()
def test_get_quiet_definition(self):
''' The bot will send a quiet definition when told to do so
'''
# set & test a definition
self.post_command(text="EW = Eligibility Worker")
term_filter = Definition.term == "EW"
definition_check = self.db.session.query(Definition).filter(term_filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, "EW")
self.assertEqual(definition_check.definition, "Eligibility Worker")
# send a POST to the bot to request the quiet definition
robo_response = self.post_command(text="shh EW")
self.assertTrue("glossie".encode('utf-8') in robo_response.data)
self.assertTrue("EW: Eligibility Worker".encode('utf-8') in robo_response.data)
# send POSTs with variations of 'shh' to make sure that they're caught
robo_response = self.post_command(text="ssh EW")
self.assertTrue("glossie".encode('utf-8') in robo_response.data)
self.assertTrue("EW: Eligibility Worker".encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="sh EW")
self.assertTrue("glossie".encode('utf-8') in robo_response.data)
self.assertTrue("EW: Eligibility Worker".encode('utf-8') in robo_response.data)
# at least one request was recorded in the interactions table
interaction_check = self.db.session.query(Interaction).first()
self.assertIsNotNone(interaction_check)
self.assertEqual(interaction_check.user_name, "glossie")
self.assertEqual(interaction_check.term, "EW")
self.assertEqual(interaction_check.action, "found")
def test_bad_set_commands(self):
''' We get the right error back when sending bad set commands
'''
robo_response = self.post_command(text="EW =")
self.assertTrue("You can set definitions like this".encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="=")
self.assertTrue("You can set definitions like this".encode('utf-8') in robo_response.data)
robo_response = self.post_command(text="= = =")
self.assertTrue("You can set definitions like this".encode('utf-8') in robo_response.data)
@responses.activate
def test_bad_image_urls_rejected(self):
''' Bad image URLs are not sent in the attachment's image_url parameter
'''
# set some definitions with bad image URLs
self.post_command(text="EW = http://kittens.gif")
self.post_command(text="FW = httpdoggie.jpeg")
self.post_command(text="GW = http://stupid/goldfish.bmp")
self.post_command(text="HW = http://s.mlkshk-cdn.com/r/13ILU")
# set a fake Slack webhook URL
fake_webhook_url = 'http://webhook.example.com/'
current_app.config['SLACK_WEBHOOK_URL'] = fake_webhook_url
# create a mock to receive POST requests to that URL
responses.add(responses.POST, fake_webhook_url, status=200)
rsp = self.post_command(text="EW")
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[0].request.body)
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNone(attachment['image_url'])
rsp = self.post_command(text="FW")
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[1].request.body)
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNone(attachment['image_url'])
rsp = self.post_command(text="GW")
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[2].request.body)
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNone(attachment['image_url'])
rsp = self.post_command(text="HW")
self.assertTrue(rsp.status_code in range(200, 299), rsp.status_code)
# test the captured post payload
payload = json.loads(responses.calls[3].request.body)
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNone(attachment['image_url'])
# delete the fake Slack webhook URL
del current_app.config['SLACK_WEBHOOK_URL']
# reset the mock
responses.reset()
if __name__ == '__main__':
unittest.main()
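# A self-contained sketch of the mock-capture pattern used throughout the tests
# above (assumption: the `responses` and `requests` packages are installed). The
# decorator intercepts HTTP calls, and responses.calls records each request so
# the POSTed payload can be inspected afterwards.
import json
import requests
import responses

@responses.activate
def demo_webhook_capture():
    fake_webhook_url = 'http://webhook.example.com/'
    # register a mock endpoint for the webhook URL
    responses.add(responses.POST, fake_webhook_url, status=200)
    # the code under test would normally issue this POST
    requests.post(fake_webhook_url, data=json.dumps({"channel": "123456"}))
    # the mock recorded the request; decode and check the captured body
    payload = json.loads(responses.calls[0].request.body)
    assert payload["channel"] == "123456"

demo_webhook_capture()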
| 45.478088
| 134
| 0.665572
| 5,684
| 45,660
| 5.215517
| 0.071605
| 0.062742
| 0.052117
| 0.066015
| 0.876607
| 0.843178
| 0.828369
| 0.801181
| 0.790184
| 0.771665
| 0
| 0.011514
| 0.218244
| 45,660
| 1,003
| 135
| 45.52343
| 0.818406
| 0.144525
| 0
| 0.695513
| 0
| 0.001603
| 0.148803
| 0
| 0
| 0
| 0
| 0
| 0.456731
| 1
| 0.060897
| false
| 0
| 0.009615
| 0
| 0.072115
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c621db6f299b6cb96ec4fa5bbc64f0f5ed82555
| 3,625
|
py
|
Python
|
BaseStation/tests/replay/test_playback_state.py
|
ul-gaul/Avionique_Software
|
e131936de4878d6b71a7168de2212bd9a3a507c7
|
[
"MIT"
] | 3
|
2017-10-17T22:56:17.000Z
|
2019-02-04T17:23:07.000Z
|
BaseStation/tests/replay/test_playback_state.py
|
ul-gaul/Avionique_Software
|
e131936de4878d6b71a7168de2212bd9a3a507c7
|
[
"MIT"
] | 64
|
2016-12-05T23:34:20.000Z
|
2020-10-05T23:57:43.000Z
|
BaseStation/tests/replay/test_playback_state.py
|
ul-gaul/Avionique_Software
|
e131936de4878d6b71a7168de2212bd9a3a507c7
|
[
"MIT"
] | 3
|
2017-01-11T06:14:14.000Z
|
2020-10-05T20:57:37.000Z
|
import unittest
from src.replay.playback_state import PlaybackState
class PlaybackStateTest(unittest.TestCase):
def test_fast_forward_should_double_speed_when_fast_forwarding(self):
initial_speed = 1.0
playback_state = PlaybackState(initial_speed, PlaybackState.Mode.FORWARD)
playback_state.fast_forward()
self.assertEqual(playback_state.get_speed(), initial_speed * 2)
self.assertEqual(playback_state.get_mode(), PlaybackState.Mode.FORWARD)
def test_fast_forward_should_set_mode_forward_when_rewinding_at_normal_speed(self):
initial_speed = 1.0
playback_state = PlaybackState(initial_speed, PlaybackState.Mode.BACKWARD)
playback_state.fast_forward()
self.assertEqual(playback_state.get_speed(), initial_speed)
self.assertEqual(playback_state.get_mode(), PlaybackState.Mode.FORWARD)
def test_fast_forward_should_halve_speed_when_fast_rewinding(self):
initial_speed = 2.0
playback_state = PlaybackState(initial_speed, PlaybackState.Mode.BACKWARD)
playback_state.fast_forward()
self.assertEqual(playback_state.get_speed(), initial_speed / 2)
self.assertEqual(playback_state.get_mode(), PlaybackState.Mode.BACKWARD)
def test_fast_forward_should_not_accelerate_beyond_max_speed(self):
initial_speed = PlaybackState.max_speed_factor
playback_state = PlaybackState(initial_speed, PlaybackState.Mode.FORWARD)
playback_state.fast_forward()
self.assertEqual(playback_state.get_mode(), PlaybackState.Mode.FORWARD)
self.assertEqual(playback_state.get_speed(), PlaybackState.max_speed_factor)
def test_rewind_should_double_speed_when_rewinding(self):
initial_speed = 1.0
playback_state = PlaybackState(initial_speed, PlaybackState.Mode.BACKWARD)
playback_state.rewind()
self.assertEqual(playback_state.get_speed(), initial_speed * 2)
self.assertEqual(playback_state.get_mode(), PlaybackState.Mode.BACKWARD)
def test_rewind_should_set_mode_backward_when_fast_forwarding_at_normal_speed(self):
initial_speed = 1.0
playback_state = PlaybackState(initial_speed, PlaybackState.Mode.FORWARD)
playback_state.rewind()
self.assertEqual(playback_state.get_mode(), PlaybackState.Mode.BACKWARD)
self.assertEqual(playback_state.get_speed(), initial_speed)
def test_rewind_should_halve_speed_when_fast_forwarding(self):
initial_speed = 2.0
playback_state = PlaybackState(initial_speed, PlaybackState.Mode.FORWARD)
playback_state.rewind()
self.assertEqual(playback_state.get_speed(), initial_speed / 2)
self.assertEqual(playback_state.get_mode(), PlaybackState.Mode.FORWARD)
def test_rewind_should_not_accelerate_beyond_max_speed(self):
initial_speed = PlaybackState.max_speed_factor
playback_state = PlaybackState(initial_speed, PlaybackState.Mode.BACKWARD)
playback_state.rewind()
self.assertEqual(playback_state.get_mode(), PlaybackState.Mode.BACKWARD)
self.assertEqual(playback_state.get_speed(), PlaybackState.max_speed_factor)
def test_reset_should_set_mode_forward(self):
playback_state = PlaybackState(mode=PlaybackState.Mode.BACKWARD)
playback_state.reset()
self.assertTrue(playback_state.is_going_forward())
def test_reset_should_set_normal_speed(self):
playback_state = PlaybackState(speed_factor=PlaybackState.max_speed_factor)
playback_state.reset()
self.assertEqual(playback_state.get_speed(), 1)
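# A minimal sketch of a PlaybackState that would satisfy the tests above
# (assumption: the real implementation lives in src/replay/playback_state.py and
# may differ). The doubling/halving rules and the max_speed_factor cap are
# inferred from the assertions, not copied from the source.
from enum import Enum

class PlaybackState:
    max_speed_factor = 8.0  # assumed cap; the tests only require that one exists

    class Mode(Enum):
        FORWARD = 1
        BACKWARD = 2

    def __init__(self, speed_factor=1.0, mode=None):
        self._speed = speed_factor
        self._mode = mode if mode is not None else PlaybackState.Mode.FORWARD

    def get_speed(self):
        return self._speed

    def get_mode(self):
        return self._mode

    def is_going_forward(self):
        return self._mode == PlaybackState.Mode.FORWARD

    def _accelerate(self):
        # speed doubles in the current direction, clamped to the cap
        self._speed = min(self._speed * 2, PlaybackState.max_speed_factor)

    def fast_forward(self):
        if self._mode == PlaybackState.Mode.BACKWARD:
            if self._speed > 1.0:
                self._speed /= 2  # fast rewind slows down first
            else:
                self._mode = PlaybackState.Mode.FORWARD
        else:
            self._accelerate()

    def rewind(self):
        if self._mode == PlaybackState.Mode.FORWARD:
            if self._speed > 1.0:
                self._speed /= 2  # fast forward slows down first
            else:
                self._mode = PlaybackState.Mode.BACKWARD
        else:
            self._accelerate()

    def reset(self):
        self._speed = 1.0
        self._mode = PlaybackState.Mode.FORWARD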
| 38.978495
| 88
| 0.759448
| 427
| 3,625
| 6.046838
| 0.103045
| 0.196359
| 0.151433
| 0.184353
| 0.849729
| 0.796669
| 0.767235
| 0.749419
| 0.744771
| 0.733927
| 0
| 0.00559
| 0.161103
| 3,625
| 92
| 89
| 39.402174
| 0.843473
| 0
| 0
| 0.711864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.305085
| 1
| 0.169492
| false
| 0
| 0.033898
| 0
| 0.220339
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1c6c6d9506e3755448fcd40de20a2e6510b50db8
| 755
|
py
|
Python
|
tests/lib/cast/test_to_array.py
|
bogdanvuk/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 120
|
2018-04-23T08:29:04.000Z
|
2022-03-30T14:41:52.000Z
|
tests/lib/cast/test_to_array.py
|
FZP1607152286/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 12
|
2019-07-09T17:12:58.000Z
|
2022-03-18T09:05:10.000Z
|
tests/lib/cast/test_to_array.py
|
FZP1607152286/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 12
|
2019-05-10T19:42:08.000Z
|
2022-03-28T18:26:44.000Z
|
import pytest
from pygears.typing import Array, Tuple, Uint, cast
def test_tuple_type_cast():
assert cast(Tuple[Uint[4], Uint[4], Uint[4]], Array) == Array[Uint[4], 3]
with pytest.raises(TypeError):
cast(Tuple[Uint[4], Uint[5], Uint[6]], Array)
assert cast(Tuple[Uint[6], Uint[4], Uint[4]], Array) == Array[Uint[6], 3]
assert cast(Tuple[Uint[4], Uint[4], Uint[4]],
Array[Uint[6]]) == Array[Uint[6], 3]
with pytest.raises(TypeError):
cast(Tuple[Uint[4], Uint[4], Uint[4]], Array[Uint[2]])
assert cast(Tuple[Uint[4], Uint[4], Uint[4]],
Array[Uint[6], 3]) == Array[Uint[6], 3]
with pytest.raises(TypeError):
cast(Tuple[Uint[4], Uint[4], Uint[4]], Array[Uint[4], 2])
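# Re-running two assertions from the test above as a standalone sketch
# (assumption: pygears is installed): an implicit Array cast keeps the shared
# Uint[4] element type, while an explicit Array[Uint[6]] target widens every
# field to Uint[6].
from pygears.typing import Array, Tuple, Uint, cast

assert cast(Tuple[Uint[4], Uint[4], Uint[4]], Array) == Array[Uint[4], 3]
assert cast(Tuple[Uint[4], Uint[4], Uint[4]], Array[Uint[6]]) == Array[Uint[6], 3]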
| 31.458333
| 77
| 0.588079
| 120
| 755
| 3.675
| 0.175
| 0.226757
| 0.244898
| 0.249433
| 0.748299
| 0.748299
| 0.748299
| 0.673469
| 0.673469
| 0.673469
| 0
| 0.058333
| 0.205298
| 755
| 23
| 78
| 32.826087
| 0.676667
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 1
| 0.066667
| true
| 0
| 0.133333
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c6c773ba8780811c991c6692d418ce648d6eae0
| 18,007
|
py
|
Python
|
sdk/python/pulumi_okta/event_hook.py
|
pulumi/pulumi-okta
|
83f7617a85b3d05213901773fa4e6a151ab6076b
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2019-10-29T21:59:22.000Z
|
2021-11-08T12:00:24.000Z
|
sdk/python/pulumi_okta/event_hook.py
|
pulumi/pulumi-okta
|
83f7617a85b3d05213901773fa4e6a151ab6076b
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2020-01-06T10:28:09.000Z
|
2022-03-25T19:52:40.000Z
|
sdk/python/pulumi_okta/event_hook.py
|
pulumi/pulumi-okta
|
83f7617a85b3d05213901773fa4e6a151ab6076b
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-09-11T16:31:04.000Z
|
2020-11-24T12:23:17.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['EventHookArgs', 'EventHook']
@pulumi.input_type
class EventHookArgs:
def __init__(__self__, *,
channel: pulumi.Input[Mapping[str, pulumi.Input[str]]],
events: pulumi.Input[Sequence[pulumi.Input[str]]],
auth: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
headers: Optional[pulumi.Input[Sequence[pulumi.Input['EventHookHeaderArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing an EventHook resource.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] channel: Details of the endpoint the event hook will hit.
:param pulumi.Input[Sequence[pulumi.Input[str]]] events: The events that will be delivered to this hook. [See here for a list of supported events](https://developer.okta.com/docs/reference/api/event-types/?q=event-hook-eligible).
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] auth: Authentication required for event hook request.
:param pulumi.Input[Sequence[pulumi.Input['EventHookHeaderArgs']]] headers: Map of headers to send along in event hook request.
:param pulumi.Input[str] name: The event hook display name.
"""
pulumi.set(__self__, "channel", channel)
pulumi.set(__self__, "events", events)
if auth is not None:
pulumi.set(__self__, "auth", auth)
if headers is not None:
pulumi.set(__self__, "headers", headers)
if name is not None:
pulumi.set(__self__, "name", name)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter
def channel(self) -> pulumi.Input[Mapping[str, pulumi.Input[str]]]:
"""
Details of the endpoint the event hook will hit.
"""
return pulumi.get(self, "channel")
@channel.setter
def channel(self, value: pulumi.Input[Mapping[str, pulumi.Input[str]]]):
pulumi.set(self, "channel", value)
@property
@pulumi.getter
def events(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
The events that will be delivered to this hook. [See here for a list of supported events](https://developer.okta.com/docs/reference/api/event-types/?q=event-hook-eligible).
"""
return pulumi.get(self, "events")
@events.setter
def events(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "events", value)
@property
@pulumi.getter
def auth(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Authentication required for event hook request.
"""
return pulumi.get(self, "auth")
@auth.setter
def auth(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "auth", value)
@property
@pulumi.getter
def headers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EventHookHeaderArgs']]]]:
"""
Map of headers to send along in event hook request.
"""
return pulumi.get(self, "headers")
@headers.setter
def headers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EventHookHeaderArgs']]]]):
pulumi.set(self, "headers", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The event hook display name.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
@pulumi.input_type
class _EventHookState:
def __init__(__self__, *,
auth: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
channel: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
events: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
headers: Optional[pulumi.Input[Sequence[pulumi.Input['EventHookHeaderArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering EventHook resources.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] auth: Authentication required for event hook request.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] channel: Details of the endpoint the event hook will hit.
:param pulumi.Input[Sequence[pulumi.Input[str]]] events: The events that will be delivered to this hook. [See here for a list of supported events](https://developer.okta.com/docs/reference/api/event-types/?q=event-hook-eligible).
:param pulumi.Input[Sequence[pulumi.Input['EventHookHeaderArgs']]] headers: Map of headers to send along in event hook request.
:param pulumi.Input[str] name: The event hook display name.
"""
if auth is not None:
pulumi.set(__self__, "auth", auth)
if channel is not None:
pulumi.set(__self__, "channel", channel)
if events is not None:
pulumi.set(__self__, "events", events)
if headers is not None:
pulumi.set(__self__, "headers", headers)
if name is not None:
pulumi.set(__self__, "name", name)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter
def auth(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Authentication required for event hook request.
"""
return pulumi.get(self, "auth")
@auth.setter
def auth(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "auth", value)
@property
@pulumi.getter
def channel(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Details of the endpoint the event hook will hit.
"""
return pulumi.get(self, "channel")
@channel.setter
def channel(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "channel", value)
@property
@pulumi.getter
def events(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The events that will be delivered to this hook. [See here for a list of supported events](https://developer.okta.com/docs/reference/api/event-types/?q=event-hook-eligible).
"""
return pulumi.get(self, "events")
@events.setter
def events(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "events", value)
@property
@pulumi.getter
def headers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EventHookHeaderArgs']]]]:
"""
Map of headers to send along in event hook request.
"""
return pulumi.get(self, "headers")
@headers.setter
def headers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EventHookHeaderArgs']]]]):
pulumi.set(self, "headers", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The event hook display name.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
class EventHook(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
auth: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
channel: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
events: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
headers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventHookHeaderArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Creates an event hook.
This resource allows you to create and configure an event hook.
## Example Usage
```python
import pulumi
import pulumi_okta as okta
example = okta.EventHook("example",
auth={
"key": "Authorization",
"type": "HEADER",
"value": "123",
},
channel={
"type": "HTTP",
"uri": "https://example.com/test",
"version": "1.0.0",
},
events=[
"user.lifecycle.create",
"user.lifecycle.delete.initiated",
])
```
## Import
An event hook can be imported via the Okta ID.
```sh
$ pulumi import okta:index/eventHook:EventHook example <hook id>
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] auth: Authentication required for event hook request.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] channel: Details of the endpoint the event hook will hit.
:param pulumi.Input[Sequence[pulumi.Input[str]]] events: The events that will be delivered to this hook. [See here for a list of supported events](https://developer.okta.com/docs/reference/api/event-types/?q=event-hook-eligible).
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventHookHeaderArgs']]]] headers: Map of headers to send along in event hook request.
:param pulumi.Input[str] name: The event hook display name.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: EventHookArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates an event hook.
This resource allows you to create and configure an event hook.
## Example Usage
```python
import pulumi
import pulumi_okta as okta
example = okta.EventHook("example",
auth={
"key": "Authorization",
"type": "HEADER",
"value": "123",
},
channel={
"type": "HTTP",
"uri": "https://example.com/test",
"version": "1.0.0",
},
events=[
"user.lifecycle.create",
"user.lifecycle.delete.initiated",
])
```
## Import
An event hook can be imported via the Okta ID.
```sh
$ pulumi import okta:index/eventHook:EventHook example <hook id>
```
:param str resource_name: The name of the resource.
:param EventHookArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(EventHookArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
auth: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
channel: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
events: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
headers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventHookHeaderArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = EventHookArgs.__new__(EventHookArgs)
__props__.__dict__["auth"] = auth
if channel is None and not opts.urn:
raise TypeError("Missing required property 'channel'")
__props__.__dict__["channel"] = channel
if events is None and not opts.urn:
raise TypeError("Missing required property 'events'")
__props__.__dict__["events"] = events
__props__.__dict__["headers"] = headers
__props__.__dict__["name"] = name
__props__.__dict__["status"] = status
super(EventHook, __self__).__init__(
'okta:index/eventHook:EventHook',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
auth: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
channel: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
events: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
headers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventHookHeaderArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None) -> 'EventHook':
"""
Get an existing EventHook resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] auth: Authentication required for event hook request.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] channel: Details of the endpoint the event hook will hit.
:param pulumi.Input[Sequence[pulumi.Input[str]]] events: The events that will be delivered to this hook. [See here for a list of supported events](https://developer.okta.com/docs/reference/api/event-types/?q=event-hook-eligible).
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventHookHeaderArgs']]]] headers: Map of headers to send along in event hook request.
:param pulumi.Input[str] name: The event hook display name.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _EventHookState.__new__(_EventHookState)
__props__.__dict__["auth"] = auth
__props__.__dict__["channel"] = channel
__props__.__dict__["events"] = events
__props__.__dict__["headers"] = headers
__props__.__dict__["name"] = name
__props__.__dict__["status"] = status
return EventHook(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def auth(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Authentication required for event hook request.
"""
return pulumi.get(self, "auth")
@property
@pulumi.getter
def channel(self) -> pulumi.Output[Mapping[str, str]]:
"""
Details of the endpoint the event hook will hit.
"""
return pulumi.get(self, "channel")
@property
@pulumi.getter
def events(self) -> pulumi.Output[Sequence[str]]:
"""
The events that will be delivered to this hook. [See here for a list of supported events](https://developer.okta.com/docs/reference/api/event-types/?q=event-hook-eligible).
"""
return pulumi.get(self, "events")
@property
@pulumi.getter
def headers(self) -> pulumi.Output[Optional[Sequence['outputs.EventHookHeader']]]:
"""
Map of headers to send along in event hook request.
"""
return pulumi.get(self, "headers")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The event hook display name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def status(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "status")
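# Hypothetical adoption sketch (the resource name and Okta ID below are made
# up): inside a Pulumi program, look up an existing hook with the generated
# EventHook.get() method shown above and export one of its output properties.
import pulumi
import pulumi_okta as okta

existing = okta.EventHook.get("imported-hook", id="who8foo9AgtrSucG80h7")
pulumi.export("imported_hook_name", existing.name)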
| 40.925
| 237
| 0.617871
| 2,078
| 18,007
| 5.213667
| 0.089509
| 0.131992
| 0.08141
| 0.050397
| 0.833395
| 0.814288
| 0.788628
| 0.76906
| 0.763615
| 0.761769
| 0
| 0.000978
| 0.261454
| 18,007
| 439
| 238
| 41.018223
| 0.81367
| 0.327206
| 0
| 0.72766
| 1
| 0
| 0.071526
| 0.004817
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157447
| false
| 0.004255
| 0.029787
| 0.012766
| 0.280851
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1c760dd5eccb71695cd51063826788a4978c5a61
| 176
|
py
|
Python
|
plugins/checkdmarc/icon_checkdmarc/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/checkdmarc/icon_checkdmarc/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/checkdmarc/icon_checkdmarc/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .check_domains.action import CheckDomains
from .check_domains_alternate_nameservers.action import CheckDomainsAlternateNameservers
| 44
| 88
| 0.869318
| 21
| 176
| 7.095238
| 0.761905
| 0.120805
| 0.214765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096591
| 176
| 3
| 89
| 58.666667
| 0.937107
| 0.210227
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1c9cb6f9f6c27e46815710c307595f67692f16b9
| 10,220
|
py
|
Python
|
esp8266/clock/dictionary.py
|
JiangYangJie/Embedded
|
70dba3a1e5c1fb7b9a7d8b633a5fc05138894456
|
[
"MIT"
] | 1
|
2019-07-23T07:14:07.000Z
|
2019-07-23T07:14:07.000Z
|
esp8266/clock/dictionary.py
|
JiangYangJie/Embedded
|
70dba3a1e5c1fb7b9a7d8b633a5fc05138894456
|
[
"MIT"
] | null | null | null |
esp8266/clock/dictionary.py
|
JiangYangJie/Embedded
|
70dba3a1e5c1fb7b9a7d8b633a5fc05138894456
|
[
"MIT"
] | 2
|
2019-07-22T11:42:55.000Z
|
2019-12-15T01:43:19.000Z
|
dicts={
0xe88f9c:
[0x00,0x00,0x00,0x00,0x00,0x3F,0x00,0x00,0x00,0x00,0x07,0x00,0x02,0x01,0x00,0x00,0x00,0x00,0x7F,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x0C,0x30,0x00,0x00,
0x00,0x00,0x1C,0x18,0x18,0xFF,0x18,0x18,0x10,0x00,0xFF,0x02,0x01,0x81,0xC0,0xC0,0x41,0x01,0xFF,0x07,0x0D,0x19,0x31,0x61,0xC1,0x01,0x01,0x01,0x01,0x00,
0x00,0x00,0x38,0x30,0x30,0xFF,0x30,0x30,0x07,0xFF,0x00,0x01,0x81,0xC3,0xC2,0x86,0xCC,0x88,0xFF,0xA0,0xA0,0x90,0x8C,0x86,0x83,0x81,0x80,0x80,0x00,0x00,
0x00,0x00,0x00,0x00,0x38,0xFC,0x00,0x80,0xC0,0xC0,0x00,0x80,0xC0,0x00,0x00,0x00,0x00,0x18,0xFC,0x00,0x00,0x00,0x00,0x00,0x80,0xF0,0x7C,0x10,0x00,0x00],#/*"菜",0*/
0xe58d95:
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x00,0x00,0x3F,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x40,0x20,0x38,0x18,0x0C,0x08,0xFF,0x01,0x01,0x01,0x01,0xFF,0x01,0x01,0x01,0x01,0xFF,0x01,0x01,0x01,0xFF,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x00,
0x00,0x00,0x0C,0x0C,0x18,0x10,0x20,0xFF,0x80,0x80,0x80,0x80,0xFF,0x80,0x80,0x80,0x80,0xFF,0x80,0x80,0x80,0xFF,0x80,0x80,0x80,0x80,0x80,0x80,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xC0,0x80,0x80,0x80,0x80,0x80,0x80,0x80,0x80,0x80,0x80,0x00,0x00,0x38,0xFC,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],#/*"单",1*/
0xe697b6:
[0x00,0x00,0x00,0x00,0x00,0x10,0x1F,0x18,0x18,0x18,0x18,0x18,0x18,0x1F,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x1F,0x18,0x18,0x10,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x30,0xF0,0x30,0x30,0x3F,0x30,0x30,0x30,0xF3,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0xF0,0x30,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0xFF,0x03,0x03,0x03,0x03,0x83,0xC3,0xE3,0x63,0x43,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x3F,0x06,0x04,0x00,
0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x00,0x18,0xFC,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],#/*"时",0*/
0xe9929f:
[0x00,0x00,0x03,0x03,0x03,0x06,0x06,0x05,0x0C,0x08,0x08,0x1F,0x13,0x23,0x43,0x03,0x03,0x3F,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x01,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x18,0xE0,0x01,0x01,0x11,0xF9,0x01,0x01,0x01,0x01,0x19,0xE1,0x01,0x00,0x00,0x08,0x10,0x60,0xC0,0x80,0x00,0x00,0x00,0x00,
0x00,0x00,0x06,0x06,0x06,0x06,0x06,0x06,0x06,0xFF,0x86,0x86,0x86,0x86,0x86,0x86,0xFF,0x86,0x06,0x06,0x06,0x06,0x06,0x06,0x06,0x06,0x06,0x06,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0xF8,0x10,0x10,0x10,0x10,0x10,0x10,0xF0,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],#/*"钟",1*/
0xe997b9:
[0x00,0x00,0x01,0x00,0x00,0x00,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x08,0x00,
0x00,0x00,0x00,0xC0,0xE7,0x60,0x42,0x03,0x01,0x00,0xFF,0x01,0x01,0x01,0x7F,0x61,0x61,0x61,0x61,0x61,0x61,0x61,0x61,0x01,0x01,0x01,0x01,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0xFF,0x00,0x00,0x80,0x80,0x82,0xFF,0x80,0x80,0x80,0xFE,0x86,0x86,0x86,0x86,0x86,0x86,0xBC,0x8C,0x80,0x80,0x80,0x03,0x00,0x00,0x00,
0x00,0x00,0x00,0x30,0xF0,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0xE0,0xE0,0x00,0x00],#/*"闹",0*/
'30':
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x03,0x07,0x0F,0x0F,0x1F,0x1F,0x1F,0x1F,0x1F,0x1F,0x1F,0x0F,0x0F,0x07,0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x07,0x7C,0xF0,0xC0,0x80,0x80,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x80,0xC0,0xE0,0x7C,0x07,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xE0,0x1E,0x07,0x03,0x01,0x01,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x01,0x01,0x03,0x07,0x1E,0xE0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xC0,0xE0,0xE0,0xF0,0xF0,0xF8,0xF8,0xF8,0xF8,0xF8,0xF0,0xF0,0xE0,0xE0,0xC0,0x00,0x00,0x00,0x00,0x00,0x00,0x00],#/*"0",0*/
'31':
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x01,0xFF,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x07,0xFF,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x40,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xF0,0xFF,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xC0,0x00,0x00,0x00,0x00],#/*"1",1*/
'32':
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x07,0x0F,0x0F,0x07,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x07,0x0C,0x1F,0x1F,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x0F,0xF0,0x80,0x00,0x00,0xC0,0xC0,0x00,0x00,0x00,0x00,0x00,0x01,0x07,0x1C,0x70,0xC0,0x00,0x00,0xFF,0xFF,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xF0,0x1F,0x03,0x01,0x01,0x01,0x01,0x03,0x07,0x0F,0x1C,0x70,0xC0,0x00,0x00,0x00,0x00,0x00,0x01,0xFF,0xFF,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xC0,0xE0,0xE0,0xE0,0xE0,0xE0,0xC0,0x00,0x00,0x00,0x00,0x00,0x00,0x30,0x30,0x60,0xE0,0xE0,0xE0,0x00,0x00,0x00,0x00],#/*"2",2*/
'33':
[0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x07,0x07,0x07,0x07,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x0F,0x0F,0x07,0x01,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x1F,0xE0,0x80,0x80,0x80,0x80,0x00,0x00,0x00,0x0F,0x00,0x00,0x00,0x00,0x00,0x80,0x80,0x80,0x80,0xE0,0x1F,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xE0,0x3E,0x0F,0x07,0x03,0x03,0x03,0x07,0x1E,0xE0,0x7C,0x07,0x03,0x01,0x01,0x01,0x01,0x01,0x03,0x1E,0xE0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xC0,0xC0,0xC0,0xC0,0x80,0x00,0x00,0x00,0x80,0xE0,0xE0,0xF0,0xF0,0xF0,0xE0,0xC0,0x00,0x00,0x00,0x00,0x00,0x00],#/*"3",3*/
'34':
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x07,0x0C,0x18,0x07,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x07,0x0C,0x18,0x70,0xC0,0x80,0x00,0x00,0x00,0xFF,0x00,0x00,0x00,0x00,0x00,0x0F,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x0E,0x1E,0x7E,0xDE,0x9E,0x1E,0x1E,0x1E,0x1E,0x1E,0x1E,0x1E,0x1E,0x1E,0xFF,0x1E,0x1E,0x1E,0x1E,0x3F,0xFF,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xF8,0x00,0x00,0x00,0x00,0x00,0xF8,0x00,0x00,0x00,0x00],#/*"4",4*/
'35':
[0x00,0x00,0x00,0x00,0x00,0x01,0x01,0x01,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x00,0x00,0x00,0x00,0x0F,0x0F,0x0F,0x07,0x01,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xFF,0xFF,0x00,0x00,0x00,0x00,0x00,0x00,0x3F,0xE0,0x80,0x00,0x00,0x00,0x00,0x80,0x80,0x00,0x00,0xE0,0x1F,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xFF,0xFF,0x00,0x00,0x00,0x00,0x00,0x00,0xFE,0x07,0x03,0x01,0x00,0x00,0x00,0x00,0x01,0x01,0x07,0x1F,0xF0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xE0,0xE0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xE0,0xE0,0xF0,0xF0,0xF0,0xF0,0xE0,0xE0,0x80,0x00,0x00,0x00,0x00,0x00,0x00],#/*"5",5*/
'36':
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x03,0x07,0x0F,0x0F,0x1F,0x1F,0x1F,0x1F,0x1F,0x1F,0x1F,0x0F,0x07,0x07,0x01,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x01,0x3E,0xE0,0xC0,0x80,0x80,0x00,0x00,0x0F,0x78,0xC0,0x80,0x00,0x00,0x00,0x00,0x80,0xC0,0xE0,0x78,0x07,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xFC,0x07,0x07,0x07,0x00,0x00,0x00,0x00,0xFF,0x07,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x07,0xF8,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xC0,0xC0,0x00,0x00,0x00,0x00,0x00,0xC0,0xE0,0xF0,0xF8,0xF8,0xF8,0xF8,0xF0,0xE0,0xC0,0x00,0x00,0x00,0x00,0x00,0x00],#/*"6",6*/
'37':
[0x00,0x00,0x00,0x00,0x00,0x03,0x07,0x07,0x06,0x0C,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xFF,0xFF,0xC0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x03,0x07,0x07,0x0F,0x0F,0x1F,0x1F,0x1F,0x07,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xFF,0xFF,0x00,0x01,0x03,0x06,0x0C,0x18,0x30,0x70,0xE0,0xC0,0xC0,0x80,0x80,0x80,0x80,0x80,0x80,0x80,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xF0,0xF0,0xE0,0xC0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],#/*"7",7*/
'38':
[0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x07,0x0F,0x0E,0x0F,0x0F,0x07,0x01,0x00,0x00,0x03,0x07,0x0E,0x1C,0x1C,0x1C,0x0E,0x07,0x01,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x0F,0xF0,0x80,0x00,0x00,0x00,0x80,0xE0,0xFC,0x3F,0x77,0xC0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xE0,0x1F,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xF0,0x0F,0x01,0x00,0x00,0x00,0x00,0x01,0x07,0xF8,0xFC,0x3F,0x07,0x01,0x00,0x00,0x00,0x00,0x01,0x0F,0xF0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xC0,0xE0,0xF0,0xF0,0xE0,0xC0,0x00,0x00,0x00,0x80,0xC0,0xE0,0xF0,0xF0,0xF0,0xE0,0xC0,0x00,0x00,0x00,0x00,0x00,0x00],#/*"8",8*/
'39':
[0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x07,0x0F,0x1E,0x1E,0x1E,0x1E,0x1E,0x1F,0x0F,0x03,0x00,0x00,0x00,0x00,0x00,0x07,0x07,0x03,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x1F,0xE0,0x80,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xE0,0xFF,0x00,0x00,0x00,0x00,0xC0,0xC0,0xE0,0x7F,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0xE0,0x1E,0x07,0x03,0x01,0x00,0x00,0x00,0x01,0x03,0x06,0x38,0xE1,0x01,0x01,0x03,0x03,0x07,0x1E,0x78,0x80,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xC0,0xE0,0xF0,0xF0,0xF0,0xF0,0xF0,0xF0,0xF0,0xF0,0xF0,0xE0,0xC0,0xC0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],#/*"9",9*/
'20':
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],#/*" ",1*/
'3a':
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x40,0x40,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x40,0x40,0x00,0x00,0x00,0x00]#/*":",1*/
}
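# A minimal consumption sketch for the table above (assumptions: multi-byte keys
# such as 0xe88f9c are the UTF-8 bytes of a CJK character packed into an int,
# single-byte characters are keyed by their hex string such as '30' for "0", and
# each full glyph is 30x32 pixels stored as four 30-byte pages for an
# SSD1306-style display).
def glyph_for(char):
    raw = char.encode('utf-8')
    if len(raw) > 1:
        key = int.from_bytes(raw, 'big')  # e.g. "菜" -> 0xe88f9c
    else:
        key = format(raw[0], 'x')         # e.g. "0" -> '30', ":" -> '3a'
    return dicts.get(key)

bitmap = glyph_for('菜')
assert bitmap is not None and len(bitmap) == 120  # 4 pages x 30 columns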
| 116.136364
| 162
| 0.773092
| 2,000
| 10,220
| 3.9505
| 0.051
| 0.921402
| 1.186179
| 1.346665
| 0.866599
| 0.805974
| 0.762435
| 0.692571
| 0.652576
| 0.543729
| 0
| 0.544958
| 0.015166
| 10,220
| 88
| 163
| 116.136364
| 0.24004
| 0.014971
| 0
| 0.02381
| 0
| 0
| 0.002388
| 0
| 0
| 1
| 0.780022
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|